From 74bb30b0f6d516be31432614e532960eb44fe7ef Mon Sep 17 00:00:00 2001
From: Owl Bot <gcf-owl-bot[bot]@users.noreply.github.com>
Date: Thu, 14 Nov 2024 22:19:17 +0000
Subject: [PATCH 1/3] feat: update Go Datastore import path

feat: update Go Bigtable import path
feat: release MetadataJob APIs and related resources in GA
feat: expose create time in DataScanJob API
docs: correct API documentation
feat: release metadata export in private preview
feat: Add data_version field to AspectSource
docs: add info about schema changes for BigQuery metadata in Dataplex Catalog
docs: Add Identifier for `name` in message `.google.cloud.dataplex.v1.MetadataJob` per https://google.aip.dev/cloud/2510
docs: Add comment for field `type` in message `.google.cloud.dataplex.v1.MetadataJob` per https://linter.aip.dev/192/has-comments
docs: Add comment for field `status` in message `.google.cloud.dataplex.v1.MetadataJob` per https://linter.aip.dev/192/has-comments
docs: Add link to fully qualified names documentation
feat: add annotations in CreateMetadataJob, GetMetadataJob, ListMetadataJobs and CancelMetadataJob for cloud audit logging
feat: Add new Data Discovery scan type in Datascan
docs: Scrub descriptions for standalone discovery scans
docs: correct the dimensions for data quality rules
feat: A new field `suspended` is added to DataScans
feat: expose create time to customers
feat: Add a TABLE_PUBLISHED field in DiscoveryEvent
feat: Add a TABLE_UPDATED field in DiscoveryEvent
feat: Add a TABLE_IGNORED field in DiscoveryEvent
feat: Add a TABLE_DELETED field in DiscoveryEvent
feat: Add a DATA_DISCOVERY enum type in DataScanEvent
feat: Add a DataDiscoveryAppliedConfigs message
docs: A comment for message `DataScanEvent` is changed
feat: Add an Issue field to DiscoveryEvent.ActionDetails to output the action message in Cloud Logs
feat: A new enum `TableType` is added
feat: A new message `TableDetails` is added
feat: A new field `datascan_id` is added to message `.google.cloud.dataplex.v1.DiscoveryEvent`
feat: A new field `table` is added to message `.google.cloud.dataplex.v1.DiscoveryEvent`
docs: Dataplex Tasks do not support Dataplex Content path as a direct input anymore

PiperOrigin-RevId: 696600248
Source-Link: https://github.com/googleapis/googleapis/commit/43e53378966d2539cd44d31d9a33d5259fc4c73c
Source-Link: https://github.com/googleapis/googleapis-gen/commit/1f67afb4970ea4c3fe535021a5fc8ba16968a026
Copy-Tag: eyJwIjoicGFja2FnZXMvZ29vZ2xlLWNsb3VkLWRhdGFwbGV4Ly5Pd2xCb3QueWFtbCIsImgiOiIxZjY3YWZiNDk3MGVhNGMzZmU1MzUwMjFhNWZjOGJhMTY5NjhhMDI2In0=
---
 .../google-cloud-dataplex/v1/.coveragerc | 13 + .../google-cloud-dataplex/v1/.flake8 | 33 + .../google-cloud-dataplex/v1/MANIFEST.in | 2 + .../google-cloud-dataplex/v1/README.rst | 49 + .../v1/docs/_static/custom.css | 3 + .../google-cloud-dataplex/v1/docs/conf.py | 376 + .../v1/docs/dataplex_v1/catalog_service.rst | 10 + .../v1/docs/dataplex_v1/content_service.rst | 10 + .../v1/docs/dataplex_v1/data_scan_service.rst | 10 + .../dataplex_v1/data_taxonomy_service.rst | 10 + .../v1/docs/dataplex_v1/dataplex_service.rst | 10 + .../v1/docs/dataplex_v1/metadata_service.rst | 10 + .../v1/docs/dataplex_v1/services_.rst | 11 + .../v1/docs/dataplex_v1/types_.rst | 6 + .../google-cloud-dataplex/v1/docs/index.rst | 7 + .../v1/google/cloud/dataplex/__init__.py | 407 + .../v1/google/cloud/dataplex/gapic_version.py | 16 + .../v1/google/cloud/dataplex/py.typed | 2 + .../v1/google/cloud/dataplex_v1/__init__.py | 408 + .../cloud/dataplex_v1/gapic_metadata.json | 1093 + .../google/cloud/dataplex_v1/gapic_version.py | 16 + 
.../v1/google/cloud/dataplex_v1/py.typed | 2 + .../cloud/dataplex_v1/services/__init__.py | 15 + .../services/catalog_service/__init__.py | 22 + .../services/catalog_service/async_client.py | 3652 ++++ .../services/catalog_service/client.py | 3986 ++++ .../services/catalog_service/pagers.py | 837 + .../catalog_service/transports/README.rst | 9 + .../catalog_service/transports/__init__.py | 33 + .../catalog_service/transports/base.py | 707 + .../catalog_service/transports/grpc.py | 1076 + .../transports/grpc_asyncio.py | 1362 ++ .../services/content_service/__init__.py | 22 + .../services/content_service/async_client.py | 1435 ++ .../services/content_service/client.py | 1767 ++ .../services/content_service/pagers.py | 163 + .../content_service/transports/README.rst | 9 + .../content_service/transports/__init__.py | 33 + .../content_service/transports/base.py | 377 + .../content_service/transports/grpc.py | 587 + .../transports/grpc_asyncio.py | 709 + .../services/data_scan_service/__init__.py | 22 + .../data_scan_service/async_client.py | 1665 ++ .../services/data_scan_service/client.py | 2015 ++ .../services/data_scan_service/pagers.py | 297 + .../data_scan_service/transports/README.rst | 9 + .../data_scan_service/transports/__init__.py | 33 + .../data_scan_service/transports/base.py | 358 + .../data_scan_service/transports/grpc.py | 615 + .../transports/grpc_asyncio.py | 706 + .../data_taxonomy_service/__init__.py | 22 + .../data_taxonomy_service/async_client.py | 2420 +++ .../services/data_taxonomy_service/client.py | 2746 +++ .../services/data_taxonomy_service/pagers.py | 432 + .../transports/README.rst | 9 + .../transports/__init__.py | 33 + .../data_taxonomy_service/transports/base.py | 443 + .../data_taxonomy_service/transports/grpc.py | 773 + .../transports/grpc_asyncio.py | 894 + .../services/dataplex_service/__init__.py | 22 + .../services/dataplex_service/async_client.py | 4580 +++++ .../services/dataplex_service/client.py | 4933 +++++ .../services/dataplex_service/pagers.py | 1380 ++ .../dataplex_service/transports/README.rst | 9 + .../dataplex_service/transports/__init__.py | 33 + .../dataplex_service/transports/base.py | 833 + .../dataplex_service/transports/grpc.py | 1247 ++ .../transports/grpc_asyncio.py | 1593 ++ .../services/metadata_service/__init__.py | 22 + .../services/metadata_service/async_client.py | 1507 ++ .../services/metadata_service/client.py | 1840 ++ .../services/metadata_service/pagers.py | 297 + .../metadata_service/transports/README.rst | 9 + .../metadata_service/transports/__init__.py | 33 + .../metadata_service/transports/base.py | 389 + .../metadata_service/transports/grpc.py | 593 + .../transports/grpc_asyncio.py | 720 + .../cloud/dataplex_v1/types/__init__.py | 408 + .../google/cloud/dataplex_v1/types/analyze.py | 491 + .../google/cloud/dataplex_v1/types/catalog.py | 2630 +++ .../google/cloud/dataplex_v1/types/content.py | 227 + .../cloud/dataplex_v1/types/data_discovery.py | 250 + .../cloud/dataplex_v1/types/data_profile.py | 540 + .../cloud/dataplex_v1/types/data_quality.py | 912 + .../cloud/dataplex_v1/types/data_taxonomy.py | 976 + .../cloud/dataplex_v1/types/datascans.py | 905 + .../v1/google/cloud/dataplex_v1/types/logs.py | 1352 ++ .../cloud/dataplex_v1/types/metadata_.py | 1182 ++ .../cloud/dataplex_v1/types/processing.py | 186 + .../cloud/dataplex_v1/types/resources.py | 1444 ++ .../cloud/dataplex_v1/types/security.py | 90 + .../google/cloud/dataplex_v1/types/service.py | 1395 ++ .../google/cloud/dataplex_v1/types/tasks.py | 751 + 
.../google-cloud-dataplex/v1/mypy.ini | 3 + .../google-cloud-dataplex/v1/noxfile.py | 280 + ...talog_service_cancel_metadata_job_async.py | 50 + ...atalog_service_cancel_metadata_job_sync.py | 50 + ...atalog_service_create_aspect_type_async.py | 62 + ...catalog_service_create_aspect_type_sync.py | 62 + ...ated_catalog_service_create_entry_async.py | 57 + ...atalog_service_create_entry_group_async.py | 57 + ...catalog_service_create_entry_group_sync.py | 57 + ...rated_catalog_service_create_entry_sync.py | 57 + ...catalog_service_create_entry_type_async.py | 57 + ..._catalog_service_create_entry_type_sync.py | 57 + ...talog_service_create_metadata_job_async.py | 64 + ...atalog_service_create_metadata_job_sync.py | 64 + ...atalog_service_delete_aspect_type_async.py | 56 + ...catalog_service_delete_aspect_type_sync.py | 56 + ...ated_catalog_service_delete_entry_async.py | 52 + ...atalog_service_delete_entry_group_async.py | 56 + ...catalog_service_delete_entry_group_sync.py | 56 + ...rated_catalog_service_delete_entry_sync.py | 52 + ...catalog_service_delete_entry_type_async.py | 56 + ..._catalog_service_delete_entry_type_sync.py | 56 + ...d_catalog_service_get_aspect_type_async.py | 52 + ...ed_catalog_service_get_aspect_type_sync.py | 52 + ...nerated_catalog_service_get_entry_async.py | 52 + ...d_catalog_service_get_entry_group_async.py | 52 + ...ed_catalog_service_get_entry_group_sync.py | 52 + ...enerated_catalog_service_get_entry_sync.py | 52 + ...ed_catalog_service_get_entry_type_async.py | 52 + ...ted_catalog_service_get_entry_type_sync.py | 52 + ..._catalog_service_get_metadata_job_async.py | 52 + ...d_catalog_service_get_metadata_job_sync.py | 52 + ...catalog_service_list_aspect_types_async.py | 53 + ..._catalog_service_list_aspect_types_sync.py | 53 + ...ated_catalog_service_list_entries_async.py | 53 + ...rated_catalog_service_list_entries_sync.py | 53 + ...catalog_service_list_entry_groups_async.py | 53 + ..._catalog_service_list_entry_groups_sync.py | 53 + ..._catalog_service_list_entry_types_async.py | 53 + ...d_catalog_service_list_entry_types_sync.py | 53 + ...atalog_service_list_metadata_jobs_async.py | 53 + ...catalog_service_list_metadata_jobs_sync.py | 53 + ...ated_catalog_service_lookup_entry_async.py | 53 + ...rated_catalog_service_lookup_entry_sync.py | 53 + ...ed_catalog_service_search_entries_async.py | 54 + ...ted_catalog_service_search_entries_sync.py | 54 + ...atalog_service_update_aspect_type_async.py | 60 + ...catalog_service_update_aspect_type_sync.py | 60 + ...ated_catalog_service_update_entry_async.py | 55 + ...atalog_service_update_entry_group_async.py | 55 + ...catalog_service_update_entry_group_sync.py | 55 + ...rated_catalog_service_update_entry_sync.py | 55 + ...catalog_service_update_entry_type_async.py | 55 + ..._catalog_service_update_entry_type_sync.py | 55 + ...ed_content_service_create_content_async.py | 58 + ...ted_content_service_create_content_sync.py | 58 + ...ed_content_service_delete_content_async.py | 50 + ...ted_content_service_delete_content_sync.py | 50 + ...rated_content_service_get_content_async.py | 52 + ...erated_content_service_get_content_sync.py | 52 + ...ed_content_service_get_iam_policy_async.py | 53 + ...ted_content_service_get_iam_policy_sync.py | 53 + ...ated_content_service_list_content_async.py | 53 + ...rated_content_service_list_content_sync.py | 53 + ...ed_content_service_set_iam_policy_async.py | 53 + ...ted_content_service_set_iam_policy_sync.py | 53 + ...tent_service_test_iam_permissions_async.py | 54 + 
...ntent_service_test_iam_permissions_sync.py | 54 + ...ed_content_service_update_content_async.py | 57 + ...ted_content_service_update_content_sync.py | 57 + ...ata_scan_service_create_data_scan_async.py | 62 + ...data_scan_service_create_data_scan_sync.py | 62 + ...ata_scan_service_delete_data_scan_async.py | 56 + ...data_scan_service_delete_data_scan_sync.py | 56 + ...rvice_generate_data_quality_rules_async.py | 52 + ...ervice_generate_data_quality_rules_sync.py | 52 + ...d_data_scan_service_get_data_scan_async.py | 52 + ...ta_scan_service_get_data_scan_job_async.py | 52 + ...ata_scan_service_get_data_scan_job_sync.py | 52 + ...ed_data_scan_service_get_data_scan_sync.py | 52 + ..._scan_service_list_data_scan_jobs_async.py | 53 + ...a_scan_service_list_data_scan_jobs_sync.py | 53 + ...data_scan_service_list_data_scans_async.py | 53 + ..._data_scan_service_list_data_scans_sync.py | 53 + ...d_data_scan_service_run_data_scan_async.py | 52 + ...ed_data_scan_service_run_data_scan_sync.py | 52 + ...ata_scan_service_update_data_scan_async.py | 60 + ...data_scan_service_update_data_scan_sync.py | 60 + ...omy_service_create_data_attribute_async.py | 57 + ...ice_create_data_attribute_binding_async.py | 61 + ...vice_create_data_attribute_binding_sync.py | 61 + ...nomy_service_create_data_attribute_sync.py | 57 + ...nomy_service_create_data_taxonomy_async.py | 57 + ...onomy_service_create_data_taxonomy_sync.py | 57 + ...omy_service_delete_data_attribute_async.py | 56 + ...ice_delete_data_attribute_binding_async.py | 57 + ...vice_delete_data_attribute_binding_sync.py | 57 + ...nomy_service_delete_data_attribute_sync.py | 56 + ...nomy_service_delete_data_taxonomy_async.py | 56 + ...onomy_service_delete_data_taxonomy_sync.py | 56 + ...xonomy_service_get_data_attribute_async.py | 52 + ...ervice_get_data_attribute_binding_async.py | 52 + ...service_get_data_attribute_binding_sync.py | 52 + ...axonomy_service_get_data_attribute_sync.py | 52 + ...axonomy_service_get_data_taxonomy_async.py | 52 + ...taxonomy_service_get_data_taxonomy_sync.py | 52 + ...vice_list_data_attribute_bindings_async.py | 53 + ...rvice_list_data_attribute_bindings_sync.py | 53 + ...nomy_service_list_data_attributes_async.py | 53 + ...onomy_service_list_data_attributes_sync.py | 53 + ...nomy_service_list_data_taxonomies_async.py | 53 + ...onomy_service_list_data_taxonomies_sync.py | 53 + ...omy_service_update_data_attribute_async.py | 55 + ...ice_update_data_attribute_binding_async.py | 59 + ...vice_update_data_attribute_binding_sync.py | 59 + ...nomy_service_update_data_attribute_sync.py | 55 + ...nomy_service_update_data_taxonomy_async.py | 55 + ...onomy_service_update_data_taxonomy_sync.py | 55 + ...rated_dataplex_service_cancel_job_async.py | 50 + ...erated_dataplex_service_cancel_job_sync.py | 50 + ...ted_dataplex_service_create_asset_async.py | 61 + ...ated_dataplex_service_create_asset_sync.py | 61 + ...taplex_service_create_environment_async.py | 61 + ...ataplex_service_create_environment_sync.py | 61 + ...ated_dataplex_service_create_lake_async.py | 57 + ...rated_dataplex_service_create_lake_sync.py | 57 + ...ated_dataplex_service_create_task_async.py | 64 + ...rated_dataplex_service_create_task_sync.py | 64 + ...ated_dataplex_service_create_zone_async.py | 62 + ...rated_dataplex_service_create_zone_sync.py | 62 + ...ted_dataplex_service_delete_asset_async.py | 56 + ...ated_dataplex_service_delete_asset_sync.py | 56 + ...taplex_service_delete_environment_async.py | 56 + ...ataplex_service_delete_environment_sync.py | 56 + 
...ated_dataplex_service_delete_lake_async.py | 56 + ...rated_dataplex_service_delete_lake_sync.py | 56 + ...ated_dataplex_service_delete_task_async.py | 56 + ...rated_dataplex_service_delete_task_sync.py | 56 + ...ated_dataplex_service_delete_zone_async.py | 56 + ...rated_dataplex_service_delete_zone_sync.py | 56 + ...erated_dataplex_service_get_asset_async.py | 52 + ...nerated_dataplex_service_get_asset_sync.py | 52 + ..._dataplex_service_get_environment_async.py | 52 + ...d_dataplex_service_get_environment_sync.py | 52 + ...enerated_dataplex_service_get_job_async.py | 52 + ...generated_dataplex_service_get_job_sync.py | 52 + ...nerated_dataplex_service_get_lake_async.py | 52 + ...enerated_dataplex_service_get_lake_sync.py | 52 + ...nerated_dataplex_service_get_task_async.py | 52 + ...enerated_dataplex_service_get_task_sync.py | 52 + ...nerated_dataplex_service_get_zone_async.py | 52 + ...enerated_dataplex_service_get_zone_sync.py | 52 + ...taplex_service_list_asset_actions_async.py | 53 + ...ataplex_service_list_asset_actions_sync.py | 53 + ...ated_dataplex_service_list_assets_async.py | 53 + ...rated_dataplex_service_list_assets_sync.py | 53 + ...ataplex_service_list_environments_async.py | 53 + ...dataplex_service_list_environments_sync.py | 53 + ...erated_dataplex_service_list_jobs_async.py | 53 + ...nerated_dataplex_service_list_jobs_sync.py | 53 + ...ataplex_service_list_lake_actions_async.py | 53 + ...dataplex_service_list_lake_actions_sync.py | 53 + ...rated_dataplex_service_list_lakes_async.py | 53 + ...erated_dataplex_service_list_lakes_sync.py | 53 + ...ed_dataplex_service_list_sessions_async.py | 53 + ...ted_dataplex_service_list_sessions_sync.py | 53 + ...rated_dataplex_service_list_tasks_async.py | 53 + ...erated_dataplex_service_list_tasks_sync.py | 53 + ...ataplex_service_list_zone_actions_async.py | 53 + ...dataplex_service_list_zone_actions_sync.py | 53 + ...rated_dataplex_service_list_zones_async.py | 53 + ...erated_dataplex_service_list_zones_sync.py | 53 + ...nerated_dataplex_service_run_task_async.py | 52 + ...enerated_dataplex_service_run_task_sync.py | 52 + ...ted_dataplex_service_update_asset_async.py | 59 + ...ated_dataplex_service_update_asset_sync.py | 59 + ...taplex_service_update_environment_async.py | 59 + ...ataplex_service_update_environment_sync.py | 59 + ...ated_dataplex_service_update_lake_async.py | 55 + ...rated_dataplex_service_update_lake_sync.py | 55 + ...ated_dataplex_service_update_task_async.py | 62 + ...rated_dataplex_service_update_task_sync.py | 62 + ...ated_dataplex_service_update_zone_async.py | 60 + ...rated_dataplex_service_update_zone_sync.py | 60 + ...ed_metadata_service_create_entity_async.py | 62 + ...ted_metadata_service_create_entity_sync.py | 62 + ...metadata_service_create_partition_async.py | 57 + ..._metadata_service_create_partition_sync.py | 57 + ...ed_metadata_service_delete_entity_async.py | 51 + ...ted_metadata_service_delete_entity_sync.py | 51 + ...metadata_service_delete_partition_async.py | 50 + ..._metadata_service_delete_partition_sync.py | 50 + ...rated_metadata_service_get_entity_async.py | 52 + ...erated_metadata_service_get_entity_sync.py | 52 + ...ed_metadata_service_get_partition_async.py | 52 + ...ted_metadata_service_get_partition_sync.py | 52 + ...ed_metadata_service_list_entities_async.py | 54 + ...ted_metadata_service_list_entities_sync.py | 54 + ..._metadata_service_list_partitions_async.py | 53 + ...d_metadata_service_list_partitions_sync.py | 53 + ...ed_metadata_service_update_entity_async.py | 61 + 
...ted_metadata_service_update_entity_sync.py | 61 + ...pet_metadata_google.cloud.dataplex.v1.json | 16421 +++++++++++++++ .../v1/scripts/fixup_dataplex_v1_keywords.py | 275 + .../google-cloud-dataplex/v1/setup.py | 99 + .../v1/testing/constraints-3.10.txt | 7 + .../v1/testing/constraints-3.11.txt | 7 + .../v1/testing/constraints-3.12.txt | 7 + .../v1/testing/constraints-3.13.txt | 7 + .../v1/testing/constraints-3.7.txt | 11 + .../v1/testing/constraints-3.8.txt | 7 + .../v1/testing/constraints-3.9.txt | 7 + .../v1/tests/__init__.py | 16 + .../v1/tests/unit/__init__.py | 16 + .../v1/tests/unit/gapic/__init__.py | 16 + .../tests/unit/gapic/dataplex_v1/__init__.py | 16 + .../gapic/dataplex_v1/test_catalog_service.py | 13193 ++++++++++++ .../gapic/dataplex_v1/test_content_service.py | 5202 +++++ .../dataplex_v1/test_data_scan_service.py | 6013 ++++++ .../dataplex_v1/test_data_taxonomy_service.py | 8505 ++++++++ .../dataplex_v1/test_dataplex_service.py | 16649 ++++++++++++++++ .../dataplex_v1/test_metadata_service.py | 5952 ++++++ 315 files changed, 150265 insertions(+) create mode 100644 owl-bot-staging/google-cloud-dataplex/v1/.coveragerc create mode 100644 owl-bot-staging/google-cloud-dataplex/v1/.flake8 create mode 100644 owl-bot-staging/google-cloud-dataplex/v1/MANIFEST.in create mode 100644 owl-bot-staging/google-cloud-dataplex/v1/README.rst create mode 100644 owl-bot-staging/google-cloud-dataplex/v1/docs/_static/custom.css create mode 100644 owl-bot-staging/google-cloud-dataplex/v1/docs/conf.py create mode 100644 owl-bot-staging/google-cloud-dataplex/v1/docs/dataplex_v1/catalog_service.rst create mode 100644 owl-bot-staging/google-cloud-dataplex/v1/docs/dataplex_v1/content_service.rst create mode 100644 owl-bot-staging/google-cloud-dataplex/v1/docs/dataplex_v1/data_scan_service.rst create mode 100644 owl-bot-staging/google-cloud-dataplex/v1/docs/dataplex_v1/data_taxonomy_service.rst create mode 100644 owl-bot-staging/google-cloud-dataplex/v1/docs/dataplex_v1/dataplex_service.rst create mode 100644 owl-bot-staging/google-cloud-dataplex/v1/docs/dataplex_v1/metadata_service.rst create mode 100644 owl-bot-staging/google-cloud-dataplex/v1/docs/dataplex_v1/services_.rst create mode 100644 owl-bot-staging/google-cloud-dataplex/v1/docs/dataplex_v1/types_.rst create mode 100644 owl-bot-staging/google-cloud-dataplex/v1/docs/index.rst create mode 100644 owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex/__init__.py create mode 100644 owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex/gapic_version.py create mode 100644 owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex/py.typed create mode 100644 owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/__init__.py create mode 100644 owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/gapic_metadata.json create mode 100644 owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/gapic_version.py create mode 100644 owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/py.typed create mode 100644 owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/__init__.py create mode 100644 owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/catalog_service/__init__.py create mode 100644 owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/catalog_service/async_client.py create mode 100644 owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/catalog_service/client.py create mode 100644 
owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/catalog_service/pagers.py create mode 100644 owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/catalog_service/transports/README.rst create mode 100644 owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/catalog_service/transports/__init__.py create mode 100644 owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/catalog_service/transports/base.py create mode 100644 owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/catalog_service/transports/grpc.py create mode 100644 owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/catalog_service/transports/grpc_asyncio.py create mode 100644 owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/content_service/__init__.py create mode 100644 owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/content_service/async_client.py create mode 100644 owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/content_service/client.py create mode 100644 owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/content_service/pagers.py create mode 100644 owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/content_service/transports/README.rst create mode 100644 owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/content_service/transports/__init__.py create mode 100644 owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/content_service/transports/base.py create mode 100644 owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/content_service/transports/grpc.py create mode 100644 owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/content_service/transports/grpc_asyncio.py create mode 100644 owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/data_scan_service/__init__.py create mode 100644 owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/data_scan_service/async_client.py create mode 100644 owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/data_scan_service/client.py create mode 100644 owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/data_scan_service/pagers.py create mode 100644 owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/data_scan_service/transports/README.rst create mode 100644 owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/data_scan_service/transports/__init__.py create mode 100644 owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/data_scan_service/transports/base.py create mode 100644 owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/data_scan_service/transports/grpc.py create mode 100644 owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/data_scan_service/transports/grpc_asyncio.py create mode 100644 owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/data_taxonomy_service/__init__.py create mode 100644 owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/data_taxonomy_service/async_client.py create mode 100644 owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/data_taxonomy_service/client.py create mode 100644 
owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/data_taxonomy_service/pagers.py create mode 100644 owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/data_taxonomy_service/transports/README.rst create mode 100644 owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/data_taxonomy_service/transports/__init__.py create mode 100644 owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/data_taxonomy_service/transports/base.py create mode 100644 owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/data_taxonomy_service/transports/grpc.py create mode 100644 owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/data_taxonomy_service/transports/grpc_asyncio.py create mode 100644 owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/dataplex_service/__init__.py create mode 100644 owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/dataplex_service/async_client.py create mode 100644 owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/dataplex_service/client.py create mode 100644 owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/dataplex_service/pagers.py create mode 100644 owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/dataplex_service/transports/README.rst create mode 100644 owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/dataplex_service/transports/__init__.py create mode 100644 owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/dataplex_service/transports/base.py create mode 100644 owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/dataplex_service/transports/grpc.py create mode 100644 owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/dataplex_service/transports/grpc_asyncio.py create mode 100644 owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/metadata_service/__init__.py create mode 100644 owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/metadata_service/async_client.py create mode 100644 owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/metadata_service/client.py create mode 100644 owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/metadata_service/pagers.py create mode 100644 owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/metadata_service/transports/README.rst create mode 100644 owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/metadata_service/transports/__init__.py create mode 100644 owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/metadata_service/transports/base.py create mode 100644 owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/metadata_service/transports/grpc.py create mode 100644 owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/metadata_service/transports/grpc_asyncio.py create mode 100644 owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/types/__init__.py create mode 100644 owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/types/analyze.py create mode 100644 owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/types/catalog.py create mode 100644 owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/types/content.py create mode 100644 
owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/types/data_discovery.py create mode 100644 owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/types/data_profile.py create mode 100644 owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/types/data_quality.py create mode 100644 owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/types/data_taxonomy.py create mode 100644 owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/types/datascans.py create mode 100644 owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/types/logs.py create mode 100644 owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/types/metadata_.py create mode 100644 owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/types/processing.py create mode 100644 owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/types/resources.py create mode 100644 owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/types/security.py create mode 100644 owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/types/service.py create mode 100644 owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/types/tasks.py create mode 100644 owl-bot-staging/google-cloud-dataplex/v1/mypy.ini create mode 100644 owl-bot-staging/google-cloud-dataplex/v1/noxfile.py create mode 100644 owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_catalog_service_cancel_metadata_job_async.py create mode 100644 owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_catalog_service_cancel_metadata_job_sync.py create mode 100644 owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_catalog_service_create_aspect_type_async.py create mode 100644 owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_catalog_service_create_aspect_type_sync.py create mode 100644 owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_catalog_service_create_entry_async.py create mode 100644 owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_catalog_service_create_entry_group_async.py create mode 100644 owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_catalog_service_create_entry_group_sync.py create mode 100644 owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_catalog_service_create_entry_sync.py create mode 100644 owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_catalog_service_create_entry_type_async.py create mode 100644 owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_catalog_service_create_entry_type_sync.py create mode 100644 owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_catalog_service_create_metadata_job_async.py create mode 100644 owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_catalog_service_create_metadata_job_sync.py create mode 100644 owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_catalog_service_delete_aspect_type_async.py create mode 100644 owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_catalog_service_delete_aspect_type_sync.py create mode 100644 
owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_catalog_service_delete_entry_async.py create mode 100644 owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_catalog_service_delete_entry_group_async.py create mode 100644 owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_catalog_service_delete_entry_group_sync.py create mode 100644 owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_catalog_service_delete_entry_sync.py create mode 100644 owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_catalog_service_delete_entry_type_async.py create mode 100644 owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_catalog_service_delete_entry_type_sync.py create mode 100644 owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_catalog_service_get_aspect_type_async.py create mode 100644 owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_catalog_service_get_aspect_type_sync.py create mode 100644 owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_catalog_service_get_entry_async.py create mode 100644 owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_catalog_service_get_entry_group_async.py create mode 100644 owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_catalog_service_get_entry_group_sync.py create mode 100644 owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_catalog_service_get_entry_sync.py create mode 100644 owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_catalog_service_get_entry_type_async.py create mode 100644 owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_catalog_service_get_entry_type_sync.py create mode 100644 owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_catalog_service_get_metadata_job_async.py create mode 100644 owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_catalog_service_get_metadata_job_sync.py create mode 100644 owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_catalog_service_list_aspect_types_async.py create mode 100644 owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_catalog_service_list_aspect_types_sync.py create mode 100644 owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_catalog_service_list_entries_async.py create mode 100644 owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_catalog_service_list_entries_sync.py create mode 100644 owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_catalog_service_list_entry_groups_async.py create mode 100644 owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_catalog_service_list_entry_groups_sync.py create mode 100644 owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_catalog_service_list_entry_types_async.py create mode 100644 owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_catalog_service_list_entry_types_sync.py create mode 
100644 owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_catalog_service_list_metadata_jobs_async.py create mode 100644 owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_catalog_service_list_metadata_jobs_sync.py create mode 100644 owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_catalog_service_lookup_entry_async.py create mode 100644 owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_catalog_service_lookup_entry_sync.py create mode 100644 owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_catalog_service_search_entries_async.py create mode 100644 owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_catalog_service_search_entries_sync.py create mode 100644 owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_catalog_service_update_aspect_type_async.py create mode 100644 owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_catalog_service_update_aspect_type_sync.py create mode 100644 owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_catalog_service_update_entry_async.py create mode 100644 owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_catalog_service_update_entry_group_async.py create mode 100644 owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_catalog_service_update_entry_group_sync.py create mode 100644 owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_catalog_service_update_entry_sync.py create mode 100644 owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_catalog_service_update_entry_type_async.py create mode 100644 owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_catalog_service_update_entry_type_sync.py create mode 100644 owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_content_service_create_content_async.py create mode 100644 owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_content_service_create_content_sync.py create mode 100644 owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_content_service_delete_content_async.py create mode 100644 owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_content_service_delete_content_sync.py create mode 100644 owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_content_service_get_content_async.py create mode 100644 owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_content_service_get_content_sync.py create mode 100644 owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_content_service_get_iam_policy_async.py create mode 100644 owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_content_service_get_iam_policy_sync.py create mode 100644 owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_content_service_list_content_async.py create mode 100644 owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_content_service_list_content_sync.py create mode 
100644 owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_content_service_set_iam_policy_async.py create mode 100644 owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_content_service_set_iam_policy_sync.py create mode 100644 owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_content_service_test_iam_permissions_async.py create mode 100644 owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_content_service_test_iam_permissions_sync.py create mode 100644 owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_content_service_update_content_async.py create mode 100644 owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_content_service_update_content_sync.py create mode 100644 owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_data_scan_service_create_data_scan_async.py create mode 100644 owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_data_scan_service_create_data_scan_sync.py create mode 100644 owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_data_scan_service_delete_data_scan_async.py create mode 100644 owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_data_scan_service_delete_data_scan_sync.py create mode 100644 owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_data_scan_service_generate_data_quality_rules_async.py create mode 100644 owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_data_scan_service_generate_data_quality_rules_sync.py create mode 100644 owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_data_scan_service_get_data_scan_async.py create mode 100644 owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_data_scan_service_get_data_scan_job_async.py create mode 100644 owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_data_scan_service_get_data_scan_job_sync.py create mode 100644 owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_data_scan_service_get_data_scan_sync.py create mode 100644 owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_data_scan_service_list_data_scan_jobs_async.py create mode 100644 owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_data_scan_service_list_data_scan_jobs_sync.py create mode 100644 owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_data_scan_service_list_data_scans_async.py create mode 100644 owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_data_scan_service_list_data_scans_sync.py create mode 100644 owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_data_scan_service_run_data_scan_async.py create mode 100644 owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_data_scan_service_run_data_scan_sync.py create mode 100644 owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_data_scan_service_update_data_scan_async.py create mode 100644 
owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_data_scan_service_update_data_scan_sync.py create mode 100644 owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_data_taxonomy_service_create_data_attribute_async.py create mode 100644 owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_data_taxonomy_service_create_data_attribute_binding_async.py create mode 100644 owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_data_taxonomy_service_create_data_attribute_binding_sync.py create mode 100644 owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_data_taxonomy_service_create_data_attribute_sync.py create mode 100644 owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_data_taxonomy_service_create_data_taxonomy_async.py create mode 100644 owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_data_taxonomy_service_create_data_taxonomy_sync.py create mode 100644 owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_data_taxonomy_service_delete_data_attribute_async.py create mode 100644 owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_data_taxonomy_service_delete_data_attribute_binding_async.py create mode 100644 owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_data_taxonomy_service_delete_data_attribute_binding_sync.py create mode 100644 owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_data_taxonomy_service_delete_data_attribute_sync.py create mode 100644 owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_data_taxonomy_service_delete_data_taxonomy_async.py create mode 100644 owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_data_taxonomy_service_delete_data_taxonomy_sync.py create mode 100644 owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_data_taxonomy_service_get_data_attribute_async.py create mode 100644 owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_data_taxonomy_service_get_data_attribute_binding_async.py create mode 100644 owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_data_taxonomy_service_get_data_attribute_binding_sync.py create mode 100644 owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_data_taxonomy_service_get_data_attribute_sync.py create mode 100644 owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_data_taxonomy_service_get_data_taxonomy_async.py create mode 100644 owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_data_taxonomy_service_get_data_taxonomy_sync.py create mode 100644 owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_data_taxonomy_service_list_data_attribute_bindings_async.py create mode 100644 owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_data_taxonomy_service_list_data_attribute_bindings_sync.py create mode 100644 owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_data_taxonomy_service_list_data_attributes_async.py create mode 
100644 owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_data_taxonomy_service_list_data_attributes_sync.py create mode 100644 owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_data_taxonomy_service_list_data_taxonomies_async.py create mode 100644 owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_data_taxonomy_service_list_data_taxonomies_sync.py create mode 100644 owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_data_taxonomy_service_update_data_attribute_async.py create mode 100644 owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_data_taxonomy_service_update_data_attribute_binding_async.py create mode 100644 owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_data_taxonomy_service_update_data_attribute_binding_sync.py create mode 100644 owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_data_taxonomy_service_update_data_attribute_sync.py create mode 100644 owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_data_taxonomy_service_update_data_taxonomy_async.py create mode 100644 owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_data_taxonomy_service_update_data_taxonomy_sync.py create mode 100644 owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_cancel_job_async.py create mode 100644 owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_cancel_job_sync.py create mode 100644 owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_create_asset_async.py create mode 100644 owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_create_asset_sync.py create mode 100644 owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_create_environment_async.py create mode 100644 owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_create_environment_sync.py create mode 100644 owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_create_lake_async.py create mode 100644 owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_create_lake_sync.py create mode 100644 owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_create_task_async.py create mode 100644 owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_create_task_sync.py create mode 100644 owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_create_zone_async.py create mode 100644 owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_create_zone_sync.py create mode 100644 owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_delete_asset_async.py create mode 100644 owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_delete_asset_sync.py create mode 100644 
owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_delete_environment_async.py create mode 100644 owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_delete_environment_sync.py create mode 100644 owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_delete_lake_async.py create mode 100644 owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_delete_lake_sync.py create mode 100644 owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_delete_task_async.py create mode 100644 owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_delete_task_sync.py create mode 100644 owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_delete_zone_async.py create mode 100644 owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_delete_zone_sync.py create mode 100644 owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_get_asset_async.py create mode 100644 owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_get_asset_sync.py create mode 100644 owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_get_environment_async.py create mode 100644 owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_get_environment_sync.py create mode 100644 owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_get_job_async.py create mode 100644 owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_get_job_sync.py create mode 100644 owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_get_lake_async.py create mode 100644 owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_get_lake_sync.py create mode 100644 owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_get_task_async.py create mode 100644 owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_get_task_sync.py create mode 100644 owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_get_zone_async.py create mode 100644 owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_get_zone_sync.py create mode 100644 owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_list_asset_actions_async.py create mode 100644 owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_list_asset_actions_sync.py create mode 100644 owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_list_assets_async.py create mode 100644 owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_list_assets_sync.py create mode 100644 
owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_list_environments_async.py create mode 100644 owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_list_environments_sync.py create mode 100644 owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_list_jobs_async.py create mode 100644 owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_list_jobs_sync.py create mode 100644 owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_list_lake_actions_async.py create mode 100644 owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_list_lake_actions_sync.py create mode 100644 owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_list_lakes_async.py create mode 100644 owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_list_lakes_sync.py create mode 100644 owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_list_sessions_async.py create mode 100644 owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_list_sessions_sync.py create mode 100644 owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_list_tasks_async.py create mode 100644 owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_list_tasks_sync.py create mode 100644 owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_list_zone_actions_async.py create mode 100644 owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_list_zone_actions_sync.py create mode 100644 owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_list_zones_async.py create mode 100644 owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_list_zones_sync.py create mode 100644 owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_run_task_async.py create mode 100644 owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_run_task_sync.py create mode 100644 owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_update_asset_async.py create mode 100644 owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_update_asset_sync.py create mode 100644 owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_update_environment_async.py create mode 100644 owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_update_environment_sync.py create mode 100644 owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_update_lake_async.py create mode 100644 owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_update_lake_sync.py create mode 100644 
owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_update_task_async.py create mode 100644 owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_update_task_sync.py create mode 100644 owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_update_zone_async.py create mode 100644 owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_update_zone_sync.py create mode 100644 owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_metadata_service_create_entity_async.py create mode 100644 owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_metadata_service_create_entity_sync.py create mode 100644 owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_metadata_service_create_partition_async.py create mode 100644 owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_metadata_service_create_partition_sync.py create mode 100644 owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_metadata_service_delete_entity_async.py create mode 100644 owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_metadata_service_delete_entity_sync.py create mode 100644 owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_metadata_service_delete_partition_async.py create mode 100644 owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_metadata_service_delete_partition_sync.py create mode 100644 owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_metadata_service_get_entity_async.py create mode 100644 owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_metadata_service_get_entity_sync.py create mode 100644 owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_metadata_service_get_partition_async.py create mode 100644 owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_metadata_service_get_partition_sync.py create mode 100644 owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_metadata_service_list_entities_async.py create mode 100644 owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_metadata_service_list_entities_sync.py create mode 100644 owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_metadata_service_list_partitions_async.py create mode 100644 owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_metadata_service_list_partitions_sync.py create mode 100644 owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_metadata_service_update_entity_async.py create mode 100644 owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_metadata_service_update_entity_sync.py create mode 100644 owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/snippet_metadata_google.cloud.dataplex.v1.json create mode 100644 owl-bot-staging/google-cloud-dataplex/v1/scripts/fixup_dataplex_v1_keywords.py create mode 100644 owl-bot-staging/google-cloud-dataplex/v1/setup.py create mode 100644 
owl-bot-staging/google-cloud-dataplex/v1/testing/constraints-3.10.txt create mode 100644 owl-bot-staging/google-cloud-dataplex/v1/testing/constraints-3.11.txt create mode 100644 owl-bot-staging/google-cloud-dataplex/v1/testing/constraints-3.12.txt create mode 100644 owl-bot-staging/google-cloud-dataplex/v1/testing/constraints-3.13.txt create mode 100644 owl-bot-staging/google-cloud-dataplex/v1/testing/constraints-3.7.txt create mode 100644 owl-bot-staging/google-cloud-dataplex/v1/testing/constraints-3.8.txt create mode 100644 owl-bot-staging/google-cloud-dataplex/v1/testing/constraints-3.9.txt create mode 100644 owl-bot-staging/google-cloud-dataplex/v1/tests/__init__.py create mode 100644 owl-bot-staging/google-cloud-dataplex/v1/tests/unit/__init__.py create mode 100644 owl-bot-staging/google-cloud-dataplex/v1/tests/unit/gapic/__init__.py create mode 100644 owl-bot-staging/google-cloud-dataplex/v1/tests/unit/gapic/dataplex_v1/__init__.py create mode 100644 owl-bot-staging/google-cloud-dataplex/v1/tests/unit/gapic/dataplex_v1/test_catalog_service.py create mode 100644 owl-bot-staging/google-cloud-dataplex/v1/tests/unit/gapic/dataplex_v1/test_content_service.py create mode 100644 owl-bot-staging/google-cloud-dataplex/v1/tests/unit/gapic/dataplex_v1/test_data_scan_service.py create mode 100644 owl-bot-staging/google-cloud-dataplex/v1/tests/unit/gapic/dataplex_v1/test_data_taxonomy_service.py create mode 100644 owl-bot-staging/google-cloud-dataplex/v1/tests/unit/gapic/dataplex_v1/test_dataplex_service.py create mode 100644 owl-bot-staging/google-cloud-dataplex/v1/tests/unit/gapic/dataplex_v1/test_metadata_service.py diff --git a/owl-bot-staging/google-cloud-dataplex/v1/.coveragerc b/owl-bot-staging/google-cloud-dataplex/v1/.coveragerc new file mode 100644 index 000000000000..8df508b38cbc --- /dev/null +++ b/owl-bot-staging/google-cloud-dataplex/v1/.coveragerc @@ -0,0 +1,13 @@ +[run] +branch = True + +[report] +show_missing = True +omit = + google/cloud/dataplex/__init__.py + google/cloud/dataplex/gapic_version.py +exclude_lines = + # Re-enable the standard pragma + pragma: NO COVER + # Ignore debug-only repr + def __repr__ diff --git a/owl-bot-staging/google-cloud-dataplex/v1/.flake8 b/owl-bot-staging/google-cloud-dataplex/v1/.flake8 new file mode 100644 index 000000000000..29227d4cf419 --- /dev/null +++ b/owl-bot-staging/google-cloud-dataplex/v1/.flake8 @@ -0,0 +1,33 @@ +# -*- coding: utf-8 -*- +# +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +# Generated by synthtool. DO NOT EDIT! +[flake8] +ignore = E203, E266, E501, W503 +exclude = + # Exclude generated code. + **/proto/** + **/gapic/** + **/services/** + **/types/** + *_pb2.py + + # Standard linting exemptions. 
+ **/.nox/** + __pycache__, + .git, + *.pyc, + conf.py diff --git a/owl-bot-staging/google-cloud-dataplex/v1/MANIFEST.in b/owl-bot-staging/google-cloud-dataplex/v1/MANIFEST.in new file mode 100644 index 000000000000..eae3b9346087 --- /dev/null +++ b/owl-bot-staging/google-cloud-dataplex/v1/MANIFEST.in @@ -0,0 +1,2 @@ +recursive-include google/cloud/dataplex *.py +recursive-include google/cloud/dataplex_v1 *.py diff --git a/owl-bot-staging/google-cloud-dataplex/v1/README.rst b/owl-bot-staging/google-cloud-dataplex/v1/README.rst new file mode 100644 index 000000000000..a1910d929c5f --- /dev/null +++ b/owl-bot-staging/google-cloud-dataplex/v1/README.rst @@ -0,0 +1,49 @@ +Python Client for Google Cloud Dataplex API +================================================= + +Quick Start +----------- + +In order to use this library, you first need to go through the following steps: + +1. `Select or create a Cloud Platform project.`_ +2. `Enable billing for your project.`_ +3. Enable the Google Cloud Dataplex API. +4. `Setup Authentication.`_ + +.. _Select or create a Cloud Platform project.: https://console.cloud.google.com/project +.. _Enable billing for your project.: https://cloud.google.com/billing/docs/how-to/modify-project#enable_billing_for_a_project +.. _Setup Authentication.: https://googleapis.dev/python/google-api-core/latest/auth.html + +Installation +~~~~~~~~~~~~ + +Install this library in a `virtualenv`_ using pip. `virtualenv`_ is a tool to +create isolated Python environments. The basic problem it addresses is one of +dependencies and versions, and indirectly permissions. + +With `virtualenv`_, it's possible to install this library without needing system +install permissions, and without clashing with the installed system +dependencies. + +.. _`virtualenv`: https://virtualenv.pypa.io/en/latest/ + + +Mac/Linux +^^^^^^^^^ + +.. code-block:: console + + python3 -m venv <your-env> + source <your-env>/bin/activate + <your-env>/bin/pip install /path/to/library + + +Windows +^^^^^^^ + +.. code-block:: console + + python3 -m venv <your-env> + <your-env>\Scripts\activate + <your-env>\Scripts\pip.exe install \path\to\library diff --git a/owl-bot-staging/google-cloud-dataplex/v1/docs/_static/custom.css b/owl-bot-staging/google-cloud-dataplex/v1/docs/_static/custom.css new file mode 100644 index 000000000000..06423be0b592 --- /dev/null +++ b/owl-bot-staging/google-cloud-dataplex/v1/docs/_static/custom.css @@ -0,0 +1,3 @@ +dl.field-list > dt { + min-width: 100px +} diff --git a/owl-bot-staging/google-cloud-dataplex/v1/docs/conf.py b/owl-bot-staging/google-cloud-dataplex/v1/docs/conf.py new file mode 100644 index 000000000000..56923e180c46 --- /dev/null +++ b/owl-bot-staging/google-cloud-dataplex/v1/docs/conf.py @@ -0,0 +1,376 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# +# google-cloud-dataplex documentation build configuration file +# +# This file is execfile()d with the current directory set to its +# containing dir.
+# +# Note that not all possible configuration values are present in this +# autogenerated file. +# +# All configuration values have a default; values that are commented out +# serve to show the default. + +import sys +import os +import shlex + +# If extensions (or modules to document with autodoc) are in another directory, +# add these directories to sys.path here. If the directory is relative to the +# documentation root, use os.path.abspath to make it absolute, like shown here. +sys.path.insert(0, os.path.abspath("..")) + +__version__ = "0.1.0" + +# -- General configuration ------------------------------------------------ + +# If your documentation needs a minimal Sphinx version, state it here. +needs_sphinx = "4.0.1" + +# Add any Sphinx extension module names here, as strings. They can be +# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom +# ones. +extensions = [ + "sphinx.ext.autodoc", + "sphinx.ext.autosummary", + "sphinx.ext.intersphinx", + "sphinx.ext.coverage", + "sphinx.ext.napoleon", + "sphinx.ext.todo", + "sphinx.ext.viewcode", +] + +# autodoc/autosummary flags +autoclass_content = "both" +autodoc_default_flags = ["members"] +autosummary_generate = True + + +# Add any paths that contain templates here, relative to this directory. +templates_path = ["_templates"] + +# Allow markdown includes (so releases.md can include CHANGELOG.md) +# http://www.sphinx-doc.org/en/master/markdown.html +source_parsers = {".md": "recommonmark.parser.CommonMarkParser"} + +# The suffix(es) of source filenames. +# You can specify multiple suffixes as a list of strings: +source_suffix = [".rst", ".md"] + +# The encoding of source files. +# source_encoding = 'utf-8-sig' + +# The root toctree document. +root_doc = "index" + +# General information about the project. +project = u"google-cloud-dataplex" +copyright = u"2023, Google, LLC" +author = u"Google APIs" # TODO: autogenerate this bit + +# The version info for the project you're documenting, acts as replacement for +# |version| and |release|, also used in various other places throughout the +# built documents. +# +# The full version, including alpha/beta/rc tags. +release = __version__ +# The short X.Y version. +version = ".".join(release.split(".")[0:2]) + +# The language for content autogenerated by Sphinx. Refer to documentation +# for a list of supported languages. +# +# This is also used if you do content translation via gettext catalogs. +# Usually you set "language" from the command line for these cases. +language = 'en' + +# There are two options for replacing |today|: either, you set today to some +# non-false value, then it is used: +# today = '' +# Else, today_fmt is used as the format for a strftime call. +# today_fmt = '%B %d, %Y' + +# List of patterns, relative to source directory, that match files and +# directories to ignore when looking for source files. +exclude_patterns = ["_build"] + +# The reST default role (used for this markup: `text`) to use for all +# documents. +# default_role = None + +# If true, '()' will be appended to :func: etc. cross-reference text. +# add_function_parentheses = True + +# If true, the current module name will be prepended to all description +# unit titles (such as .. function::). +# add_module_names = True + +# If true, sectionauthor and moduleauthor directives will be shown in the +# output. They are ignored by default. +# show_authors = False + +# The name of the Pygments (syntax highlighting) style to use.
+pygments_style = "sphinx" + +# A list of ignored prefixes for module index sorting. +# modindex_common_prefix = [] + +# If true, keep warnings as "system message" paragraphs in the built documents. +# keep_warnings = False + +# If true, `todo` and `todoList` produce output, else they produce nothing. +todo_include_todos = True + + +# -- Options for HTML output ---------------------------------------------- + +# The theme to use for HTML and HTML Help pages. See the documentation for +# a list of builtin themes. +html_theme = "alabaster" + +# Theme options are theme-specific and customize the look and feel of a theme +# further. For a list of options available for each theme, see the +# documentation. +html_theme_options = { + "description": "Google Cloud Client Libraries for Python", + "github_user": "googleapis", + "github_repo": "google-cloud-python", + "github_banner": True, + "font_family": "'Roboto', Georgia, sans", + "head_font_family": "'Roboto', Georgia, serif", + "code_font_family": "'Roboto Mono', 'Consolas', monospace", +} + +# Add any paths that contain custom themes here, relative to this directory. +# html_theme_path = [] + +# The name for this set of Sphinx documents. If None, it defaults to +# "<project> v<release> documentation". +# html_title = None + +# A shorter title for the navigation bar. Default is the same as html_title. +# html_short_title = None + +# The name of an image file (relative to this directory) to place at the top +# of the sidebar. +# html_logo = None + +# The name of an image file (within the static path) to use as favicon of the +# docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32 +# pixels large. +# html_favicon = None + +# Add any paths that contain custom static files (such as style sheets) here, +# relative to this directory. They are copied after the builtin static files, +# so a file named "default.css" will overwrite the builtin "default.css". +html_static_path = ["_static"] + +# Add any extra paths that contain custom files (such as robots.txt or +# .htaccess) here, relative to this directory. These files are copied +# directly to the root of the documentation. +# html_extra_path = [] + +# If not '', a 'Last updated on:' timestamp is inserted at every page bottom, +# using the given strftime format. +# html_last_updated_fmt = '%b %d, %Y' + +# If true, SmartyPants will be used to convert quotes and dashes to +# typographically correct entities. +# html_use_smartypants = True + +# Custom sidebar templates, maps document names to template names. +# html_sidebars = {} + +# Additional templates that should be rendered to pages, maps page names to +# template names. +# html_additional_pages = {} + +# If false, no module index is generated. +# html_domain_indices = True + +# If false, no index is generated. +# html_use_index = True + +# If true, the index is split into individual pages for each letter. +# html_split_index = False + +# If true, links to the reST sources are added to the pages. +# html_show_sourcelink = True + +# If true, "Created using Sphinx" is shown in the HTML footer. Default is True. +# html_show_sphinx = True + +# If true, "(C) Copyright ..." is shown in the HTML footer. Default is True. +# html_show_copyright = True + +# If true, an OpenSearch description file will be output, and all pages will +# contain a <link> tag referring to it. The value of this option must be the +# base URL from which the finished HTML is served. +# html_use_opensearch = '' + +# This is the file name suffix for HTML files (e.g. ".xhtml").
+# html_file_suffix = None + +# Language to be used for generating the HTML full-text search index. +# Sphinx supports the following languages: +# 'da', 'de', 'en', 'es', 'fi', 'fr', 'hu', 'it', 'ja' +# 'nl', 'no', 'pt', 'ro', 'ru', 'sv', 'tr' +# html_search_language = 'en' + +# A dictionary with options for the search language support, empty by default. +# Now only 'ja' uses this config value +# html_search_options = {'type': 'default'} + +# The name of a javascript file (relative to the configuration directory) that +# implements a search results scorer. If empty, the default will be used. +# html_search_scorer = 'scorer.js' + +# Output file base name for HTML help builder. +htmlhelp_basename = "google-cloud-dataplex-doc" + +# -- Options for warnings ------------------------------------------------------ + + +suppress_warnings = [ + # Temporarily suppress this to avoid "more than one target found for + # cross-reference" warnings, which are intractable for us to avoid while in + # a mono-repo. + # See https://github.com/sphinx-doc/sphinx/blob + # /2a65ffeef5c107c19084fabdd706cdff3f52d93c/sphinx/domains/python.py#L843 + "ref.python" +] + +# -- Options for LaTeX output --------------------------------------------- + +latex_elements = { + # The paper size ('letterpaper' or 'a4paper'). + # 'papersize': 'letterpaper', + # The font size ('10pt', '11pt' or '12pt'). + # 'pointsize': '10pt', + # Additional stuff for the LaTeX preamble. + # 'preamble': '', + # Latex figure (float) alignment + # 'figure_align': 'htbp', +} + +# Grouping the document tree into LaTeX files. List of tuples +# (source start file, target name, title, +# author, documentclass [howto, manual, or own class]). +latex_documents = [ + ( + root_doc, + "google-cloud-dataplex.tex", + u"google-cloud-dataplex Documentation", + author, + "manual", + ) +] + +# The name of an image file (relative to this directory) to place at the top of +# the title page. +# latex_logo = None + +# For "manual" documents, if this is true, then toplevel headings are parts, +# not chapters. +# latex_use_parts = False + +# If true, show page references after internal links. +# latex_show_pagerefs = False + +# If true, show URL addresses after external links. +# latex_show_urls = False + +# Documents to append as an appendix to all manuals. +# latex_appendices = [] + +# If false, no module index is generated. +# latex_domain_indices = True + + +# -- Options for manual page output --------------------------------------- + +# One entry per manual page. List of tuples +# (source start file, name, description, authors, manual section). +man_pages = [ + ( + root_doc, + "google-cloud-dataplex", + u"Google Cloud Dataplex Documentation", + [author], + 1, + ) +] + +# If true, show URL addresses after external links. +# man_show_urls = False + + +# -- Options for Texinfo output ------------------------------------------- + +# Grouping the document tree into Texinfo files. List of tuples +# (source start file, target name, title, author, +# dir menu entry, description, category) +texinfo_documents = [ + ( + root_doc, + "google-cloud-dataplex", + u"google-cloud-dataplex Documentation", + author, + "google-cloud-dataplex", + "GAPIC library for Google Cloud Dataplex API", + "APIs", + ) +] + +# Documents to append as an appendix to all manuals. +# texinfo_appendices = [] + +# If false, no module index is generated. +# texinfo_domain_indices = True + +# How to display URL addresses: 'footnote', 'no', or 'inline'.
+# texinfo_show_urls = 'footnote' + +# If true, do not generate a @detailmenu in the "Top" node's menu. +# texinfo_no_detailmenu = False + + +# Example configuration for intersphinx: refer to the Python standard library. +intersphinx_mapping = { + "python": ("http://python.readthedocs.org/en/latest/", None), + "gax": ("https://gax-python.readthedocs.org/en/latest/", None), + "google-auth": ("https://google-auth.readthedocs.io/en/stable", None), + "google-gax": ("https://gax-python.readthedocs.io/en/latest/", None), + "google.api_core": ("https://googleapis.dev/python/google-api-core/latest/", None), + "grpc": ("https://grpc.io/grpc/python/", None), + "requests": ("http://requests.kennethreitz.org/en/stable/", None), + "proto": ("https://proto-plus-python.readthedocs.io/en/stable", None), + "protobuf": ("https://googleapis.dev/python/protobuf/latest/", None), +} + + +# Napoleon settings +napoleon_google_docstring = True +napoleon_numpy_docstring = True +napoleon_include_private_with_doc = False +napoleon_include_special_with_doc = True +napoleon_use_admonition_for_examples = False +napoleon_use_admonition_for_notes = False +napoleon_use_admonition_for_references = False +napoleon_use_ivar = False +napoleon_use_param = True +napoleon_use_rtype = True diff --git a/owl-bot-staging/google-cloud-dataplex/v1/docs/dataplex_v1/catalog_service.rst b/owl-bot-staging/google-cloud-dataplex/v1/docs/dataplex_v1/catalog_service.rst new file mode 100644 index 000000000000..ef6306fadb87 --- /dev/null +++ b/owl-bot-staging/google-cloud-dataplex/v1/docs/dataplex_v1/catalog_service.rst @@ -0,0 +1,10 @@ +CatalogService +-------------------------------- + +.. automodule:: google.cloud.dataplex_v1.services.catalog_service + :members: + :inherited-members: + +.. automodule:: google.cloud.dataplex_v1.services.catalog_service.pagers + :members: + :inherited-members: diff --git a/owl-bot-staging/google-cloud-dataplex/v1/docs/dataplex_v1/content_service.rst b/owl-bot-staging/google-cloud-dataplex/v1/docs/dataplex_v1/content_service.rst new file mode 100644 index 000000000000..ce3774365501 --- /dev/null +++ b/owl-bot-staging/google-cloud-dataplex/v1/docs/dataplex_v1/content_service.rst @@ -0,0 +1,10 @@ +ContentService +-------------------------------- + +.. automodule:: google.cloud.dataplex_v1.services.content_service + :members: + :inherited-members: + +.. automodule:: google.cloud.dataplex_v1.services.content_service.pagers + :members: + :inherited-members: diff --git a/owl-bot-staging/google-cloud-dataplex/v1/docs/dataplex_v1/data_scan_service.rst b/owl-bot-staging/google-cloud-dataplex/v1/docs/dataplex_v1/data_scan_service.rst new file mode 100644 index 000000000000..c9281cda5823 --- /dev/null +++ b/owl-bot-staging/google-cloud-dataplex/v1/docs/dataplex_v1/data_scan_service.rst @@ -0,0 +1,10 @@ +DataScanService +--------------------------------- + +.. automodule:: google.cloud.dataplex_v1.services.data_scan_service + :members: + :inherited-members: + +.. automodule:: google.cloud.dataplex_v1.services.data_scan_service.pagers + :members: + :inherited-members: diff --git a/owl-bot-staging/google-cloud-dataplex/v1/docs/dataplex_v1/data_taxonomy_service.rst b/owl-bot-staging/google-cloud-dataplex/v1/docs/dataplex_v1/data_taxonomy_service.rst new file mode 100644 index 000000000000..b2a185a3c43f --- /dev/null +++ b/owl-bot-staging/google-cloud-dataplex/v1/docs/dataplex_v1/data_taxonomy_service.rst @@ -0,0 +1,10 @@ +DataTaxonomyService +------------------------------------- + +.. 
automodule:: google.cloud.dataplex_v1.services.data_taxonomy_service + :members: + :inherited-members: + +.. automodule:: google.cloud.dataplex_v1.services.data_taxonomy_service.pagers + :members: + :inherited-members: diff --git a/owl-bot-staging/google-cloud-dataplex/v1/docs/dataplex_v1/dataplex_service.rst b/owl-bot-staging/google-cloud-dataplex/v1/docs/dataplex_v1/dataplex_service.rst new file mode 100644 index 000000000000..5ecb20ccef96 --- /dev/null +++ b/owl-bot-staging/google-cloud-dataplex/v1/docs/dataplex_v1/dataplex_service.rst @@ -0,0 +1,10 @@ +DataplexService +--------------------------------- + +.. automodule:: google.cloud.dataplex_v1.services.dataplex_service + :members: + :inherited-members: + +.. automodule:: google.cloud.dataplex_v1.services.dataplex_service.pagers + :members: + :inherited-members: diff --git a/owl-bot-staging/google-cloud-dataplex/v1/docs/dataplex_v1/metadata_service.rst b/owl-bot-staging/google-cloud-dataplex/v1/docs/dataplex_v1/metadata_service.rst new file mode 100644 index 000000000000..d5bf19660ab5 --- /dev/null +++ b/owl-bot-staging/google-cloud-dataplex/v1/docs/dataplex_v1/metadata_service.rst @@ -0,0 +1,10 @@ +MetadataService +--------------------------------- + +.. automodule:: google.cloud.dataplex_v1.services.metadata_service + :members: + :inherited-members: + +.. automodule:: google.cloud.dataplex_v1.services.metadata_service.pagers + :members: + :inherited-members: diff --git a/owl-bot-staging/google-cloud-dataplex/v1/docs/dataplex_v1/services_.rst b/owl-bot-staging/google-cloud-dataplex/v1/docs/dataplex_v1/services_.rst new file mode 100644 index 000000000000..42044fdd76d1 --- /dev/null +++ b/owl-bot-staging/google-cloud-dataplex/v1/docs/dataplex_v1/services_.rst @@ -0,0 +1,11 @@ +Services for Google Cloud Dataplex v1 API +========================================= +.. toctree:: + :maxdepth: 2 + + catalog_service + content_service + dataplex_service + data_scan_service + data_taxonomy_service + metadata_service diff --git a/owl-bot-staging/google-cloud-dataplex/v1/docs/dataplex_v1/types_.rst b/owl-bot-staging/google-cloud-dataplex/v1/docs/dataplex_v1/types_.rst new file mode 100644 index 000000000000..391acd51ef80 --- /dev/null +++ b/owl-bot-staging/google-cloud-dataplex/v1/docs/dataplex_v1/types_.rst @@ -0,0 +1,6 @@ +Types for Google Cloud Dataplex v1 API +====================================== + +.. automodule:: google.cloud.dataplex_v1.types + :members: + :show-inheritance: diff --git a/owl-bot-staging/google-cloud-dataplex/v1/docs/index.rst b/owl-bot-staging/google-cloud-dataplex/v1/docs/index.rst new file mode 100644 index 000000000000..56aeec907611 --- /dev/null +++ b/owl-bot-staging/google-cloud-dataplex/v1/docs/index.rst @@ -0,0 +1,7 @@ +API Reference +------------- +.. toctree:: + :maxdepth: 2 + + dataplex_v1/services_ + dataplex_v1/types_ diff --git a/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex/__init__.py b/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex/__init__.py new file mode 100644 index 000000000000..ce1a3bf8386c --- /dev/null +++ b/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex/__init__.py @@ -0,0 +1,407 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from google.cloud.dataplex import gapic_version as package_version + +__version__ = package_version.__version__ + + +from google.cloud.dataplex_v1.services.catalog_service.client import CatalogServiceClient +from google.cloud.dataplex_v1.services.catalog_service.async_client import CatalogServiceAsyncClient +from google.cloud.dataplex_v1.services.content_service.client import ContentServiceClient +from google.cloud.dataplex_v1.services.content_service.async_client import ContentServiceAsyncClient +from google.cloud.dataplex_v1.services.dataplex_service.client import DataplexServiceClient +from google.cloud.dataplex_v1.services.dataplex_service.async_client import DataplexServiceAsyncClient +from google.cloud.dataplex_v1.services.data_scan_service.client import DataScanServiceClient +from google.cloud.dataplex_v1.services.data_scan_service.async_client import DataScanServiceAsyncClient +from google.cloud.dataplex_v1.services.data_taxonomy_service.client import DataTaxonomyServiceClient +from google.cloud.dataplex_v1.services.data_taxonomy_service.async_client import DataTaxonomyServiceAsyncClient +from google.cloud.dataplex_v1.services.metadata_service.client import MetadataServiceClient +from google.cloud.dataplex_v1.services.metadata_service.async_client import MetadataServiceAsyncClient + +from google.cloud.dataplex_v1.types.analyze import Content +from google.cloud.dataplex_v1.types.analyze import Environment +from google.cloud.dataplex_v1.types.analyze import Session +from google.cloud.dataplex_v1.types.catalog import Aspect +from google.cloud.dataplex_v1.types.catalog import AspectSource +from google.cloud.dataplex_v1.types.catalog import AspectType +from google.cloud.dataplex_v1.types.catalog import CancelMetadataJobRequest +from google.cloud.dataplex_v1.types.catalog import CreateAspectTypeRequest +from google.cloud.dataplex_v1.types.catalog import CreateEntryGroupRequest +from google.cloud.dataplex_v1.types.catalog import CreateEntryRequest +from google.cloud.dataplex_v1.types.catalog import CreateEntryTypeRequest +from google.cloud.dataplex_v1.types.catalog import CreateMetadataJobRequest +from google.cloud.dataplex_v1.types.catalog import DeleteAspectTypeRequest +from google.cloud.dataplex_v1.types.catalog import DeleteEntryGroupRequest +from google.cloud.dataplex_v1.types.catalog import DeleteEntryRequest +from google.cloud.dataplex_v1.types.catalog import DeleteEntryTypeRequest +from google.cloud.dataplex_v1.types.catalog import Entry +from google.cloud.dataplex_v1.types.catalog import EntryGroup +from google.cloud.dataplex_v1.types.catalog import EntrySource +from google.cloud.dataplex_v1.types.catalog import EntryType +from google.cloud.dataplex_v1.types.catalog import GetAspectTypeRequest +from google.cloud.dataplex_v1.types.catalog import GetEntryGroupRequest +from google.cloud.dataplex_v1.types.catalog import GetEntryRequest +from google.cloud.dataplex_v1.types.catalog import GetEntryTypeRequest +from google.cloud.dataplex_v1.types.catalog import GetMetadataJobRequest +from google.cloud.dataplex_v1.types.catalog import ImportItem +from 
google.cloud.dataplex_v1.types.catalog import ListAspectTypesRequest +from google.cloud.dataplex_v1.types.catalog import ListAspectTypesResponse +from google.cloud.dataplex_v1.types.catalog import ListEntriesRequest +from google.cloud.dataplex_v1.types.catalog import ListEntriesResponse +from google.cloud.dataplex_v1.types.catalog import ListEntryGroupsRequest +from google.cloud.dataplex_v1.types.catalog import ListEntryGroupsResponse +from google.cloud.dataplex_v1.types.catalog import ListEntryTypesRequest +from google.cloud.dataplex_v1.types.catalog import ListEntryTypesResponse +from google.cloud.dataplex_v1.types.catalog import ListMetadataJobsRequest +from google.cloud.dataplex_v1.types.catalog import ListMetadataJobsResponse +from google.cloud.dataplex_v1.types.catalog import LookupEntryRequest +from google.cloud.dataplex_v1.types.catalog import MetadataJob +from google.cloud.dataplex_v1.types.catalog import SearchEntriesRequest +from google.cloud.dataplex_v1.types.catalog import SearchEntriesResponse +from google.cloud.dataplex_v1.types.catalog import SearchEntriesResult +from google.cloud.dataplex_v1.types.catalog import UpdateAspectTypeRequest +from google.cloud.dataplex_v1.types.catalog import UpdateEntryGroupRequest +from google.cloud.dataplex_v1.types.catalog import UpdateEntryRequest +from google.cloud.dataplex_v1.types.catalog import UpdateEntryTypeRequest +from google.cloud.dataplex_v1.types.catalog import EntryView +from google.cloud.dataplex_v1.types.catalog import TransferStatus +from google.cloud.dataplex_v1.types.content import CreateContentRequest +from google.cloud.dataplex_v1.types.content import DeleteContentRequest +from google.cloud.dataplex_v1.types.content import GetContentRequest +from google.cloud.dataplex_v1.types.content import ListContentRequest +from google.cloud.dataplex_v1.types.content import ListContentResponse +from google.cloud.dataplex_v1.types.content import UpdateContentRequest +from google.cloud.dataplex_v1.types.data_discovery import DataDiscoveryResult +from google.cloud.dataplex_v1.types.data_discovery import DataDiscoverySpec +from google.cloud.dataplex_v1.types.data_profile import DataProfileResult +from google.cloud.dataplex_v1.types.data_profile import DataProfileSpec +from google.cloud.dataplex_v1.types.data_quality import DataQualityColumnResult +from google.cloud.dataplex_v1.types.data_quality import DataQualityDimension +from google.cloud.dataplex_v1.types.data_quality import DataQualityDimensionResult +from google.cloud.dataplex_v1.types.data_quality import DataQualityResult +from google.cloud.dataplex_v1.types.data_quality import DataQualityRule +from google.cloud.dataplex_v1.types.data_quality import DataQualityRuleResult +from google.cloud.dataplex_v1.types.data_quality import DataQualitySpec +from google.cloud.dataplex_v1.types.data_taxonomy import CreateDataAttributeBindingRequest +from google.cloud.dataplex_v1.types.data_taxonomy import CreateDataAttributeRequest +from google.cloud.dataplex_v1.types.data_taxonomy import CreateDataTaxonomyRequest +from google.cloud.dataplex_v1.types.data_taxonomy import DataAttribute +from google.cloud.dataplex_v1.types.data_taxonomy import DataAttributeBinding +from google.cloud.dataplex_v1.types.data_taxonomy import DataTaxonomy +from google.cloud.dataplex_v1.types.data_taxonomy import DeleteDataAttributeBindingRequest +from google.cloud.dataplex_v1.types.data_taxonomy import DeleteDataAttributeRequest +from google.cloud.dataplex_v1.types.data_taxonomy import DeleteDataTaxonomyRequest +from 
google.cloud.dataplex_v1.types.data_taxonomy import GetDataAttributeBindingRequest +from google.cloud.dataplex_v1.types.data_taxonomy import GetDataAttributeRequest +from google.cloud.dataplex_v1.types.data_taxonomy import GetDataTaxonomyRequest +from google.cloud.dataplex_v1.types.data_taxonomy import ListDataAttributeBindingsRequest +from google.cloud.dataplex_v1.types.data_taxonomy import ListDataAttributeBindingsResponse +from google.cloud.dataplex_v1.types.data_taxonomy import ListDataAttributesRequest +from google.cloud.dataplex_v1.types.data_taxonomy import ListDataAttributesResponse +from google.cloud.dataplex_v1.types.data_taxonomy import ListDataTaxonomiesRequest +from google.cloud.dataplex_v1.types.data_taxonomy import ListDataTaxonomiesResponse +from google.cloud.dataplex_v1.types.data_taxonomy import UpdateDataAttributeBindingRequest +from google.cloud.dataplex_v1.types.data_taxonomy import UpdateDataAttributeRequest +from google.cloud.dataplex_v1.types.data_taxonomy import UpdateDataTaxonomyRequest +from google.cloud.dataplex_v1.types.datascans import CreateDataScanRequest +from google.cloud.dataplex_v1.types.datascans import DataScan +from google.cloud.dataplex_v1.types.datascans import DataScanJob +from google.cloud.dataplex_v1.types.datascans import DeleteDataScanRequest +from google.cloud.dataplex_v1.types.datascans import GenerateDataQualityRulesRequest +from google.cloud.dataplex_v1.types.datascans import GenerateDataQualityRulesResponse +from google.cloud.dataplex_v1.types.datascans import GetDataScanJobRequest +from google.cloud.dataplex_v1.types.datascans import GetDataScanRequest +from google.cloud.dataplex_v1.types.datascans import ListDataScanJobsRequest +from google.cloud.dataplex_v1.types.datascans import ListDataScanJobsResponse +from google.cloud.dataplex_v1.types.datascans import ListDataScansRequest +from google.cloud.dataplex_v1.types.datascans import ListDataScansResponse +from google.cloud.dataplex_v1.types.datascans import RunDataScanRequest +from google.cloud.dataplex_v1.types.datascans import RunDataScanResponse +from google.cloud.dataplex_v1.types.datascans import UpdateDataScanRequest +from google.cloud.dataplex_v1.types.datascans import DataScanType +from google.cloud.dataplex_v1.types.logs import DataQualityScanRuleResult +from google.cloud.dataplex_v1.types.logs import DataScanEvent +from google.cloud.dataplex_v1.types.logs import DiscoveryEvent +from google.cloud.dataplex_v1.types.logs import GovernanceEvent +from google.cloud.dataplex_v1.types.logs import JobEvent +from google.cloud.dataplex_v1.types.logs import SessionEvent +from google.cloud.dataplex_v1.types.metadata_ import CreateEntityRequest +from google.cloud.dataplex_v1.types.metadata_ import CreatePartitionRequest +from google.cloud.dataplex_v1.types.metadata_ import DeleteEntityRequest +from google.cloud.dataplex_v1.types.metadata_ import DeletePartitionRequest +from google.cloud.dataplex_v1.types.metadata_ import Entity +from google.cloud.dataplex_v1.types.metadata_ import GetEntityRequest +from google.cloud.dataplex_v1.types.metadata_ import GetPartitionRequest +from google.cloud.dataplex_v1.types.metadata_ import ListEntitiesRequest +from google.cloud.dataplex_v1.types.metadata_ import ListEntitiesResponse +from google.cloud.dataplex_v1.types.metadata_ import ListPartitionsRequest +from google.cloud.dataplex_v1.types.metadata_ import ListPartitionsResponse +from google.cloud.dataplex_v1.types.metadata_ import Partition +from google.cloud.dataplex_v1.types.metadata_ import Schema 
+from google.cloud.dataplex_v1.types.metadata_ import StorageAccess +from google.cloud.dataplex_v1.types.metadata_ import StorageFormat +from google.cloud.dataplex_v1.types.metadata_ import UpdateEntityRequest +from google.cloud.dataplex_v1.types.metadata_ import StorageSystem +from google.cloud.dataplex_v1.types.processing import DataSource +from google.cloud.dataplex_v1.types.processing import ScannedData +from google.cloud.dataplex_v1.types.processing import Trigger +from google.cloud.dataplex_v1.types.resources import Action +from google.cloud.dataplex_v1.types.resources import Asset +from google.cloud.dataplex_v1.types.resources import AssetStatus +from google.cloud.dataplex_v1.types.resources import Lake +from google.cloud.dataplex_v1.types.resources import Zone +from google.cloud.dataplex_v1.types.resources import State +from google.cloud.dataplex_v1.types.security import DataAccessSpec +from google.cloud.dataplex_v1.types.security import ResourceAccessSpec +from google.cloud.dataplex_v1.types.service import CancelJobRequest +from google.cloud.dataplex_v1.types.service import CreateAssetRequest +from google.cloud.dataplex_v1.types.service import CreateEnvironmentRequest +from google.cloud.dataplex_v1.types.service import CreateLakeRequest +from google.cloud.dataplex_v1.types.service import CreateTaskRequest +from google.cloud.dataplex_v1.types.service import CreateZoneRequest +from google.cloud.dataplex_v1.types.service import DeleteAssetRequest +from google.cloud.dataplex_v1.types.service import DeleteEnvironmentRequest +from google.cloud.dataplex_v1.types.service import DeleteLakeRequest +from google.cloud.dataplex_v1.types.service import DeleteTaskRequest +from google.cloud.dataplex_v1.types.service import DeleteZoneRequest +from google.cloud.dataplex_v1.types.service import GetAssetRequest +from google.cloud.dataplex_v1.types.service import GetEnvironmentRequest +from google.cloud.dataplex_v1.types.service import GetJobRequest +from google.cloud.dataplex_v1.types.service import GetLakeRequest +from google.cloud.dataplex_v1.types.service import GetTaskRequest +from google.cloud.dataplex_v1.types.service import GetZoneRequest +from google.cloud.dataplex_v1.types.service import ListActionsResponse +from google.cloud.dataplex_v1.types.service import ListAssetActionsRequest +from google.cloud.dataplex_v1.types.service import ListAssetsRequest +from google.cloud.dataplex_v1.types.service import ListAssetsResponse +from google.cloud.dataplex_v1.types.service import ListEnvironmentsRequest +from google.cloud.dataplex_v1.types.service import ListEnvironmentsResponse +from google.cloud.dataplex_v1.types.service import ListJobsRequest +from google.cloud.dataplex_v1.types.service import ListJobsResponse +from google.cloud.dataplex_v1.types.service import ListLakeActionsRequest +from google.cloud.dataplex_v1.types.service import ListLakesRequest +from google.cloud.dataplex_v1.types.service import ListLakesResponse +from google.cloud.dataplex_v1.types.service import ListSessionsRequest +from google.cloud.dataplex_v1.types.service import ListSessionsResponse +from google.cloud.dataplex_v1.types.service import ListTasksRequest +from google.cloud.dataplex_v1.types.service import ListTasksResponse +from google.cloud.dataplex_v1.types.service import ListZoneActionsRequest +from google.cloud.dataplex_v1.types.service import ListZonesRequest +from google.cloud.dataplex_v1.types.service import ListZonesResponse +from google.cloud.dataplex_v1.types.service import OperationMetadata +from 
google.cloud.dataplex_v1.types.service import RunTaskRequest +from google.cloud.dataplex_v1.types.service import RunTaskResponse +from google.cloud.dataplex_v1.types.service import UpdateAssetRequest +from google.cloud.dataplex_v1.types.service import UpdateEnvironmentRequest +from google.cloud.dataplex_v1.types.service import UpdateLakeRequest +from google.cloud.dataplex_v1.types.service import UpdateTaskRequest +from google.cloud.dataplex_v1.types.service import UpdateZoneRequest +from google.cloud.dataplex_v1.types.tasks import Job +from google.cloud.dataplex_v1.types.tasks import Task + +__all__ = ('CatalogServiceClient', + 'CatalogServiceAsyncClient', + 'ContentServiceClient', + 'ContentServiceAsyncClient', + 'DataplexServiceClient', + 'DataplexServiceAsyncClient', + 'DataScanServiceClient', + 'DataScanServiceAsyncClient', + 'DataTaxonomyServiceClient', + 'DataTaxonomyServiceAsyncClient', + 'MetadataServiceClient', + 'MetadataServiceAsyncClient', + 'Content', + 'Environment', + 'Session', + 'Aspect', + 'AspectSource', + 'AspectType', + 'CancelMetadataJobRequest', + 'CreateAspectTypeRequest', + 'CreateEntryGroupRequest', + 'CreateEntryRequest', + 'CreateEntryTypeRequest', + 'CreateMetadataJobRequest', + 'DeleteAspectTypeRequest', + 'DeleteEntryGroupRequest', + 'DeleteEntryRequest', + 'DeleteEntryTypeRequest', + 'Entry', + 'EntryGroup', + 'EntrySource', + 'EntryType', + 'GetAspectTypeRequest', + 'GetEntryGroupRequest', + 'GetEntryRequest', + 'GetEntryTypeRequest', + 'GetMetadataJobRequest', + 'ImportItem', + 'ListAspectTypesRequest', + 'ListAspectTypesResponse', + 'ListEntriesRequest', + 'ListEntriesResponse', + 'ListEntryGroupsRequest', + 'ListEntryGroupsResponse', + 'ListEntryTypesRequest', + 'ListEntryTypesResponse', + 'ListMetadataJobsRequest', + 'ListMetadataJobsResponse', + 'LookupEntryRequest', + 'MetadataJob', + 'SearchEntriesRequest', + 'SearchEntriesResponse', + 'SearchEntriesResult', + 'UpdateAspectTypeRequest', + 'UpdateEntryGroupRequest', + 'UpdateEntryRequest', + 'UpdateEntryTypeRequest', + 'EntryView', + 'TransferStatus', + 'CreateContentRequest', + 'DeleteContentRequest', + 'GetContentRequest', + 'ListContentRequest', + 'ListContentResponse', + 'UpdateContentRequest', + 'DataDiscoveryResult', + 'DataDiscoverySpec', + 'DataProfileResult', + 'DataProfileSpec', + 'DataQualityColumnResult', + 'DataQualityDimension', + 'DataQualityDimensionResult', + 'DataQualityResult', + 'DataQualityRule', + 'DataQualityRuleResult', + 'DataQualitySpec', + 'CreateDataAttributeBindingRequest', + 'CreateDataAttributeRequest', + 'CreateDataTaxonomyRequest', + 'DataAttribute', + 'DataAttributeBinding', + 'DataTaxonomy', + 'DeleteDataAttributeBindingRequest', + 'DeleteDataAttributeRequest', + 'DeleteDataTaxonomyRequest', + 'GetDataAttributeBindingRequest', + 'GetDataAttributeRequest', + 'GetDataTaxonomyRequest', + 'ListDataAttributeBindingsRequest', + 'ListDataAttributeBindingsResponse', + 'ListDataAttributesRequest', + 'ListDataAttributesResponse', + 'ListDataTaxonomiesRequest', + 'ListDataTaxonomiesResponse', + 'UpdateDataAttributeBindingRequest', + 'UpdateDataAttributeRequest', + 'UpdateDataTaxonomyRequest', + 'CreateDataScanRequest', + 'DataScan', + 'DataScanJob', + 'DeleteDataScanRequest', + 'GenerateDataQualityRulesRequest', + 'GenerateDataQualityRulesResponse', + 'GetDataScanJobRequest', + 'GetDataScanRequest', + 'ListDataScanJobsRequest', + 'ListDataScanJobsResponse', + 'ListDataScansRequest', + 'ListDataScansResponse', + 'RunDataScanRequest', + 'RunDataScanResponse', + 
'UpdateDataScanRequest', + 'DataScanType', + 'DataQualityScanRuleResult', + 'DataScanEvent', + 'DiscoveryEvent', + 'GovernanceEvent', + 'JobEvent', + 'SessionEvent', + 'CreateEntityRequest', + 'CreatePartitionRequest', + 'DeleteEntityRequest', + 'DeletePartitionRequest', + 'Entity', + 'GetEntityRequest', + 'GetPartitionRequest', + 'ListEntitiesRequest', + 'ListEntitiesResponse', + 'ListPartitionsRequest', + 'ListPartitionsResponse', + 'Partition', + 'Schema', + 'StorageAccess', + 'StorageFormat', + 'UpdateEntityRequest', + 'StorageSystem', + 'DataSource', + 'ScannedData', + 'Trigger', + 'Action', + 'Asset', + 'AssetStatus', + 'Lake', + 'Zone', + 'State', + 'DataAccessSpec', + 'ResourceAccessSpec', + 'CancelJobRequest', + 'CreateAssetRequest', + 'CreateEnvironmentRequest', + 'CreateLakeRequest', + 'CreateTaskRequest', + 'CreateZoneRequest', + 'DeleteAssetRequest', + 'DeleteEnvironmentRequest', + 'DeleteLakeRequest', + 'DeleteTaskRequest', + 'DeleteZoneRequest', + 'GetAssetRequest', + 'GetEnvironmentRequest', + 'GetJobRequest', + 'GetLakeRequest', + 'GetTaskRequest', + 'GetZoneRequest', + 'ListActionsResponse', + 'ListAssetActionsRequest', + 'ListAssetsRequest', + 'ListAssetsResponse', + 'ListEnvironmentsRequest', + 'ListEnvironmentsResponse', + 'ListJobsRequest', + 'ListJobsResponse', + 'ListLakeActionsRequest', + 'ListLakesRequest', + 'ListLakesResponse', + 'ListSessionsRequest', + 'ListSessionsResponse', + 'ListTasksRequest', + 'ListTasksResponse', + 'ListZoneActionsRequest', + 'ListZonesRequest', + 'ListZonesResponse', + 'OperationMetadata', + 'RunTaskRequest', + 'RunTaskResponse', + 'UpdateAssetRequest', + 'UpdateEnvironmentRequest', + 'UpdateLakeRequest', + 'UpdateTaskRequest', + 'UpdateZoneRequest', + 'Job', + 'Task', +) diff --git a/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex/gapic_version.py b/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex/gapic_version.py new file mode 100644 index 000000000000..558c8aab67c5 --- /dev/null +++ b/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex/gapic_version.py @@ -0,0 +1,16 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +__version__ = "0.0.0" # {x-release-please-version} diff --git a/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex/py.typed b/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex/py.typed new file mode 100644 index 000000000000..c932c263028e --- /dev/null +++ b/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex/py.typed @@ -0,0 +1,2 @@ +# Marker file for PEP 561. +# The google-cloud-dataplex package uses inline types. 
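The three files above complete the thin ``google.cloud.dataplex`` wrapper package: its ``__init__.py`` re-exports every client and message type from ``google.cloud.dataplex_v1``, ``gapic_version.py`` carries the version stamped in at release time, and ``py.typed`` marks the package as inline-typed per PEP 561. As a quick orientation to the generated surface, here is a minimal usage sketch (not part of the patch itself), assuming the library is installed per the README above, Application Default Credentials are configured, and using placeholder project and location values:

.. code-block:: python

    # Minimal sketch, not part of this patch: list Dataplex lakes.
    # "my-project" and "us-central1" are placeholder values.
    from google.cloud import dataplex_v1

    # DataplexServiceClient is also re-exported from the top-level
    # google.cloud.dataplex package, as shown in the __init__.py above.
    client = dataplex_v1.DataplexServiceClient()

    # ListLakes is one of the DataplexService RPCs; list_lakes returns a
    # pager that fetches further result pages lazily as it is iterated.
    parent = "projects/my-project/locations/us-central1"
    for lake in client.list_lakes(parent=parent):
        print(lake.name)

The async client (``DataplexServiceAsyncClient``, re-exported alongside it) offers the same method names as coroutines, with ``async for`` over the returned pager.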
diff --git a/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/__init__.py b/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/__init__.py new file mode 100644 index 000000000000..7fd5f0814f43 --- /dev/null +++ b/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/__init__.py @@ -0,0 +1,408 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from google.cloud.dataplex_v1 import gapic_version as package_version + +__version__ = package_version.__version__ + + +from .services.catalog_service import CatalogServiceClient +from .services.catalog_service import CatalogServiceAsyncClient +from .services.content_service import ContentServiceClient +from .services.content_service import ContentServiceAsyncClient +from .services.dataplex_service import DataplexServiceClient +from .services.dataplex_service import DataplexServiceAsyncClient +from .services.data_scan_service import DataScanServiceClient +from .services.data_scan_service import DataScanServiceAsyncClient +from .services.data_taxonomy_service import DataTaxonomyServiceClient +from .services.data_taxonomy_service import DataTaxonomyServiceAsyncClient +from .services.metadata_service import MetadataServiceClient +from .services.metadata_service import MetadataServiceAsyncClient + +from .types.analyze import Content +from .types.analyze import Environment +from .types.analyze import Session +from .types.catalog import Aspect +from .types.catalog import AspectSource +from .types.catalog import AspectType +from .types.catalog import CancelMetadataJobRequest +from .types.catalog import CreateAspectTypeRequest +from .types.catalog import CreateEntryGroupRequest +from .types.catalog import CreateEntryRequest +from .types.catalog import CreateEntryTypeRequest +from .types.catalog import CreateMetadataJobRequest +from .types.catalog import DeleteAspectTypeRequest +from .types.catalog import DeleteEntryGroupRequest +from .types.catalog import DeleteEntryRequest +from .types.catalog import DeleteEntryTypeRequest +from .types.catalog import Entry +from .types.catalog import EntryGroup +from .types.catalog import EntrySource +from .types.catalog import EntryType +from .types.catalog import GetAspectTypeRequest +from .types.catalog import GetEntryGroupRequest +from .types.catalog import GetEntryRequest +from .types.catalog import GetEntryTypeRequest +from .types.catalog import GetMetadataJobRequest +from .types.catalog import ImportItem +from .types.catalog import ListAspectTypesRequest +from .types.catalog import ListAspectTypesResponse +from .types.catalog import ListEntriesRequest +from .types.catalog import ListEntriesResponse +from .types.catalog import ListEntryGroupsRequest +from .types.catalog import ListEntryGroupsResponse +from .types.catalog import ListEntryTypesRequest +from .types.catalog import ListEntryTypesResponse +from .types.catalog import ListMetadataJobsRequest +from .types.catalog import ListMetadataJobsResponse +from .types.catalog 
import LookupEntryRequest +from .types.catalog import MetadataJob +from .types.catalog import SearchEntriesRequest +from .types.catalog import SearchEntriesResponse +from .types.catalog import SearchEntriesResult +from .types.catalog import UpdateAspectTypeRequest +from .types.catalog import UpdateEntryGroupRequest +from .types.catalog import UpdateEntryRequest +from .types.catalog import UpdateEntryTypeRequest +from .types.catalog import EntryView +from .types.catalog import TransferStatus +from .types.content import CreateContentRequest +from .types.content import DeleteContentRequest +from .types.content import GetContentRequest +from .types.content import ListContentRequest +from .types.content import ListContentResponse +from .types.content import UpdateContentRequest +from .types.data_discovery import DataDiscoveryResult +from .types.data_discovery import DataDiscoverySpec +from .types.data_profile import DataProfileResult +from .types.data_profile import DataProfileSpec +from .types.data_quality import DataQualityColumnResult +from .types.data_quality import DataQualityDimension +from .types.data_quality import DataQualityDimensionResult +from .types.data_quality import DataQualityResult +from .types.data_quality import DataQualityRule +from .types.data_quality import DataQualityRuleResult +from .types.data_quality import DataQualitySpec +from .types.data_taxonomy import CreateDataAttributeBindingRequest +from .types.data_taxonomy import CreateDataAttributeRequest +from .types.data_taxonomy import CreateDataTaxonomyRequest +from .types.data_taxonomy import DataAttribute +from .types.data_taxonomy import DataAttributeBinding +from .types.data_taxonomy import DataTaxonomy +from .types.data_taxonomy import DeleteDataAttributeBindingRequest +from .types.data_taxonomy import DeleteDataAttributeRequest +from .types.data_taxonomy import DeleteDataTaxonomyRequest +from .types.data_taxonomy import GetDataAttributeBindingRequest +from .types.data_taxonomy import GetDataAttributeRequest +from .types.data_taxonomy import GetDataTaxonomyRequest +from .types.data_taxonomy import ListDataAttributeBindingsRequest +from .types.data_taxonomy import ListDataAttributeBindingsResponse +from .types.data_taxonomy import ListDataAttributesRequest +from .types.data_taxonomy import ListDataAttributesResponse +from .types.data_taxonomy import ListDataTaxonomiesRequest +from .types.data_taxonomy import ListDataTaxonomiesResponse +from .types.data_taxonomy import UpdateDataAttributeBindingRequest +from .types.data_taxonomy import UpdateDataAttributeRequest +from .types.data_taxonomy import UpdateDataTaxonomyRequest +from .types.datascans import CreateDataScanRequest +from .types.datascans import DataScan +from .types.datascans import DataScanJob +from .types.datascans import DeleteDataScanRequest +from .types.datascans import GenerateDataQualityRulesRequest +from .types.datascans import GenerateDataQualityRulesResponse +from .types.datascans import GetDataScanJobRequest +from .types.datascans import GetDataScanRequest +from .types.datascans import ListDataScanJobsRequest +from .types.datascans import ListDataScanJobsResponse +from .types.datascans import ListDataScansRequest +from .types.datascans import ListDataScansResponse +from .types.datascans import RunDataScanRequest +from .types.datascans import RunDataScanResponse +from .types.datascans import UpdateDataScanRequest +from .types.datascans import DataScanType +from .types.logs import DataQualityScanRuleResult +from .types.logs import DataScanEvent +from 
.types.logs import DiscoveryEvent +from .types.logs import GovernanceEvent +from .types.logs import JobEvent +from .types.logs import SessionEvent +from .types.metadata_ import CreateEntityRequest +from .types.metadata_ import CreatePartitionRequest +from .types.metadata_ import DeleteEntityRequest +from .types.metadata_ import DeletePartitionRequest +from .types.metadata_ import Entity +from .types.metadata_ import GetEntityRequest +from .types.metadata_ import GetPartitionRequest +from .types.metadata_ import ListEntitiesRequest +from .types.metadata_ import ListEntitiesResponse +from .types.metadata_ import ListPartitionsRequest +from .types.metadata_ import ListPartitionsResponse +from .types.metadata_ import Partition +from .types.metadata_ import Schema +from .types.metadata_ import StorageAccess +from .types.metadata_ import StorageFormat +from .types.metadata_ import UpdateEntityRequest +from .types.metadata_ import StorageSystem +from .types.processing import DataSource +from .types.processing import ScannedData +from .types.processing import Trigger +from .types.resources import Action +from .types.resources import Asset +from .types.resources import AssetStatus +from .types.resources import Lake +from .types.resources import Zone +from .types.resources import State +from .types.security import DataAccessSpec +from .types.security import ResourceAccessSpec +from .types.service import CancelJobRequest +from .types.service import CreateAssetRequest +from .types.service import CreateEnvironmentRequest +from .types.service import CreateLakeRequest +from .types.service import CreateTaskRequest +from .types.service import CreateZoneRequest +from .types.service import DeleteAssetRequest +from .types.service import DeleteEnvironmentRequest +from .types.service import DeleteLakeRequest +from .types.service import DeleteTaskRequest +from .types.service import DeleteZoneRequest +from .types.service import GetAssetRequest +from .types.service import GetEnvironmentRequest +from .types.service import GetJobRequest +from .types.service import GetLakeRequest +from .types.service import GetTaskRequest +from .types.service import GetZoneRequest +from .types.service import ListActionsResponse +from .types.service import ListAssetActionsRequest +from .types.service import ListAssetsRequest +from .types.service import ListAssetsResponse +from .types.service import ListEnvironmentsRequest +from .types.service import ListEnvironmentsResponse +from .types.service import ListJobsRequest +from .types.service import ListJobsResponse +from .types.service import ListLakeActionsRequest +from .types.service import ListLakesRequest +from .types.service import ListLakesResponse +from .types.service import ListSessionsRequest +from .types.service import ListSessionsResponse +from .types.service import ListTasksRequest +from .types.service import ListTasksResponse +from .types.service import ListZoneActionsRequest +from .types.service import ListZonesRequest +from .types.service import ListZonesResponse +from .types.service import OperationMetadata +from .types.service import RunTaskRequest +from .types.service import RunTaskResponse +from .types.service import UpdateAssetRequest +from .types.service import UpdateEnvironmentRequest +from .types.service import UpdateLakeRequest +from .types.service import UpdateTaskRequest +from .types.service import UpdateZoneRequest +from .types.tasks import Job +from .types.tasks import Task + +__all__ = ( + 'CatalogServiceAsyncClient', + 'ContentServiceAsyncClient', + 
'DataScanServiceAsyncClient', + 'DataTaxonomyServiceAsyncClient', + 'DataplexServiceAsyncClient', + 'MetadataServiceAsyncClient', +'Action', +'Aspect', +'AspectSource', +'AspectType', +'Asset', +'AssetStatus', +'CancelJobRequest', +'CancelMetadataJobRequest', +'CatalogServiceClient', +'Content', +'ContentServiceClient', +'CreateAspectTypeRequest', +'CreateAssetRequest', +'CreateContentRequest', +'CreateDataAttributeBindingRequest', +'CreateDataAttributeRequest', +'CreateDataScanRequest', +'CreateDataTaxonomyRequest', +'CreateEntityRequest', +'CreateEntryGroupRequest', +'CreateEntryRequest', +'CreateEntryTypeRequest', +'CreateEnvironmentRequest', +'CreateLakeRequest', +'CreateMetadataJobRequest', +'CreatePartitionRequest', +'CreateTaskRequest', +'CreateZoneRequest', +'DataAccessSpec', +'DataAttribute', +'DataAttributeBinding', +'DataDiscoveryResult', +'DataDiscoverySpec', +'DataProfileResult', +'DataProfileSpec', +'DataQualityColumnResult', +'DataQualityDimension', +'DataQualityDimensionResult', +'DataQualityResult', +'DataQualityRule', +'DataQualityRuleResult', +'DataQualityScanRuleResult', +'DataQualitySpec', +'DataScan', +'DataScanEvent', +'DataScanJob', +'DataScanServiceClient', +'DataScanType', +'DataSource', +'DataTaxonomy', +'DataTaxonomyServiceClient', +'DataplexServiceClient', +'DeleteAspectTypeRequest', +'DeleteAssetRequest', +'DeleteContentRequest', +'DeleteDataAttributeBindingRequest', +'DeleteDataAttributeRequest', +'DeleteDataScanRequest', +'DeleteDataTaxonomyRequest', +'DeleteEntityRequest', +'DeleteEntryGroupRequest', +'DeleteEntryRequest', +'DeleteEntryTypeRequest', +'DeleteEnvironmentRequest', +'DeleteLakeRequest', +'DeletePartitionRequest', +'DeleteTaskRequest', +'DeleteZoneRequest', +'DiscoveryEvent', +'Entity', +'Entry', +'EntryGroup', +'EntrySource', +'EntryType', +'EntryView', +'Environment', +'GenerateDataQualityRulesRequest', +'GenerateDataQualityRulesResponse', +'GetAspectTypeRequest', +'GetAssetRequest', +'GetContentRequest', +'GetDataAttributeBindingRequest', +'GetDataAttributeRequest', +'GetDataScanJobRequest', +'GetDataScanRequest', +'GetDataTaxonomyRequest', +'GetEntityRequest', +'GetEntryGroupRequest', +'GetEntryRequest', +'GetEntryTypeRequest', +'GetEnvironmentRequest', +'GetJobRequest', +'GetLakeRequest', +'GetMetadataJobRequest', +'GetPartitionRequest', +'GetTaskRequest', +'GetZoneRequest', +'GovernanceEvent', +'ImportItem', +'Job', +'JobEvent', +'Lake', +'ListActionsResponse', +'ListAspectTypesRequest', +'ListAspectTypesResponse', +'ListAssetActionsRequest', +'ListAssetsRequest', +'ListAssetsResponse', +'ListContentRequest', +'ListContentResponse', +'ListDataAttributeBindingsRequest', +'ListDataAttributeBindingsResponse', +'ListDataAttributesRequest', +'ListDataAttributesResponse', +'ListDataScanJobsRequest', +'ListDataScanJobsResponse', +'ListDataScansRequest', +'ListDataScansResponse', +'ListDataTaxonomiesRequest', +'ListDataTaxonomiesResponse', +'ListEntitiesRequest', +'ListEntitiesResponse', +'ListEntriesRequest', +'ListEntriesResponse', +'ListEntryGroupsRequest', +'ListEntryGroupsResponse', +'ListEntryTypesRequest', +'ListEntryTypesResponse', +'ListEnvironmentsRequest', +'ListEnvironmentsResponse', +'ListJobsRequest', +'ListJobsResponse', +'ListLakeActionsRequest', +'ListLakesRequest', +'ListLakesResponse', +'ListMetadataJobsRequest', +'ListMetadataJobsResponse', +'ListPartitionsRequest', +'ListPartitionsResponse', +'ListSessionsRequest', +'ListSessionsResponse', +'ListTasksRequest', +'ListTasksResponse', +'ListZoneActionsRequest', 
+'ListZonesRequest', +'ListZonesResponse', +'LookupEntryRequest', +'MetadataJob', +'MetadataServiceClient', +'OperationMetadata', +'Partition', +'ResourceAccessSpec', +'RunDataScanRequest', +'RunDataScanResponse', +'RunTaskRequest', +'RunTaskResponse', +'ScannedData', +'Schema', +'SearchEntriesRequest', +'SearchEntriesResponse', +'SearchEntriesResult', +'Session', +'SessionEvent', +'State', +'StorageAccess', +'StorageFormat', +'StorageSystem', +'Task', +'TransferStatus', +'Trigger', +'UpdateAspectTypeRequest', +'UpdateAssetRequest', +'UpdateContentRequest', +'UpdateDataAttributeBindingRequest', +'UpdateDataAttributeRequest', +'UpdateDataScanRequest', +'UpdateDataTaxonomyRequest', +'UpdateEntityRequest', +'UpdateEntryGroupRequest', +'UpdateEntryRequest', +'UpdateEntryTypeRequest', +'UpdateEnvironmentRequest', +'UpdateLakeRequest', +'UpdateTaskRequest', +'UpdateZoneRequest', +'Zone', +) diff --git a/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/gapic_metadata.json b/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/gapic_metadata.json new file mode 100644 index 000000000000..9fb1150241c8 --- /dev/null +++ b/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/gapic_metadata.json @@ -0,0 +1,1093 @@ + { + "comment": "This file maps proto services/RPCs to the corresponding library clients/methods", + "language": "python", + "libraryPackage": "google.cloud.dataplex_v1", + "protoPackage": "google.cloud.dataplex.v1", + "schema": "1.0", + "services": { + "CatalogService": { + "clients": { + "grpc": { + "libraryClient": "CatalogServiceClient", + "rpcs": { + "CancelMetadataJob": { + "methods": [ + "cancel_metadata_job" + ] + }, + "CreateAspectType": { + "methods": [ + "create_aspect_type" + ] + }, + "CreateEntry": { + "methods": [ + "create_entry" + ] + }, + "CreateEntryGroup": { + "methods": [ + "create_entry_group" + ] + }, + "CreateEntryType": { + "methods": [ + "create_entry_type" + ] + }, + "CreateMetadataJob": { + "methods": [ + "create_metadata_job" + ] + }, + "DeleteAspectType": { + "methods": [ + "delete_aspect_type" + ] + }, + "DeleteEntry": { + "methods": [ + "delete_entry" + ] + }, + "DeleteEntryGroup": { + "methods": [ + "delete_entry_group" + ] + }, + "DeleteEntryType": { + "methods": [ + "delete_entry_type" + ] + }, + "GetAspectType": { + "methods": [ + "get_aspect_type" + ] + }, + "GetEntry": { + "methods": [ + "get_entry" + ] + }, + "GetEntryGroup": { + "methods": [ + "get_entry_group" + ] + }, + "GetEntryType": { + "methods": [ + "get_entry_type" + ] + }, + "GetMetadataJob": { + "methods": [ + "get_metadata_job" + ] + }, + "ListAspectTypes": { + "methods": [ + "list_aspect_types" + ] + }, + "ListEntries": { + "methods": [ + "list_entries" + ] + }, + "ListEntryGroups": { + "methods": [ + "list_entry_groups" + ] + }, + "ListEntryTypes": { + "methods": [ + "list_entry_types" + ] + }, + "ListMetadataJobs": { + "methods": [ + "list_metadata_jobs" + ] + }, + "LookupEntry": { + "methods": [ + "lookup_entry" + ] + }, + "SearchEntries": { + "methods": [ + "search_entries" + ] + }, + "UpdateAspectType": { + "methods": [ + "update_aspect_type" + ] + }, + "UpdateEntry": { + "methods": [ + "update_entry" + ] + }, + "UpdateEntryGroup": { + "methods": [ + "update_entry_group" + ] + }, + "UpdateEntryType": { + "methods": [ + "update_entry_type" + ] + } + } + }, + "grpc-async": { + "libraryClient": "CatalogServiceAsyncClient", + "rpcs": { + "CancelMetadataJob": { + "methods": [ + "cancel_metadata_job" + ] + }, + "CreateAspectType": { + "methods": [ + 
"create_aspect_type" + ] + }, + "CreateEntry": { + "methods": [ + "create_entry" + ] + }, + "CreateEntryGroup": { + "methods": [ + "create_entry_group" + ] + }, + "CreateEntryType": { + "methods": [ + "create_entry_type" + ] + }, + "CreateMetadataJob": { + "methods": [ + "create_metadata_job" + ] + }, + "DeleteAspectType": { + "methods": [ + "delete_aspect_type" + ] + }, + "DeleteEntry": { + "methods": [ + "delete_entry" + ] + }, + "DeleteEntryGroup": { + "methods": [ + "delete_entry_group" + ] + }, + "DeleteEntryType": { + "methods": [ + "delete_entry_type" + ] + }, + "GetAspectType": { + "methods": [ + "get_aspect_type" + ] + }, + "GetEntry": { + "methods": [ + "get_entry" + ] + }, + "GetEntryGroup": { + "methods": [ + "get_entry_group" + ] + }, + "GetEntryType": { + "methods": [ + "get_entry_type" + ] + }, + "GetMetadataJob": { + "methods": [ + "get_metadata_job" + ] + }, + "ListAspectTypes": { + "methods": [ + "list_aspect_types" + ] + }, + "ListEntries": { + "methods": [ + "list_entries" + ] + }, + "ListEntryGroups": { + "methods": [ + "list_entry_groups" + ] + }, + "ListEntryTypes": { + "methods": [ + "list_entry_types" + ] + }, + "ListMetadataJobs": { + "methods": [ + "list_metadata_jobs" + ] + }, + "LookupEntry": { + "methods": [ + "lookup_entry" + ] + }, + "SearchEntries": { + "methods": [ + "search_entries" + ] + }, + "UpdateAspectType": { + "methods": [ + "update_aspect_type" + ] + }, + "UpdateEntry": { + "methods": [ + "update_entry" + ] + }, + "UpdateEntryGroup": { + "methods": [ + "update_entry_group" + ] + }, + "UpdateEntryType": { + "methods": [ + "update_entry_type" + ] + } + } + } + } + }, + "ContentService": { + "clients": { + "grpc": { + "libraryClient": "ContentServiceClient", + "rpcs": { + "CreateContent": { + "methods": [ + "create_content" + ] + }, + "DeleteContent": { + "methods": [ + "delete_content" + ] + }, + "GetContent": { + "methods": [ + "get_content" + ] + }, + "GetIamPolicy": { + "methods": [ + "get_iam_policy" + ] + }, + "ListContent": { + "methods": [ + "list_content" + ] + }, + "SetIamPolicy": { + "methods": [ + "set_iam_policy" + ] + }, + "TestIamPermissions": { + "methods": [ + "test_iam_permissions" + ] + }, + "UpdateContent": { + "methods": [ + "update_content" + ] + } + } + }, + "grpc-async": { + "libraryClient": "ContentServiceAsyncClient", + "rpcs": { + "CreateContent": { + "methods": [ + "create_content" + ] + }, + "DeleteContent": { + "methods": [ + "delete_content" + ] + }, + "GetContent": { + "methods": [ + "get_content" + ] + }, + "GetIamPolicy": { + "methods": [ + "get_iam_policy" + ] + }, + "ListContent": { + "methods": [ + "list_content" + ] + }, + "SetIamPolicy": { + "methods": [ + "set_iam_policy" + ] + }, + "TestIamPermissions": { + "methods": [ + "test_iam_permissions" + ] + }, + "UpdateContent": { + "methods": [ + "update_content" + ] + } + } + } + } + }, + "DataScanService": { + "clients": { + "grpc": { + "libraryClient": "DataScanServiceClient", + "rpcs": { + "CreateDataScan": { + "methods": [ + "create_data_scan" + ] + }, + "DeleteDataScan": { + "methods": [ + "delete_data_scan" + ] + }, + "GenerateDataQualityRules": { + "methods": [ + "generate_data_quality_rules" + ] + }, + "GetDataScan": { + "methods": [ + "get_data_scan" + ] + }, + "GetDataScanJob": { + "methods": [ + "get_data_scan_job" + ] + }, + "ListDataScanJobs": { + "methods": [ + "list_data_scan_jobs" + ] + }, + "ListDataScans": { + "methods": [ + "list_data_scans" + ] + }, + "RunDataScan": { + "methods": [ + "run_data_scan" + ] + }, + "UpdateDataScan": { + "methods": [ 
+ "update_data_scan" + ] + } + } + }, + "grpc-async": { + "libraryClient": "DataScanServiceAsyncClient", + "rpcs": { + "CreateDataScan": { + "methods": [ + "create_data_scan" + ] + }, + "DeleteDataScan": { + "methods": [ + "delete_data_scan" + ] + }, + "GenerateDataQualityRules": { + "methods": [ + "generate_data_quality_rules" + ] + }, + "GetDataScan": { + "methods": [ + "get_data_scan" + ] + }, + "GetDataScanJob": { + "methods": [ + "get_data_scan_job" + ] + }, + "ListDataScanJobs": { + "methods": [ + "list_data_scan_jobs" + ] + }, + "ListDataScans": { + "methods": [ + "list_data_scans" + ] + }, + "RunDataScan": { + "methods": [ + "run_data_scan" + ] + }, + "UpdateDataScan": { + "methods": [ + "update_data_scan" + ] + } + } + } + } + }, + "DataTaxonomyService": { + "clients": { + "grpc": { + "libraryClient": "DataTaxonomyServiceClient", + "rpcs": { + "CreateDataAttribute": { + "methods": [ + "create_data_attribute" + ] + }, + "CreateDataAttributeBinding": { + "methods": [ + "create_data_attribute_binding" + ] + }, + "CreateDataTaxonomy": { + "methods": [ + "create_data_taxonomy" + ] + }, + "DeleteDataAttribute": { + "methods": [ + "delete_data_attribute" + ] + }, + "DeleteDataAttributeBinding": { + "methods": [ + "delete_data_attribute_binding" + ] + }, + "DeleteDataTaxonomy": { + "methods": [ + "delete_data_taxonomy" + ] + }, + "GetDataAttribute": { + "methods": [ + "get_data_attribute" + ] + }, + "GetDataAttributeBinding": { + "methods": [ + "get_data_attribute_binding" + ] + }, + "GetDataTaxonomy": { + "methods": [ + "get_data_taxonomy" + ] + }, + "ListDataAttributeBindings": { + "methods": [ + "list_data_attribute_bindings" + ] + }, + "ListDataAttributes": { + "methods": [ + "list_data_attributes" + ] + }, + "ListDataTaxonomies": { + "methods": [ + "list_data_taxonomies" + ] + }, + "UpdateDataAttribute": { + "methods": [ + "update_data_attribute" + ] + }, + "UpdateDataAttributeBinding": { + "methods": [ + "update_data_attribute_binding" + ] + }, + "UpdateDataTaxonomy": { + "methods": [ + "update_data_taxonomy" + ] + } + } + }, + "grpc-async": { + "libraryClient": "DataTaxonomyServiceAsyncClient", + "rpcs": { + "CreateDataAttribute": { + "methods": [ + "create_data_attribute" + ] + }, + "CreateDataAttributeBinding": { + "methods": [ + "create_data_attribute_binding" + ] + }, + "CreateDataTaxonomy": { + "methods": [ + "create_data_taxonomy" + ] + }, + "DeleteDataAttribute": { + "methods": [ + "delete_data_attribute" + ] + }, + "DeleteDataAttributeBinding": { + "methods": [ + "delete_data_attribute_binding" + ] + }, + "DeleteDataTaxonomy": { + "methods": [ + "delete_data_taxonomy" + ] + }, + "GetDataAttribute": { + "methods": [ + "get_data_attribute" + ] + }, + "GetDataAttributeBinding": { + "methods": [ + "get_data_attribute_binding" + ] + }, + "GetDataTaxonomy": { + "methods": [ + "get_data_taxonomy" + ] + }, + "ListDataAttributeBindings": { + "methods": [ + "list_data_attribute_bindings" + ] + }, + "ListDataAttributes": { + "methods": [ + "list_data_attributes" + ] + }, + "ListDataTaxonomies": { + "methods": [ + "list_data_taxonomies" + ] + }, + "UpdateDataAttribute": { + "methods": [ + "update_data_attribute" + ] + }, + "UpdateDataAttributeBinding": { + "methods": [ + "update_data_attribute_binding" + ] + }, + "UpdateDataTaxonomy": { + "methods": [ + "update_data_taxonomy" + ] + } + } + } + } + }, + "DataplexService": { + "clients": { + "grpc": { + "libraryClient": "DataplexServiceClient", + "rpcs": { + "CancelJob": { + "methods": [ + "cancel_job" + ] + }, + "CreateAsset": { + 
"methods": [ + "create_asset" + ] + }, + "CreateEnvironment": { + "methods": [ + "create_environment" + ] + }, + "CreateLake": { + "methods": [ + "create_lake" + ] + }, + "CreateTask": { + "methods": [ + "create_task" + ] + }, + "CreateZone": { + "methods": [ + "create_zone" + ] + }, + "DeleteAsset": { + "methods": [ + "delete_asset" + ] + }, + "DeleteEnvironment": { + "methods": [ + "delete_environment" + ] + }, + "DeleteLake": { + "methods": [ + "delete_lake" + ] + }, + "DeleteTask": { + "methods": [ + "delete_task" + ] + }, + "DeleteZone": { + "methods": [ + "delete_zone" + ] + }, + "GetAsset": { + "methods": [ + "get_asset" + ] + }, + "GetEnvironment": { + "methods": [ + "get_environment" + ] + }, + "GetJob": { + "methods": [ + "get_job" + ] + }, + "GetLake": { + "methods": [ + "get_lake" + ] + }, + "GetTask": { + "methods": [ + "get_task" + ] + }, + "GetZone": { + "methods": [ + "get_zone" + ] + }, + "ListAssetActions": { + "methods": [ + "list_asset_actions" + ] + }, + "ListAssets": { + "methods": [ + "list_assets" + ] + }, + "ListEnvironments": { + "methods": [ + "list_environments" + ] + }, + "ListJobs": { + "methods": [ + "list_jobs" + ] + }, + "ListLakeActions": { + "methods": [ + "list_lake_actions" + ] + }, + "ListLakes": { + "methods": [ + "list_lakes" + ] + }, + "ListSessions": { + "methods": [ + "list_sessions" + ] + }, + "ListTasks": { + "methods": [ + "list_tasks" + ] + }, + "ListZoneActions": { + "methods": [ + "list_zone_actions" + ] + }, + "ListZones": { + "methods": [ + "list_zones" + ] + }, + "RunTask": { + "methods": [ + "run_task" + ] + }, + "UpdateAsset": { + "methods": [ + "update_asset" + ] + }, + "UpdateEnvironment": { + "methods": [ + "update_environment" + ] + }, + "UpdateLake": { + "methods": [ + "update_lake" + ] + }, + "UpdateTask": { + "methods": [ + "update_task" + ] + }, + "UpdateZone": { + "methods": [ + "update_zone" + ] + } + } + }, + "grpc-async": { + "libraryClient": "DataplexServiceAsyncClient", + "rpcs": { + "CancelJob": { + "methods": [ + "cancel_job" + ] + }, + "CreateAsset": { + "methods": [ + "create_asset" + ] + }, + "CreateEnvironment": { + "methods": [ + "create_environment" + ] + }, + "CreateLake": { + "methods": [ + "create_lake" + ] + }, + "CreateTask": { + "methods": [ + "create_task" + ] + }, + "CreateZone": { + "methods": [ + "create_zone" + ] + }, + "DeleteAsset": { + "methods": [ + "delete_asset" + ] + }, + "DeleteEnvironment": { + "methods": [ + "delete_environment" + ] + }, + "DeleteLake": { + "methods": [ + "delete_lake" + ] + }, + "DeleteTask": { + "methods": [ + "delete_task" + ] + }, + "DeleteZone": { + "methods": [ + "delete_zone" + ] + }, + "GetAsset": { + "methods": [ + "get_asset" + ] + }, + "GetEnvironment": { + "methods": [ + "get_environment" + ] + }, + "GetJob": { + "methods": [ + "get_job" + ] + }, + "GetLake": { + "methods": [ + "get_lake" + ] + }, + "GetTask": { + "methods": [ + "get_task" + ] + }, + "GetZone": { + "methods": [ + "get_zone" + ] + }, + "ListAssetActions": { + "methods": [ + "list_asset_actions" + ] + }, + "ListAssets": { + "methods": [ + "list_assets" + ] + }, + "ListEnvironments": { + "methods": [ + "list_environments" + ] + }, + "ListJobs": { + "methods": [ + "list_jobs" + ] + }, + "ListLakeActions": { + "methods": [ + "list_lake_actions" + ] + }, + "ListLakes": { + "methods": [ + "list_lakes" + ] + }, + "ListSessions": { + "methods": [ + "list_sessions" + ] + }, + "ListTasks": { + "methods": [ + "list_tasks" + ] + }, + "ListZoneActions": { + "methods": [ + "list_zone_actions" + ] + }, + 
"ListZones": { + "methods": [ + "list_zones" + ] + }, + "RunTask": { + "methods": [ + "run_task" + ] + }, + "UpdateAsset": { + "methods": [ + "update_asset" + ] + }, + "UpdateEnvironment": { + "methods": [ + "update_environment" + ] + }, + "UpdateLake": { + "methods": [ + "update_lake" + ] + }, + "UpdateTask": { + "methods": [ + "update_task" + ] + }, + "UpdateZone": { + "methods": [ + "update_zone" + ] + } + } + } + } + }, + "MetadataService": { + "clients": { + "grpc": { + "libraryClient": "MetadataServiceClient", + "rpcs": { + "CreateEntity": { + "methods": [ + "create_entity" + ] + }, + "CreatePartition": { + "methods": [ + "create_partition" + ] + }, + "DeleteEntity": { + "methods": [ + "delete_entity" + ] + }, + "DeletePartition": { + "methods": [ + "delete_partition" + ] + }, + "GetEntity": { + "methods": [ + "get_entity" + ] + }, + "GetPartition": { + "methods": [ + "get_partition" + ] + }, + "ListEntities": { + "methods": [ + "list_entities" + ] + }, + "ListPartitions": { + "methods": [ + "list_partitions" + ] + }, + "UpdateEntity": { + "methods": [ + "update_entity" + ] + } + } + }, + "grpc-async": { + "libraryClient": "MetadataServiceAsyncClient", + "rpcs": { + "CreateEntity": { + "methods": [ + "create_entity" + ] + }, + "CreatePartition": { + "methods": [ + "create_partition" + ] + }, + "DeleteEntity": { + "methods": [ + "delete_entity" + ] + }, + "DeletePartition": { + "methods": [ + "delete_partition" + ] + }, + "GetEntity": { + "methods": [ + "get_entity" + ] + }, + "GetPartition": { + "methods": [ + "get_partition" + ] + }, + "ListEntities": { + "methods": [ + "list_entities" + ] + }, + "ListPartitions": { + "methods": [ + "list_partitions" + ] + }, + "UpdateEntity": { + "methods": [ + "update_entity" + ] + } + } + } + } + } + } +} diff --git a/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/gapic_version.py b/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/gapic_version.py new file mode 100644 index 000000000000..558c8aab67c5 --- /dev/null +++ b/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/gapic_version.py @@ -0,0 +1,16 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +__version__ = "0.0.0" # {x-release-please-version} diff --git a/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/py.typed b/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/py.typed new file mode 100644 index 000000000000..c932c263028e --- /dev/null +++ b/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/py.typed @@ -0,0 +1,2 @@ +# Marker file for PEP 561. +# The google-cloud-dataplex package uses inline types. 
diff --git a/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/__init__.py b/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/__init__.py new file mode 100644 index 000000000000..8f6cf068242c --- /dev/null +++ b/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/__init__.py @@ -0,0 +1,15 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# diff --git a/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/catalog_service/__init__.py b/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/catalog_service/__init__.py new file mode 100644 index 000000000000..91f4e026ba8c --- /dev/null +++ b/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/catalog_service/__init__.py @@ -0,0 +1,22 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from .client import CatalogServiceClient +from .async_client import CatalogServiceAsyncClient + +__all__ = ( + 'CatalogServiceClient', + 'CatalogServiceAsyncClient', +) diff --git a/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/catalog_service/async_client.py b/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/catalog_service/async_client.py new file mode 100644 index 000000000000..dfe5af31637d --- /dev/null +++ b/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/catalog_service/async_client.py @@ -0,0 +1,3652 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from collections import OrderedDict +import re +from typing import Dict, Callable, Mapping, MutableMapping, MutableSequence, Optional, Sequence, Tuple, Type, Union + +from google.cloud.dataplex_v1 import gapic_version as package_version + +from google.api_core.client_options import ClientOptions +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry_async as retries +from google.auth import credentials as ga_credentials # type: ignore +from google.oauth2 import service_account # type: ignore + + +try: + OptionalRetry = Union[retries.AsyncRetry, gapic_v1.method._MethodDefault, None] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.AsyncRetry, object, None] # type: ignore + +from google.api_core import operation # type: ignore +from google.api_core import operation_async # type: ignore +from google.cloud.dataplex_v1.services.catalog_service import pagers +from google.cloud.dataplex_v1.types import catalog +from google.cloud.dataplex_v1.types import service +from google.cloud.location import locations_pb2 # type: ignore +from google.iam.v1 import iam_policy_pb2 # type: ignore +from google.iam.v1 import policy_pb2 # type: ignore +from google.longrunning import operations_pb2 # type: ignore +from google.protobuf import empty_pb2 # type: ignore +from google.protobuf import field_mask_pb2 # type: ignore +from google.protobuf import timestamp_pb2 # type: ignore +from .transports.base import CatalogServiceTransport, DEFAULT_CLIENT_INFO +from .transports.grpc_asyncio import CatalogServiceGrpcAsyncIOTransport +from .client import CatalogServiceClient + + +class CatalogServiceAsyncClient: + """The primary resources offered by this service are + EntryGroups, EntryTypes, AspectTypes, and Entries. They + collectively let data administrators organize, manage, secure, + and catalog data located across cloud projects in their + organization in a variety of storage systems, including Cloud + Storage and BigQuery. + """ + + _client: CatalogServiceClient + + # Copy defaults from the synchronous client for use here. + # Note: DEFAULT_ENDPOINT is deprecated. Use _DEFAULT_ENDPOINT_TEMPLATE instead. 
+ DEFAULT_ENDPOINT = CatalogServiceClient.DEFAULT_ENDPOINT + DEFAULT_MTLS_ENDPOINT = CatalogServiceClient.DEFAULT_MTLS_ENDPOINT + _DEFAULT_ENDPOINT_TEMPLATE = CatalogServiceClient._DEFAULT_ENDPOINT_TEMPLATE + _DEFAULT_UNIVERSE = CatalogServiceClient._DEFAULT_UNIVERSE + + aspect_type_path = staticmethod(CatalogServiceClient.aspect_type_path) + parse_aspect_type_path = staticmethod(CatalogServiceClient.parse_aspect_type_path) + entry_path = staticmethod(CatalogServiceClient.entry_path) + parse_entry_path = staticmethod(CatalogServiceClient.parse_entry_path) + entry_group_path = staticmethod(CatalogServiceClient.entry_group_path) + parse_entry_group_path = staticmethod(CatalogServiceClient.parse_entry_group_path) + entry_type_path = staticmethod(CatalogServiceClient.entry_type_path) + parse_entry_type_path = staticmethod(CatalogServiceClient.parse_entry_type_path) + metadata_job_path = staticmethod(CatalogServiceClient.metadata_job_path) + parse_metadata_job_path = staticmethod(CatalogServiceClient.parse_metadata_job_path) + common_billing_account_path = staticmethod(CatalogServiceClient.common_billing_account_path) + parse_common_billing_account_path = staticmethod(CatalogServiceClient.parse_common_billing_account_path) + common_folder_path = staticmethod(CatalogServiceClient.common_folder_path) + parse_common_folder_path = staticmethod(CatalogServiceClient.parse_common_folder_path) + common_organization_path = staticmethod(CatalogServiceClient.common_organization_path) + parse_common_organization_path = staticmethod(CatalogServiceClient.parse_common_organization_path) + common_project_path = staticmethod(CatalogServiceClient.common_project_path) + parse_common_project_path = staticmethod(CatalogServiceClient.parse_common_project_path) + common_location_path = staticmethod(CatalogServiceClient.common_location_path) + parse_common_location_path = staticmethod(CatalogServiceClient.parse_common_location_path) + + @classmethod + def from_service_account_info(cls, info: dict, *args, **kwargs): + """Creates an instance of this client using the provided credentials + info. + + Args: + info (dict): The service account private key info. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + CatalogServiceAsyncClient: The constructed client. + """ + return CatalogServiceClient.from_service_account_info.__func__(CatalogServiceAsyncClient, info, *args, **kwargs) # type: ignore + + @classmethod + def from_service_account_file(cls, filename: str, *args, **kwargs): + """Creates an instance of this client using the provided credentials + file. + + Args: + filename (str): The path to the service account private key json + file. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + CatalogServiceAsyncClient: The constructed client. + """ + return CatalogServiceClient.from_service_account_file.__func__(CatalogServiceAsyncClient, filename, *args, **kwargs) # type: ignore + + from_service_account_json = from_service_account_file + + @classmethod + def get_mtls_endpoint_and_cert_source(cls, client_options: Optional[ClientOptions] = None): + """Return the API endpoint and client cert source for mutual TLS. + + The client cert source is determined in the following order: + (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the + client cert source is None. 
+        (2) if `client_options.client_cert_source` is provided, use the provided one; if the
+        default client cert source exists, use the default one; otherwise the client cert
+        source is None.
+
+        The API endpoint is determined in the following order:
+        (1) if `client_options.api_endpoint` is provided, use the provided one.
+        (2) if `GOOGLE_API_USE_MTLS_ENDPOINT` environment variable is "always", use the
+        default mTLS endpoint; if the environment variable is "never", use the default API
+        endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise
+        use the default API endpoint.
+
+        More details can be found at https://google.aip.dev/auth/4114.
+
+        Args:
+            client_options (google.api_core.client_options.ClientOptions): Custom options for the
+                client. Only the `api_endpoint` and `client_cert_source` properties may be used
+                in this method.
+
+        Returns:
+            Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the
+                client cert source to use.
+
+        Raises:
+            google.auth.exceptions.MutualTLSChannelError: If any errors happen.
+        """
+        return CatalogServiceClient.get_mtls_endpoint_and_cert_source(client_options)  # type: ignore
+
+    @property
+    def transport(self) -> CatalogServiceTransport:
+        """Returns the transport used by the client instance.
+
+        Returns:
+            CatalogServiceTransport: The transport used by the client instance.
+        """
+        return self._client.transport
+
+    @property
+    def api_endpoint(self):
+        """Return the API endpoint used by the client instance.
+
+        Returns:
+            str: The API endpoint used by the client instance.
+        """
+        return self._client._api_endpoint
+
+    @property
+    def universe_domain(self) -> str:
+        """Return the universe domain used by the client instance.
+
+        Returns:
+            str: The universe domain used
+                by the client instance.
+        """
+        return self._client._universe_domain
+
+    get_transport_class = CatalogServiceClient.get_transport_class
+
+    def __init__(self, *,
+            credentials: Optional[ga_credentials.Credentials] = None,
+            transport: Optional[Union[str, CatalogServiceTransport, Callable[..., CatalogServiceTransport]]] = "grpc_asyncio",
+            client_options: Optional[ClientOptions] = None,
+            client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO,
+            ) -> None:
+        """Instantiates the catalog service async client.
+
+        Args:
+            credentials (Optional[google.auth.credentials.Credentials]): The
+                authorization credentials to attach to requests. These
+                credentials identify the application to the service; if none
+                are specified, the client will attempt to ascertain the
+                credentials from the environment.
+            transport (Optional[Union[str,CatalogServiceTransport,Callable[..., CatalogServiceTransport]]]):
+                The transport to use, or a Callable that constructs and returns a new transport to use.
+                If a Callable is given, it will be called with the same set of initialization
+                arguments as used in the CatalogServiceTransport constructor.
+                If set to None, a transport is chosen automatically.
+            client_options (Optional[Union[google.api_core.client_options.ClientOptions, dict]]):
+                Custom options for the client.
+
+                1. The ``api_endpoint`` property can be used to override the
+                default endpoint provided by the client when ``transport`` is
+                not explicitly provided.
Only if this property is not set and
+                ``transport`` was not explicitly provided, the endpoint is
+                determined by the GOOGLE_API_USE_MTLS_ENDPOINT environment
+                variable, which may have one of the following values:
+                "always" (always use the default mTLS endpoint), "never" (always
+                use the default regular endpoint) and "auto" (auto-switch to the
+                default mTLS endpoint if client certificate is present; this is
+                the default value).
+
+                2. If the GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable
+                is "true", then the ``client_cert_source`` property can be used
+                to provide a client certificate for mTLS transport. If
+                not provided, the default SSL client certificate will be used if
+                present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not
+                set, no client certificate will be used.
+
+                3. The ``universe_domain`` property can be used to override the
+                default "googleapis.com" universe. Note that ``api_endpoint``
+                property still takes precedence; and ``universe_domain`` is
+                currently not supported for mTLS.
+
+            client_info (google.api_core.gapic_v1.client_info.ClientInfo):
+                The client info used to send a user-agent string along with
+                API requests. If ``None``, then default info will be used.
+                Generally, you only need to set this if you're developing
+                your own client library.
+
+        Raises:
+            google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport
+                creation failed for any reason.
+        """
+        self._client = CatalogServiceClient(
+            credentials=credentials,
+            transport=transport,
+            client_options=client_options,
+            client_info=client_info,
+
+        )
+
+    async def create_entry_type(self,
+            request: Optional[Union[catalog.CreateEntryTypeRequest, dict]] = None,
+            *,
+            parent: Optional[str] = None,
+            entry_type: Optional[catalog.EntryType] = None,
+            entry_type_id: Optional[str] = None,
+            retry: OptionalRetry = gapic_v1.method.DEFAULT,
+            timeout: Union[float, object] = gapic_v1.method.DEFAULT,
+            metadata: Sequence[Tuple[str, str]] = (),
+            ) -> operation_async.AsyncOperation:
+        r"""Creates an EntryType.
+
+        .. code-block:: python
+
+            # This snippet has been automatically generated and should be regarded as a
+            # code template only.
+            # It will require modifications to work:
+            # - It may require correct/in-range values for request initialization.
+            # - It may require specifying regional endpoints when creating the service
+            #   client as shown in:
+            #   https://googleapis.dev/python/google-api-core/latest/client_options.html
+            from google.cloud import dataplex_v1
+
+            async def sample_create_entry_type():
+                # Create a client
+                client = dataplex_v1.CatalogServiceAsyncClient()
+
+                # Initialize request argument(s)
+                request = dataplex_v1.CreateEntryTypeRequest(
+                    parent="parent_value",
+                    entry_type_id="entry_type_id_value",
+                )
+
+                # Make the request
+                operation = client.create_entry_type(request=request)
+
+                print("Waiting for operation to complete...")
+
+                response = (await operation).result()
+
+                # Handle the response
+                print(response)
+
+        Args:
+            request (Optional[Union[google.cloud.dataplex_v1.types.CreateEntryTypeRequest, dict]]):
+                The request object. Create EntryType Request.
+            parent (:class:`str`):
+                Required. The resource name of the EntryType, of the
+                form: projects/{project_number}/locations/{location_id}
+                where ``location_id`` refers to a Google Cloud region.
+
+                This corresponds to the ``parent`` field
+                on the ``request`` instance; if ``request`` is provided, this
+                should not be set.
+            entry_type (:class:`google.cloud.dataplex_v1.types.EntryType`):
+                Required.
EntryType Resource. + This corresponds to the ``entry_type`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + entry_type_id (:class:`str`): + Required. EntryType identifier. + This corresponds to the ``entry_type_id`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation_async.AsyncOperation: + An object representing a long-running operation. + + The result type for the operation will be + :class:`google.cloud.dataplex_v1.types.EntryType` Entry + Type is a template for creating Entries. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent, entry_type, entry_type_id]) + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, catalog.CreateEntryTypeRequest): + request = catalog.CreateEntryTypeRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + if entry_type is not None: + request.entry_type = entry_type + if entry_type_id is not None: + request.entry_type_id = entry_type_id + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[self._client._transport.create_entry_type] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("parent", request.parent), + )), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation_async.from_gapic( + response, + self._client._transport.operations_client, + catalog.EntryType, + metadata_type=service.OperationMetadata, + ) + + # Done; return the response. + return response + + async def update_entry_type(self, + request: Optional[Union[catalog.UpdateEntryTypeRequest, dict]] = None, + *, + entry_type: Optional[catalog.EntryType] = None, + update_mask: Optional[field_mask_pb2.FieldMask] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation_async.AsyncOperation: + r"""Updates an EntryType. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dataplex_v1 + + async def sample_update_entry_type(): + # Create a client + client = dataplex_v1.CatalogServiceAsyncClient() + + # Initialize request argument(s) + request = dataplex_v1.UpdateEntryTypeRequest( + ) + + # Make the request + operation = client.update_entry_type(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.dataplex_v1.types.UpdateEntryTypeRequest, dict]]): + The request object. Update EntryType Request. + entry_type (:class:`google.cloud.dataplex_v1.types.EntryType`): + Required. EntryType Resource. + This corresponds to the ``entry_type`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + update_mask (:class:`google.protobuf.field_mask_pb2.FieldMask`): + Required. Mask of fields to update. + This corresponds to the ``update_mask`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation_async.AsyncOperation: + An object representing a long-running operation. + + The result type for the operation will be + :class:`google.cloud.dataplex_v1.types.EntryType` Entry + Type is a template for creating Entries. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([entry_type, update_mask]) + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, catalog.UpdateEntryTypeRequest): + request = catalog.UpdateEntryTypeRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if entry_type is not None: + request.entry_type = entry_type + if update_mask is not None: + request.update_mask = update_mask + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[self._client._transport.update_entry_type] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("entry_type.name", request.entry_type.name), + )), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation_async.from_gapic( + response, + self._client._transport.operations_client, + catalog.EntryType, + metadata_type=service.OperationMetadata, + ) + + # Done; return the response. 
+        return response
+
+    async def delete_entry_type(self,
+            request: Optional[Union[catalog.DeleteEntryTypeRequest, dict]] = None,
+            *,
+            name: Optional[str] = None,
+            retry: OptionalRetry = gapic_v1.method.DEFAULT,
+            timeout: Union[float, object] = gapic_v1.method.DEFAULT,
+            metadata: Sequence[Tuple[str, str]] = (),
+            ) -> operation_async.AsyncOperation:
+        r"""Deletes an EntryType.
+
+        .. code-block:: python
+
+            # This snippet has been automatically generated and should be regarded as a
+            # code template only.
+            # It will require modifications to work:
+            # - It may require correct/in-range values for request initialization.
+            # - It may require specifying regional endpoints when creating the service
+            #   client as shown in:
+            #   https://googleapis.dev/python/google-api-core/latest/client_options.html
+            from google.cloud import dataplex_v1
+
+            async def sample_delete_entry_type():
+                # Create a client
+                client = dataplex_v1.CatalogServiceAsyncClient()
+
+                # Initialize request argument(s)
+                request = dataplex_v1.DeleteEntryTypeRequest(
+                    name="name_value",
+                )
+
+                # Make the request
+                operation = client.delete_entry_type(request=request)
+
+                print("Waiting for operation to complete...")
+
+                response = (await operation).result()
+
+                # Handle the response
+                print(response)
+
+        Args:
+            request (Optional[Union[google.cloud.dataplex_v1.types.DeleteEntryTypeRequest, dict]]):
+                The request object. Delete EntryType Request.
+            name (:class:`str`):
+                Required. The resource name of the EntryType:
+                ``projects/{project_number}/locations/{location_id}/entryTypes/{entry_type_id}``.
+
+                This corresponds to the ``name`` field
+                on the ``request`` instance; if ``request`` is provided, this
+                should not be set.
+            retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any,
+                should be retried.
+            timeout (float): The timeout for this request.
+            metadata (Sequence[Tuple[str, str]]): Strings which should be
+                sent along with the request as metadata.
+
+        Returns:
+            google.api_core.operation_async.AsyncOperation:
+                An object representing a long-running operation.
+
+                The result type for the operation will be :class:`google.protobuf.empty_pb2.Empty` A generic empty message that you can re-use to avoid defining duplicated
+                   empty messages in your APIs. A typical example is to
+                   use it as the request or the response type of an API
+                   method. For instance:
+
+                   service Foo {
+                       rpc Bar(google.protobuf.Empty) returns
+                       (google.protobuf.Empty);
+
+                   }
+
+        """
+        # Create or coerce a protobuf request object.
+        # - Quick check: If we got a request object, we should *not* have
+        #   gotten any keyword arguments that map to the request.
+        has_flattened_params = any([name])
+        if request is not None and has_flattened_params:
+            raise ValueError("If the `request` argument is set, then none of "
+                             "the individual field arguments should be set.")
+
+        # - Use the request object if provided (there's no risk of modifying the input as
+        #   there are no flattened fields), or create one.
+        if not isinstance(request, catalog.DeleteEntryTypeRequest):
+            request = catalog.DeleteEntryTypeRequest(request)
+
+        # If we have keyword arguments corresponding to fields on the
+        # request, apply these.
+        if name is not None:
+            request.name = name
+
+        # Wrap the RPC method; this adds retry and timeout information,
+        # and friendly error handling.
+        rpc = self._client._transport._wrapped_methods[self._client._transport.delete_entry_type]
+
+        # Certain fields should be provided within the metadata header;
+        # add these here.
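+        # (The pairs below are serialized into the ``x-goog-request-params``
+        # request header, which the backend uses to route the call to the
+        # resource named in the request.)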
+ metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation_async.from_gapic( + response, + self._client._transport.operations_client, + empty_pb2.Empty, + metadata_type=service.OperationMetadata, + ) + + # Done; return the response. + return response + + async def list_entry_types(self, + request: Optional[Union[catalog.ListEntryTypesRequest, dict]] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListEntryTypesAsyncPager: + r"""Lists EntryType resources in a project and location. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dataplex_v1 + + async def sample_list_entry_types(): + # Create a client + client = dataplex_v1.CatalogServiceAsyncClient() + + # Initialize request argument(s) + request = dataplex_v1.ListEntryTypesRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_entry_types(request=request) + + # Handle the response + async for response in page_result: + print(response) + + Args: + request (Optional[Union[google.cloud.dataplex_v1.types.ListEntryTypesRequest, dict]]): + The request object. List EntryTypes request + parent (:class:`str`): + Required. The resource name of the EntryType location, + of the form: + ``projects/{project_number}/locations/{location_id}`` + where ``location_id`` refers to a Google Cloud region. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.dataplex_v1.services.catalog_service.pagers.ListEntryTypesAsyncPager: + List EntryTypes response. + + Iterating over this object will yield + results and resolve additional pages + automatically. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent]) + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, catalog.ListEntryTypesRequest): + request = catalog.ListEntryTypesRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. 
+ if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[self._client._transport.list_entry_types] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("parent", request.parent), + )), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__aiter__` convenience method. + response = pagers.ListEntryTypesAsyncPager( + method=rpc, + request=request, + response=response, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def get_entry_type(self, + request: Optional[Union[catalog.GetEntryTypeRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> catalog.EntryType: + r"""Gets an EntryType. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dataplex_v1 + + async def sample_get_entry_type(): + # Create a client + client = dataplex_v1.CatalogServiceAsyncClient() + + # Initialize request argument(s) + request = dataplex_v1.GetEntryTypeRequest( + name="name_value", + ) + + # Make the request + response = await client.get_entry_type(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.dataplex_v1.types.GetEntryTypeRequest, dict]]): + The request object. Get EntryType request. + name (:class:`str`): + Required. The resource name of the EntryType: + ``projects/{project_number}/locations/{location_id}/entryTypes/{entry_type_id}``. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.dataplex_v1.types.EntryType: + Entry Type is a template for creating + Entries. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. 
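+        # (A dict or an existing request instance is accepted here; the
+        # proto-plus constructor copies its fields into a new
+        # GetEntryTypeRequest message.)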
+ if not isinstance(request, catalog.GetEntryTypeRequest): + request = catalog.GetEntryTypeRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[self._client._transport.get_entry_type] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def create_aspect_type(self, + request: Optional[Union[catalog.CreateAspectTypeRequest, dict]] = None, + *, + parent: Optional[str] = None, + aspect_type: Optional[catalog.AspectType] = None, + aspect_type_id: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation_async.AsyncOperation: + r"""Creates an AspectType. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dataplex_v1 + + async def sample_create_aspect_type(): + # Create a client + client = dataplex_v1.CatalogServiceAsyncClient() + + # Initialize request argument(s) + aspect_type = dataplex_v1.AspectType() + aspect_type.metadata_template.name = "name_value" + aspect_type.metadata_template.type_ = "type__value" + + request = dataplex_v1.CreateAspectTypeRequest( + parent="parent_value", + aspect_type_id="aspect_type_id_value", + aspect_type=aspect_type, + ) + + # Make the request + operation = client.create_aspect_type(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.dataplex_v1.types.CreateAspectTypeRequest, dict]]): + The request object. Create AspectType Request. + parent (:class:`str`): + Required. The resource name of the AspectType, of the + form: projects/{project_number}/locations/{location_id} + where ``location_id`` refers to a Google Cloud region. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + aspect_type (:class:`google.cloud.dataplex_v1.types.AspectType`): + Required. AspectType Resource. + This corresponds to the ``aspect_type`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + aspect_type_id (:class:`str`): + Required. AspectType identifier. + This corresponds to the ``aspect_type_id`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. 
+ metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation_async.AsyncOperation: + An object representing a long-running operation. + + The result type for the operation will be :class:`google.cloud.dataplex_v1.types.AspectType` AspectType is a template for creating Aspects, and represents the + JSON-schema for a given Entry, for example, BigQuery + Table Schema. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent, aspect_type, aspect_type_id]) + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, catalog.CreateAspectTypeRequest): + request = catalog.CreateAspectTypeRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + if aspect_type is not None: + request.aspect_type = aspect_type + if aspect_type_id is not None: + request.aspect_type_id = aspect_type_id + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[self._client._transport.create_aspect_type] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("parent", request.parent), + )), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation_async.from_gapic( + response, + self._client._transport.operations_client, + catalog.AspectType, + metadata_type=service.OperationMetadata, + ) + + # Done; return the response. + return response + + async def update_aspect_type(self, + request: Optional[Union[catalog.UpdateAspectTypeRequest, dict]] = None, + *, + aspect_type: Optional[catalog.AspectType] = None, + update_mask: Optional[field_mask_pb2.FieldMask] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation_async.AsyncOperation: + r"""Updates an AspectType. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dataplex_v1 + + async def sample_update_aspect_type(): + # Create a client + client = dataplex_v1.CatalogServiceAsyncClient() + + # Initialize request argument(s) + aspect_type = dataplex_v1.AspectType() + aspect_type.metadata_template.name = "name_value" + aspect_type.metadata_template.type_ = "type__value" + + request = dataplex_v1.UpdateAspectTypeRequest( + aspect_type=aspect_type, + ) + + # Make the request + operation = client.update_aspect_type(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.dataplex_v1.types.UpdateAspectTypeRequest, dict]]): + The request object. Update AspectType Request + aspect_type (:class:`google.cloud.dataplex_v1.types.AspectType`): + Required. AspectType Resource + This corresponds to the ``aspect_type`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + update_mask (:class:`google.protobuf.field_mask_pb2.FieldMask`): + Required. Mask of fields to update. + This corresponds to the ``update_mask`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation_async.AsyncOperation: + An object representing a long-running operation. + + The result type for the operation will be :class:`google.cloud.dataplex_v1.types.AspectType` AspectType is a template for creating Aspects, and represents the + JSON-schema for a given Entry, for example, BigQuery + Table Schema. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([aspect_type, update_mask]) + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, catalog.UpdateAspectTypeRequest): + request = catalog.UpdateAspectTypeRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if aspect_type is not None: + request.aspect_type = aspect_type + if update_mask is not None: + request.update_mask = update_mask + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[self._client._transport.update_aspect_type] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("aspect_type.name", request.aspect_type.name), + )), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. 
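+        # The retry and timeout forwarded here are gapic_v1.method.DEFAULT
+        # sentinels unless the caller overrides them; a caller-side sketch
+        # (the AsyncRetry values below are illustrative, not this client's
+        # configured defaults):
+        #
+        #     from google.api_core import retry_async
+        #
+        #     operation = await client.update_aspect_type(
+        #         request=request,
+        #         retry=retry_async.AsyncRetry(initial=1.0, maximum=10.0, timeout=60.0),
+        #         timeout=60.0,
+        #     )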
+        response = await rpc(
+            request,
+            retry=retry,
+            timeout=timeout,
+            metadata=metadata,
+        )
+
+        # Wrap the response in an operation future.
+        response = operation_async.from_gapic(
+            response,
+            self._client._transport.operations_client,
+            catalog.AspectType,
+            metadata_type=service.OperationMetadata,
+        )
+
+        # Done; return the response.
+        return response
+
+    async def delete_aspect_type(self,
+            request: Optional[Union[catalog.DeleteAspectTypeRequest, dict]] = None,
+            *,
+            name: Optional[str] = None,
+            retry: OptionalRetry = gapic_v1.method.DEFAULT,
+            timeout: Union[float, object] = gapic_v1.method.DEFAULT,
+            metadata: Sequence[Tuple[str, str]] = (),
+            ) -> operation_async.AsyncOperation:
+        r"""Deletes an AspectType.
+
+        .. code-block:: python
+
+            # This snippet has been automatically generated and should be regarded as a
+            # code template only.
+            # It will require modifications to work:
+            # - It may require correct/in-range values for request initialization.
+            # - It may require specifying regional endpoints when creating the service
+            #   client as shown in:
+            #   https://googleapis.dev/python/google-api-core/latest/client_options.html
+            from google.cloud import dataplex_v1
+
+            async def sample_delete_aspect_type():
+                # Create a client
+                client = dataplex_v1.CatalogServiceAsyncClient()
+
+                # Initialize request argument(s)
+                request = dataplex_v1.DeleteAspectTypeRequest(
+                    name="name_value",
+                )
+
+                # Make the request
+                operation = client.delete_aspect_type(request=request)
+
+                print("Waiting for operation to complete...")
+
+                response = (await operation).result()
+
+                # Handle the response
+                print(response)
+
+        Args:
+            request (Optional[Union[google.cloud.dataplex_v1.types.DeleteAspectTypeRequest, dict]]):
+                The request object. Delete AspectType Request.
+            name (:class:`str`):
+                Required. The resource name of the AspectType:
+                ``projects/{project_number}/locations/{location_id}/aspectTypes/{aspect_type_id}``.
+
+                This corresponds to the ``name`` field
+                on the ``request`` instance; if ``request`` is provided, this
+                should not be set.
+            retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any,
+                should be retried.
+            timeout (float): The timeout for this request.
+            metadata (Sequence[Tuple[str, str]]): Strings which should be
+                sent along with the request as metadata.
+
+        Returns:
+            google.api_core.operation_async.AsyncOperation:
+                An object representing a long-running operation.
+
+                The result type for the operation will be :class:`google.protobuf.empty_pb2.Empty` A generic empty message that you can re-use to avoid defining duplicated
+                empty messages in your APIs. A typical example is to
+                use it as the request or the response type of an API
+                method. For instance:
+
+                service Foo {
+                  rpc Bar(google.protobuf.Empty) returns
+                  (google.protobuf.Empty);
+
+                }
+
+        """
+        # Create or coerce a protobuf request object.
+        # - Quick check: If we got a request object, we should *not* have
+        #   gotten any keyword arguments that map to the request.
+        has_flattened_params = any([name])
+        if request is not None and has_flattened_params:
+            raise ValueError("If the `request` argument is set, then none of "
+                             "the individual field arguments should be set.")
+
+        # - Use the request object if provided (there's no risk of modifying the input as
+        #   there are no flattened fields), or create one.
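+        # A plain dict is accepted here as well and is coerced into the
+        # request type below, e.g. (hypothetical resource name):
+        #
+        #     request={"name": "projects/my-project/locations/us-central1/aspectTypes/my-aspect-type"}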
+ if not isinstance(request, catalog.DeleteAspectTypeRequest): + request = catalog.DeleteAspectTypeRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[self._client._transport.delete_aspect_type] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation_async.from_gapic( + response, + self._client._transport.operations_client, + empty_pb2.Empty, + metadata_type=service.OperationMetadata, + ) + + # Done; return the response. + return response + + async def list_aspect_types(self, + request: Optional[Union[catalog.ListAspectTypesRequest, dict]] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListAspectTypesAsyncPager: + r"""Lists AspectType resources in a project and location. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dataplex_v1 + + async def sample_list_aspect_types(): + # Create a client + client = dataplex_v1.CatalogServiceAsyncClient() + + # Initialize request argument(s) + request = dataplex_v1.ListAspectTypesRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_aspect_types(request=request) + + # Handle the response + async for response in page_result: + print(response) + + Args: + request (Optional[Union[google.cloud.dataplex_v1.types.ListAspectTypesRequest, dict]]): + The request object. List AspectTypes request. + parent (:class:`str`): + Required. The resource name of the AspectType location, + of the form: + ``projects/{project_number}/locations/{location_id}`` + where ``location_id`` refers to a Google Cloud region. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.dataplex_v1.services.catalog_service.pagers.ListAspectTypesAsyncPager: + List AspectTypes response. + + Iterating over this object will yield + results and resolve additional pages + automatically. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
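+        # For example, these two calls are equivalent (sketch with a
+        # hypothetical parent), but mixing the two styles raises the
+        # ValueError below:
+        #
+        #     client.list_aspect_types(parent="projects/my-project/locations/us-central1")
+        #     client.list_aspect_types(request={"parent": "projects/my-project/locations/us-central1"})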
+ has_flattened_params = any([parent]) + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, catalog.ListAspectTypesRequest): + request = catalog.ListAspectTypesRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[self._client._transport.list_aspect_types] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("parent", request.parent), + )), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__aiter__` convenience method. + response = pagers.ListAspectTypesAsyncPager( + method=rpc, + request=request, + response=response, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def get_aspect_type(self, + request: Optional[Union[catalog.GetAspectTypeRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> catalog.AspectType: + r"""Gets an AspectType. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dataplex_v1 + + async def sample_get_aspect_type(): + # Create a client + client = dataplex_v1.CatalogServiceAsyncClient() + + # Initialize request argument(s) + request = dataplex_v1.GetAspectTypeRequest( + name="name_value", + ) + + # Make the request + response = await client.get_aspect_type(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.dataplex_v1.types.GetAspectTypeRequest, dict]]): + The request object. Get AspectType request. + name (:class:`str`): + Required. The resource name of the AspectType: + ``projects/{project_number}/locations/{location_id}/aspectTypes/{aspect_type_id}``. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ + Returns: + google.cloud.dataplex_v1.types.AspectType: + AspectType is a template for creating + Aspects, and represents the JSON-schema + for a given Entry, for example, BigQuery + Table Schema. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, catalog.GetAspectTypeRequest): + request = catalog.GetAspectTypeRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[self._client._transport.get_aspect_type] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def create_entry_group(self, + request: Optional[Union[catalog.CreateEntryGroupRequest, dict]] = None, + *, + parent: Optional[str] = None, + entry_group: Optional[catalog.EntryGroup] = None, + entry_group_id: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation_async.AsyncOperation: + r"""Creates an EntryGroup. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dataplex_v1 + + async def sample_create_entry_group(): + # Create a client + client = dataplex_v1.CatalogServiceAsyncClient() + + # Initialize request argument(s) + request = dataplex_v1.CreateEntryGroupRequest( + parent="parent_value", + entry_group_id="entry_group_id_value", + ) + + # Make the request + operation = client.create_entry_group(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.dataplex_v1.types.CreateEntryGroupRequest, dict]]): + The request object. Create EntryGroup Request. + parent (:class:`str`): + Required. The resource name of the entryGroup, of the + form: projects/{project_number}/locations/{location_id} + where ``location_id`` refers to a GCP region. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. 
+ entry_group (:class:`google.cloud.dataplex_v1.types.EntryGroup`): + Required. EntryGroup Resource. + This corresponds to the ``entry_group`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + entry_group_id (:class:`str`): + Required. EntryGroup identifier. + This corresponds to the ``entry_group_id`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation_async.AsyncOperation: + An object representing a long-running operation. + + The result type for the operation will be + :class:`google.cloud.dataplex_v1.types.EntryGroup` An + Entry Group represents a logical grouping of one or more + Entries. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent, entry_group, entry_group_id]) + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, catalog.CreateEntryGroupRequest): + request = catalog.CreateEntryGroupRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + if entry_group is not None: + request.entry_group = entry_group + if entry_group_id is not None: + request.entry_group_id = entry_group_id + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[self._client._transport.create_entry_group] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("parent", request.parent), + )), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation_async.from_gapic( + response, + self._client._transport.operations_client, + catalog.EntryGroup, + metadata_type=service.OperationMetadata, + ) + + # Done; return the response. + return response + + async def update_entry_group(self, + request: Optional[Union[catalog.UpdateEntryGroupRequest, dict]] = None, + *, + entry_group: Optional[catalog.EntryGroup] = None, + update_mask: Optional[field_mask_pb2.FieldMask] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation_async.AsyncOperation: + r"""Updates an EntryGroup. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dataplex_v1 + + async def sample_update_entry_group(): + # Create a client + client = dataplex_v1.CatalogServiceAsyncClient() + + # Initialize request argument(s) + request = dataplex_v1.UpdateEntryGroupRequest( + ) + + # Make the request + operation = client.update_entry_group(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.dataplex_v1.types.UpdateEntryGroupRequest, dict]]): + The request object. Update EntryGroup Request. + entry_group (:class:`google.cloud.dataplex_v1.types.EntryGroup`): + Required. EntryGroup Resource. + This corresponds to the ``entry_group`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + update_mask (:class:`google.protobuf.field_mask_pb2.FieldMask`): + Required. Mask of fields to update. + This corresponds to the ``update_mask`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation_async.AsyncOperation: + An object representing a long-running operation. + + The result type for the operation will be + :class:`google.cloud.dataplex_v1.types.EntryGroup` An + Entry Group represents a logical grouping of one or more + Entries. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([entry_group, update_mask]) + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, catalog.UpdateEntryGroupRequest): + request = catalog.UpdateEntryGroupRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if entry_group is not None: + request.entry_group = entry_group + if update_mask is not None: + request.update_mask = update_mask + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[self._client._transport.update_entry_group] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("entry_group.name", request.entry_group.name), + )), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. 
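+        # from_gapic wraps the raw long-running Operation in an AsyncOperation
+        # future: its eventual result is decoded as catalog.EntryGroup, while
+        # in-flight metadata is decoded as service.OperationMetadata.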
+ response = operation_async.from_gapic( + response, + self._client._transport.operations_client, + catalog.EntryGroup, + metadata_type=service.OperationMetadata, + ) + + # Done; return the response. + return response + + async def delete_entry_group(self, + request: Optional[Union[catalog.DeleteEntryGroupRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation_async.AsyncOperation: + r"""Deletes an EntryGroup. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dataplex_v1 + + async def sample_delete_entry_group(): + # Create a client + client = dataplex_v1.CatalogServiceAsyncClient() + + # Initialize request argument(s) + request = dataplex_v1.DeleteEntryGroupRequest( + name="name_value", + ) + + # Make the request + operation = client.delete_entry_group(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.dataplex_v1.types.DeleteEntryGroupRequest, dict]]): + The request object. Delete EntryGroup Request. + name (:class:`str`): + Required. The resource name of the EntryGroup: + ``projects/{project_number}/locations/{location_id}/entryGroups/{entry_group_id}``. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation_async.AsyncOperation: + An object representing a long-running operation. + + The result type for the operation will be :class:`google.protobuf.empty_pb2.Empty` A generic empty message that you can re-use to avoid defining duplicated + empty messages in your APIs. A typical example is to + use it as the request or the response type of an API + method. For instance: + + service Foo { + rpc Bar(google.protobuf.Empty) returns + (google.protobuf.Empty); + + } + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, catalog.DeleteEntryGroupRequest): + request = catalog.DeleteEntryGroupRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. 
+ if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[self._client._transport.delete_entry_group] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation_async.from_gapic( + response, + self._client._transport.operations_client, + empty_pb2.Empty, + metadata_type=service.OperationMetadata, + ) + + # Done; return the response. + return response + + async def list_entry_groups(self, + request: Optional[Union[catalog.ListEntryGroupsRequest, dict]] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListEntryGroupsAsyncPager: + r"""Lists EntryGroup resources in a project and location. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dataplex_v1 + + async def sample_list_entry_groups(): + # Create a client + client = dataplex_v1.CatalogServiceAsyncClient() + + # Initialize request argument(s) + request = dataplex_v1.ListEntryGroupsRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_entry_groups(request=request) + + # Handle the response + async for response in page_result: + print(response) + + Args: + request (Optional[Union[google.cloud.dataplex_v1.types.ListEntryGroupsRequest, dict]]): + The request object. List entryGroups request. + parent (:class:`str`): + Required. The resource name of the entryGroup location, + of the form: + ``projects/{project_number}/locations/{location_id}`` + where ``location_id`` refers to a Google Cloud region. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.dataplex_v1.services.catalog_service.pagers.ListEntryGroupsAsyncPager: + List entry groups response. + + Iterating over this object will yield + results and resolve additional pages + automatically. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
+ has_flattened_params = any([parent]) + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, catalog.ListEntryGroupsRequest): + request = catalog.ListEntryGroupsRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[self._client._transport.list_entry_groups] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("parent", request.parent), + )), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__aiter__` convenience method. + response = pagers.ListEntryGroupsAsyncPager( + method=rpc, + request=request, + response=response, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def get_entry_group(self, + request: Optional[Union[catalog.GetEntryGroupRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> catalog.EntryGroup: + r"""Gets an EntryGroup. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dataplex_v1 + + async def sample_get_entry_group(): + # Create a client + client = dataplex_v1.CatalogServiceAsyncClient() + + # Initialize request argument(s) + request = dataplex_v1.GetEntryGroupRequest( + name="name_value", + ) + + # Make the request + response = await client.get_entry_group(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.dataplex_v1.types.GetEntryGroupRequest, dict]]): + The request object. Get EntryGroup request. + name (:class:`str`): + Required. The resource name of the EntryGroup: + ``projects/{project_number}/locations/{location_id}/entryGroups/{entry_group_id}``. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.dataplex_v1.types.EntryGroup: + An Entry Group represents a logical + grouping of one or more Entries. 
+ + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, catalog.GetEntryGroupRequest): + request = catalog.GetEntryGroupRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[self._client._transport.get_entry_group] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def create_entry(self, + request: Optional[Union[catalog.CreateEntryRequest, dict]] = None, + *, + parent: Optional[str] = None, + entry: Optional[catalog.Entry] = None, + entry_id: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> catalog.Entry: + r"""Creates an Entry. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dataplex_v1 + + async def sample_create_entry(): + # Create a client + client = dataplex_v1.CatalogServiceAsyncClient() + + # Initialize request argument(s) + entry = dataplex_v1.Entry() + entry.entry_type = "entry_type_value" + + request = dataplex_v1.CreateEntryRequest( + parent="parent_value", + entry_id="entry_id_value", + entry=entry, + ) + + # Make the request + response = await client.create_entry(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.dataplex_v1.types.CreateEntryRequest, dict]]): + The request object. Create Entry request. + parent (:class:`str`): + Required. The resource name of the parent Entry Group: + ``projects/{project}/locations/{location}/entryGroups/{entry_group}``. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + entry (:class:`google.cloud.dataplex_v1.types.Entry`): + Required. Entry resource. + This corresponds to the ``entry`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + entry_id (:class:`str`): + Required. Entry identifier. It has to be unique within + an Entry Group. 
+
+                Entries corresponding to Google Cloud resources use an
+                Entry ID format based on `full resource
+                names <https://cloud.google.com/apis/design/resource_names#full_resource_name>`__.
+                The format is a full resource name of the resource
+                without the prefix double slashes in the API service
+                name part of the full resource name. This allows
+                retrieval of entries using their associated resource
+                name.
+
+                For example, if the full resource name of a resource is
+                ``//library.googleapis.com/shelves/shelf1/books/book2``,
+                then the suggested entry_id is
+                ``library.googleapis.com/shelves/shelf1/books/book2``.
+
+                It is also suggested to follow the same convention for
+                entries corresponding to resources from providers or
+                systems other than Google Cloud.
+
+                The maximum size of the field is 4000 characters.
+
+                This corresponds to the ``entry_id`` field
+                on the ``request`` instance; if ``request`` is provided, this
+                should not be set.
+            retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any,
+                should be retried.
+            timeout (float): The timeout for this request.
+            metadata (Sequence[Tuple[str, str]]): Strings which should be
+                sent along with the request as metadata.
+
+        Returns:
+            google.cloud.dataplex_v1.types.Entry:
+                An entry is a representation of a
+                data resource that can be described by
+                various metadata.
+
+        """
+        # Create or coerce a protobuf request object.
+        # - Quick check: If we got a request object, we should *not* have
+        #   gotten any keyword arguments that map to the request.
+        has_flattened_params = any([parent, entry, entry_id])
+        if request is not None and has_flattened_params:
+            raise ValueError("If the `request` argument is set, then none of "
+                             "the individual field arguments should be set.")
+
+        # - Use the request object if provided (there's no risk of modifying the input as
+        #   there are no flattened fields), or create one.
+        if not isinstance(request, catalog.CreateEntryRequest):
+            request = catalog.CreateEntryRequest(request)
+
+        # If we have keyword arguments corresponding to fields on the
+        # request, apply these.
+        if parent is not None:
+            request.parent = parent
+        if entry is not None:
+            request.entry = entry
+        if entry_id is not None:
+            request.entry_id = entry_id
+
+        # Wrap the RPC method; this adds retry and timeout information,
+        # and friendly error handling.
+        rpc = self._client._transport._wrapped_methods[self._client._transport.create_entry]
+
+        # Certain fields should be provided within the metadata header;
+        # add these here.
+        metadata = tuple(metadata) + (
+            gapic_v1.routing_header.to_grpc_metadata((
+                ("parent", request.parent),
+            )),
+        )
+
+        # Validate the universe domain.
+        self._client._validate_universe_domain()
+
+        # Send the request.
+        response = await rpc(
+            request,
+            retry=retry,
+            timeout=timeout,
+            metadata=metadata,
+        )
+
+        # Done; return the response.
+        return response
+
+    async def update_entry(self,
+            request: Optional[Union[catalog.UpdateEntryRequest, dict]] = None,
+            *,
+            entry: Optional[catalog.Entry] = None,
+            update_mask: Optional[field_mask_pb2.FieldMask] = None,
+            retry: OptionalRetry = gapic_v1.method.DEFAULT,
+            timeout: Union[float, object] = gapic_v1.method.DEFAULT,
+            metadata: Sequence[Tuple[str, str]] = (),
+            ) -> catalog.Entry:
+        r"""Updates an Entry.
+
+        .. code-block:: python
+
+            # This snippet has been automatically generated and should be regarded as a
+            # code template only.
+            # It will require modifications to work:
+            # - It may require correct/in-range values for request initialization.
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dataplex_v1 + + async def sample_update_entry(): + # Create a client + client = dataplex_v1.CatalogServiceAsyncClient() + + # Initialize request argument(s) + entry = dataplex_v1.Entry() + entry.entry_type = "entry_type_value" + + request = dataplex_v1.UpdateEntryRequest( + entry=entry, + ) + + # Make the request + response = await client.update_entry(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.dataplex_v1.types.UpdateEntryRequest, dict]]): + The request object. Update Entry request. + entry (:class:`google.cloud.dataplex_v1.types.Entry`): + Required. Entry resource. + This corresponds to the ``entry`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + update_mask (:class:`google.protobuf.field_mask_pb2.FieldMask`): + Optional. Mask of fields to update. To update Aspects, + the update_mask must contain the value "aspects". + + If the update_mask is empty, the service will update all + modifiable fields present in the request. + + This corresponds to the ``update_mask`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.dataplex_v1.types.Entry: + An entry is a representation of a + data resource that can be described by + various metadata. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([entry, update_mask]) + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, catalog.UpdateEntryRequest): + request = catalog.UpdateEntryRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if entry is not None: + request.entry = entry + if update_mask is not None: + request.update_mask = update_mask + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[self._client._transport.update_entry] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("entry.name", request.entry.name), + )), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + + async def delete_entry(self, + request: Optional[Union[catalog.DeleteEntryRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> catalog.Entry: + r"""Deletes an Entry. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dataplex_v1 + + async def sample_delete_entry(): + # Create a client + client = dataplex_v1.CatalogServiceAsyncClient() + + # Initialize request argument(s) + request = dataplex_v1.DeleteEntryRequest( + name="name_value", + ) + + # Make the request + response = await client.delete_entry(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.dataplex_v1.types.DeleteEntryRequest, dict]]): + The request object. Delete Entry request. + name (:class:`str`): + Required. The resource name of the Entry: + ``projects/{project}/locations/{location}/entryGroups/{entry_group}/entries/{entry}``. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.dataplex_v1.types.Entry: + An entry is a representation of a + data resource that can be described by + various metadata. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, catalog.DeleteEntryRequest): + request = catalog.DeleteEntryRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[self._client._transport.delete_entry] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + + async def list_entries(self, + request: Optional[Union[catalog.ListEntriesRequest, dict]] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListEntriesAsyncPager: + r"""Lists Entries within an EntryGroup. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dataplex_v1 + + async def sample_list_entries(): + # Create a client + client = dataplex_v1.CatalogServiceAsyncClient() + + # Initialize request argument(s) + request = dataplex_v1.ListEntriesRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_entries(request=request) + + # Handle the response + async for response in page_result: + print(response) + + Args: + request (Optional[Union[google.cloud.dataplex_v1.types.ListEntriesRequest, dict]]): + The request object. List Entries request. + parent (:class:`str`): + Required. The resource name of the parent Entry Group: + ``projects/{project}/locations/{location}/entryGroups/{entry_group}``. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.dataplex_v1.services.catalog_service.pagers.ListEntriesAsyncPager: + List Entries response. + + Iterating over this object will yield + results and resolve additional pages + automatically. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent]) + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, catalog.ListEntriesRequest): + request = catalog.ListEntriesRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[self._client._transport.list_entries] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("parent", request.parent), + )), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. 
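+        # The call below returns only the first page of results; the pager
+        # constructed afterwards re-invokes the RPC with each next_page_token,
+        # so callers just iterate as in the sample above:
+        #
+        #     async for entry in page_result:
+        #         print(entry)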
+ response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__aiter__` convenience method. + response = pagers.ListEntriesAsyncPager( + method=rpc, + request=request, + response=response, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def get_entry(self, + request: Optional[Union[catalog.GetEntryRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> catalog.Entry: + r"""Gets an Entry. + + **Caution**: The BigQuery metadata that is stored in Dataplex + Catalog is changing. For more information, see `Changes to + BigQuery metadata stored in Dataplex + Catalog `__. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dataplex_v1 + + async def sample_get_entry(): + # Create a client + client = dataplex_v1.CatalogServiceAsyncClient() + + # Initialize request argument(s) + request = dataplex_v1.GetEntryRequest( + name="name_value", + ) + + # Make the request + response = await client.get_entry(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.dataplex_v1.types.GetEntryRequest, dict]]): + The request object. Get Entry request. + name (:class:`str`): + Required. The resource name of the Entry: + ``projects/{project}/locations/{location}/entryGroups/{entry_group}/entries/{entry}``. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.dataplex_v1.types.Entry: + An entry is a representation of a + data resource that can be described by + various metadata. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, catalog.GetEntryRequest): + request = catalog.GetEntryRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. 
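+        # _wrapped_methods maps each raw transport stub to a variant wrapped
+        # with the default retry/timeout policy from the service config; the
+        # DEFAULT sentinels passed below resolve to those policies.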
+ rpc = self._client._transport._wrapped_methods[self._client._transport.get_entry] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def lookup_entry(self, + request: Optional[Union[catalog.LookupEntryRequest, dict]] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> catalog.Entry: + r"""Looks up a single Entry by name using the permission on the + source system. + + **Caution**: The BigQuery metadata that is stored in Dataplex + Catalog is changing. For more information, see `Changes to + BigQuery metadata stored in Dataplex + Catalog `__. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dataplex_v1 + + async def sample_lookup_entry(): + # Create a client + client = dataplex_v1.CatalogServiceAsyncClient() + + # Initialize request argument(s) + request = dataplex_v1.LookupEntryRequest( + name="name_value", + entry="entry_value", + ) + + # Make the request + response = await client.lookup_entry(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.dataplex_v1.types.LookupEntryRequest, dict]]): + The request object. Lookup Entry request using + permissions in the source system. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.dataplex_v1.types.Entry: + An entry is a representation of a + data resource that can be described by + various metadata. + + """ + # Create or coerce a protobuf request object. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, catalog.LookupEntryRequest): + request = catalog.LookupEntryRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[self._client._transport.lookup_entry] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + + async def search_entries(self, + request: Optional[Union[catalog.SearchEntriesRequest, dict]] = None, + *, + name: Optional[str] = None, + query: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.SearchEntriesAsyncPager: + r"""Searches for Entries matching the given query and + scope. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dataplex_v1 + + async def sample_search_entries(): + # Create a client + client = dataplex_v1.CatalogServiceAsyncClient() + + # Initialize request argument(s) + request = dataplex_v1.SearchEntriesRequest( + name="name_value", + query="query_value", + ) + + # Make the request + page_result = client.search_entries(request=request) + + # Handle the response + async for response in page_result: + print(response) + + Args: + request (Optional[Union[google.cloud.dataplex_v1.types.SearchEntriesRequest, dict]]): + The request object. + name (:class:`str`): + Required. The project to which the request should be + attributed in the following form: + ``projects/{project}/locations/{location}``. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + query (:class:`str`): + Required. The query against which + entries in scope should be matched. + + This corresponds to the ``query`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.dataplex_v1.services.catalog_service.pagers.SearchEntriesAsyncPager: + Iterating over this object will yield + results and resolve additional pages + automatically. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name, query]) + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, catalog.SearchEntriesRequest): + request = catalog.SearchEntriesRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + if query is not None: + request.query = query + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[self._client._transport.search_entries] + + # Certain fields should be provided within the metadata header; + # add these here. 
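+        # The routing parameters below are serialized into the
+        # "x-goog-request-params" request header (for example,
+        # "name=projects/{project}/locations/{location}"), which the
+        # backend uses to route the request to the correct location.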
+ metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__aiter__` convenience method. + response = pagers.SearchEntriesAsyncPager( + method=rpc, + request=request, + response=response, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def create_metadata_job(self, + request: Optional[Union[catalog.CreateMetadataJobRequest, dict]] = None, + *, + parent: Optional[str] = None, + metadata_job: Optional[catalog.MetadataJob] = None, + metadata_job_id: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation_async.AsyncOperation: + r"""Creates a metadata job. For example, use a metadata + job to import Dataplex Catalog entries and aspects from + a third-party system into Dataplex. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dataplex_v1 + + async def sample_create_metadata_job(): + # Create a client + client = dataplex_v1.CatalogServiceAsyncClient() + + # Initialize request argument(s) + metadata_job = dataplex_v1.MetadataJob() + metadata_job.import_spec.scope.entry_groups = ['entry_groups_value1', 'entry_groups_value2'] + metadata_job.import_spec.scope.entry_types = ['entry_types_value1', 'entry_types_value2'] + metadata_job.import_spec.entry_sync_mode = "INCREMENTAL" + metadata_job.import_spec.aspect_sync_mode = "INCREMENTAL" + metadata_job.type_ = "IMPORT" + + request = dataplex_v1.CreateMetadataJobRequest( + parent="parent_value", + metadata_job=metadata_job, + ) + + # Make the request + operation = client.create_metadata_job(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.dataplex_v1.types.CreateMetadataJobRequest, dict]]): + The request object. Create metadata job request. + parent (:class:`str`): + Required. The resource name of the parent location, in + the format + ``projects/{project_id_or_number}/locations/{location_id}`` + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + metadata_job (:class:`google.cloud.dataplex_v1.types.MetadataJob`): + Required. The metadata job resource. + This corresponds to the ``metadata_job`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + metadata_job_id (:class:`str`): + Optional. The metadata job ID. If not provided, a unique + ID is generated with the prefix ``metadata-job-``. + + This corresponds to the ``metadata_job_id`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. 
+ retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation_async.AsyncOperation: + An object representing a long-running operation. + + The result type for the operation will be + :class:`google.cloud.dataplex_v1.types.MetadataJob` A + metadata job resource. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent, metadata_job, metadata_job_id]) + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, catalog.CreateMetadataJobRequest): + request = catalog.CreateMetadataJobRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + if metadata_job is not None: + request.metadata_job = metadata_job + if metadata_job_id is not None: + request.metadata_job_id = metadata_job_id + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[self._client._transport.create_metadata_job] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("parent", request.parent), + )), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation_async.from_gapic( + response, + self._client._transport.operations_client, + catalog.MetadataJob, + metadata_type=service.OperationMetadata, + ) + + # Done; return the response. + return response + + async def get_metadata_job(self, + request: Optional[Union[catalog.GetMetadataJobRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> catalog.MetadataJob: + r"""Gets a metadata job. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dataplex_v1 + + async def sample_get_metadata_job(): + # Create a client + client = dataplex_v1.CatalogServiceAsyncClient() + + # Initialize request argument(s) + request = dataplex_v1.GetMetadataJobRequest( + name="name_value", + ) + + # Make the request + response = await client.get_metadata_job(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.dataplex_v1.types.GetMetadataJobRequest, dict]]): + The request object. Get metadata job request. + name (:class:`str`): + Required. The resource name of the metadata job, in the + format + ``projects/{project_id_or_number}/locations/{location_id}/metadataJobs/{metadata_job_id}``. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.dataplex_v1.types.MetadataJob: + A metadata job resource. + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, catalog.GetMetadataJobRequest): + request = catalog.GetMetadataJobRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[self._client._transport.get_metadata_job] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def list_metadata_jobs(self, + request: Optional[Union[catalog.ListMetadataJobsRequest, dict]] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListMetadataJobsAsyncPager: + r"""Lists metadata jobs. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dataplex_v1 + + async def sample_list_metadata_jobs(): + # Create a client + client = dataplex_v1.CatalogServiceAsyncClient() + + # Initialize request argument(s) + request = dataplex_v1.ListMetadataJobsRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_metadata_jobs(request=request) + + # Handle the response + async for response in page_result: + print(response) + + Args: + request (Optional[Union[google.cloud.dataplex_v1.types.ListMetadataJobsRequest, dict]]): + The request object. List metadata jobs request. + parent (:class:`str`): + Required. The resource name of the parent location, in + the format + ``projects/{project_id_or_number}/locations/{location_id}`` + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.dataplex_v1.services.catalog_service.pagers.ListMetadataJobsAsyncPager: + List metadata jobs response. + + Iterating over this object will yield + results and resolve additional pages + automatically. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent]) + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, catalog.ListMetadataJobsRequest): + request = catalog.ListMetadataJobsRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[self._client._transport.list_metadata_jobs] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("parent", request.parent), + )), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__aiter__` convenience method. + response = pagers.ListMetadataJobsAsyncPager( + method=rpc, + request=request, + response=response, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + + async def cancel_metadata_job(self, + request: Optional[Union[catalog.CancelMetadataJobRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Cancels a metadata job. + + If you cancel a metadata import job that is in progress, + the changes in the job might be partially applied. We + recommend that you reset the state of the entry groups + in your project by running another metadata job that + reverts the changes from the canceled job. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dataplex_v1 + + async def sample_cancel_metadata_job(): + # Create a client + client = dataplex_v1.CatalogServiceAsyncClient() + + # Initialize request argument(s) + request = dataplex_v1.CancelMetadataJobRequest( + name="name_value", + ) + + # Make the request + await client.cancel_metadata_job(request=request) + + Args: + request (Optional[Union[google.cloud.dataplex_v1.types.CancelMetadataJobRequest, dict]]): + The request object. Cancel metadata job request. + name (:class:`str`): + Required. The resource name of the job, in the format + ``projects/{project_id_or_number}/locations/{location_id}/metadataJobs/{metadata_job_id}`` + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, catalog.CancelMetadataJobRequest): + request = catalog.CancelMetadataJobRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[self._client._transport.cancel_metadata_job] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. 
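+        # CancelMetadataJob returns google.protobuf.Empty, so there is no
+        # response to unpack; the call simply raises on failure.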
+ await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + async def list_operations( + self, + request: Optional[operations_pb2.ListOperationsRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.ListOperationsResponse: + r"""Lists operations that match the specified filter in the request. + + Args: + request (:class:`~.operations_pb2.ListOperationsRequest`): + The request object. Request message for + `ListOperations` method. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + ~.operations_pb2.ListOperationsResponse: + Response message for ``ListOperations`` method. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.ListOperationsRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self.transport._wrapped_methods[self._client._transport.list_operations] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("name", request.name),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, retry=retry, timeout=timeout, metadata=metadata,) + + # Done; return the response. + return response + + async def get_operation( + self, + request: Optional[operations_pb2.GetOperationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.Operation: + r"""Gets the latest state of a long-running operation. + + Args: + request (:class:`~.operations_pb2.GetOperationRequest`): + The request object. Request message for + `GetOperation` method. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + ~.operations_pb2.Operation: + An ``Operation`` object. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.GetOperationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self.transport._wrapped_methods[self._client._transport.get_operation] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("name", request.name),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, retry=retry, timeout=timeout, metadata=metadata,) + + # Done; return the response. 
+ return response + + async def delete_operation( + self, + request: Optional[operations_pb2.DeleteOperationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Deletes a long-running operation. + + This method indicates that the client is no longer interested + in the operation result. It does not cancel the operation. + If the server doesn't support this method, it returns + `google.rpc.Code.UNIMPLEMENTED`. + + Args: + request (:class:`~.operations_pb2.DeleteOperationRequest`): + The request object. Request message for + `DeleteOperation` method. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + None + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.DeleteOperationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self.transport._wrapped_methods[self._client._transport.delete_operation] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("name", request.name),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + await rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + + async def cancel_operation( + self, + request: Optional[operations_pb2.CancelOperationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Starts asynchronous cancellation on a long-running operation. + + The server makes a best effort to cancel the operation, but success + is not guaranteed. If the server doesn't support this method, it returns + `google.rpc.Code.UNIMPLEMENTED`. + + Args: + request (:class:`~.operations_pb2.CancelOperationRequest`): + The request object. Request message for + `CancelOperation` method. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + None + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.CancelOperationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self.transport._wrapped_methods[self._client._transport.cancel_operation] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("name", request.name),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. 
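+        # Cancellation is best-effort (see the docstring above): the
+        # operation may still complete, so callers that need the final state
+        # should poll get_operation() afterwards.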
+ await rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + + async def get_location( + self, + request: Optional[locations_pb2.GetLocationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> locations_pb2.Location: + r"""Gets information about a location. + + Args: + request (:class:`~.location_pb2.GetLocationRequest`): + The request object. Request message for + `GetLocation` method. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + ~.location_pb2.Location: + Location object. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = locations_pb2.GetLocationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self.transport._wrapped_methods[self._client._transport.get_location] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("name", request.name),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, retry=retry, timeout=timeout, metadata=metadata,) + + # Done; return the response. + return response + + async def list_locations( + self, + request: Optional[locations_pb2.ListLocationsRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> locations_pb2.ListLocationsResponse: + r"""Lists information about the supported locations for this service. + + Args: + request (:class:`~.location_pb2.ListLocationsRequest`): + The request object. Request message for + `ListLocations` method. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + ~.location_pb2.ListLocationsResponse: + Response message for ``ListLocations`` method. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = locations_pb2.ListLocationsRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self.transport._wrapped_methods[self._client._transport.list_locations] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("name", request.name),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, retry=retry, timeout=timeout, metadata=metadata,) + + # Done; return the response. 
+ return response + + async def __aenter__(self) -> "CatalogServiceAsyncClient": + return self + + async def __aexit__(self, exc_type, exc, tb): + await self.transport.close() + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(gapic_version=package_version.__version__) + + +__all__ = ( + "CatalogServiceAsyncClient", +) diff --git a/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/catalog_service/client.py b/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/catalog_service/client.py new file mode 100644 index 000000000000..1c5c93d92538 --- /dev/null +++ b/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/catalog_service/client.py @@ -0,0 +1,3986 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from collections import OrderedDict +import os +import re +from typing import Dict, Callable, Mapping, MutableMapping, MutableSequence, Optional, Sequence, Tuple, Type, Union, cast +import warnings + +from google.cloud.dataplex_v1 import gapic_version as package_version + +from google.api_core import client_options as client_options_lib +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry as retries +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport import mtls # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.auth.exceptions import MutualTLSChannelError # type: ignore +from google.oauth2 import service_account # type: ignore + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault, None] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object, None] # type: ignore + +from google.api_core import operation # type: ignore +from google.api_core import operation_async # type: ignore +from google.cloud.dataplex_v1.services.catalog_service import pagers +from google.cloud.dataplex_v1.types import catalog +from google.cloud.dataplex_v1.types import service +from google.cloud.location import locations_pb2 # type: ignore +from google.iam.v1 import iam_policy_pb2 # type: ignore +from google.iam.v1 import policy_pb2 # type: ignore +from google.longrunning import operations_pb2 # type: ignore +from google.protobuf import empty_pb2 # type: ignore +from google.protobuf import field_mask_pb2 # type: ignore +from google.protobuf import timestamp_pb2 # type: ignore +from .transports.base import CatalogServiceTransport, DEFAULT_CLIENT_INFO +from .transports.grpc import CatalogServiceGrpcTransport +from .transports.grpc_asyncio import CatalogServiceGrpcAsyncIOTransport + + +class CatalogServiceClientMeta(type): + """Metaclass for the CatalogService client. + + This provides class-level methods for building and retrieving + support objects (e.g. transport) without polluting the client instance + objects. 
+ """ + _transport_registry = OrderedDict() # type: Dict[str, Type[CatalogServiceTransport]] + _transport_registry["grpc"] = CatalogServiceGrpcTransport + _transport_registry["grpc_asyncio"] = CatalogServiceGrpcAsyncIOTransport + + def get_transport_class(cls, + label: Optional[str] = None, + ) -> Type[CatalogServiceTransport]: + """Returns an appropriate transport class. + + Args: + label: The name of the desired transport. If none is + provided, then the first transport in the registry is used. + + Returns: + The transport class to use. + """ + # If a specific transport is requested, return that one. + if label: + return cls._transport_registry[label] + + # No transport is requested; return the default (that is, the first one + # in the dictionary). + return next(iter(cls._transport_registry.values())) + + +class CatalogServiceClient(metaclass=CatalogServiceClientMeta): + """The primary resources offered by this service are + EntryGroups, EntryTypes, AspectTypes, and Entries. They + collectively let data administrators organize, manage, secure, + and catalog data located across cloud projects in their + organization in a variety of storage systems, including Cloud + Storage and BigQuery. + """ + + @staticmethod + def _get_default_mtls_endpoint(api_endpoint): + """Converts api endpoint to mTLS endpoint. + + Convert "*.sandbox.googleapis.com" and "*.googleapis.com" to + "*.mtls.sandbox.googleapis.com" and "*.mtls.googleapis.com" respectively. + Args: + api_endpoint (Optional[str]): the api endpoint to convert. + Returns: + str: converted mTLS api endpoint. + """ + if not api_endpoint: + return api_endpoint + + mtls_endpoint_re = re.compile( + r"(?P[^.]+)(?P\.mtls)?(?P\.sandbox)?(?P\.googleapis\.com)?" + ) + + m = mtls_endpoint_re.match(api_endpoint) + name, mtls, sandbox, googledomain = m.groups() + if mtls or not googledomain: + return api_endpoint + + if sandbox: + return api_endpoint.replace( + "sandbox.googleapis.com", "mtls.sandbox.googleapis.com" + ) + + return api_endpoint.replace(".googleapis.com", ".mtls.googleapis.com") + + # Note: DEFAULT_ENDPOINT is deprecated. Use _DEFAULT_ENDPOINT_TEMPLATE instead. + DEFAULT_ENDPOINT = "dataplex.googleapis.com" + DEFAULT_MTLS_ENDPOINT = _get_default_mtls_endpoint.__func__( # type: ignore + DEFAULT_ENDPOINT + ) + + _DEFAULT_ENDPOINT_TEMPLATE = "dataplex.{UNIVERSE_DOMAIN}" + _DEFAULT_UNIVERSE = "googleapis.com" + + @classmethod + def from_service_account_info(cls, info: dict, *args, **kwargs): + """Creates an instance of this client using the provided credentials + info. + + Args: + info (dict): The service account private key info. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + CatalogServiceClient: The constructed client. + """ + credentials = service_account.Credentials.from_service_account_info(info) + kwargs["credentials"] = credentials + return cls(*args, **kwargs) + + @classmethod + def from_service_account_file(cls, filename: str, *args, **kwargs): + """Creates an instance of this client using the provided credentials + file. + + Args: + filename (str): The path to the service account private key json + file. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + CatalogServiceClient: The constructed client. 
+ """ + credentials = service_account.Credentials.from_service_account_file( + filename) + kwargs["credentials"] = credentials + return cls(*args, **kwargs) + + from_service_account_json = from_service_account_file + + @property + def transport(self) -> CatalogServiceTransport: + """Returns the transport used by the client instance. + + Returns: + CatalogServiceTransport: The transport used by the client + instance. + """ + return self._transport + + @staticmethod + def aspect_type_path(project: str,location: str,aspect_type: str,) -> str: + """Returns a fully-qualified aspect_type string.""" + return "projects/{project}/locations/{location}/aspectTypes/{aspect_type}".format(project=project, location=location, aspect_type=aspect_type, ) + + @staticmethod + def parse_aspect_type_path(path: str) -> Dict[str,str]: + """Parses a aspect_type path into its component segments.""" + m = re.match(r"^projects/(?P.+?)/locations/(?P.+?)/aspectTypes/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def entry_path(project: str,location: str,entry_group: str,entry: str,) -> str: + """Returns a fully-qualified entry string.""" + return "projects/{project}/locations/{location}/entryGroups/{entry_group}/entries/{entry}".format(project=project, location=location, entry_group=entry_group, entry=entry, ) + + @staticmethod + def parse_entry_path(path: str) -> Dict[str,str]: + """Parses a entry path into its component segments.""" + m = re.match(r"^projects/(?P.+?)/locations/(?P.+?)/entryGroups/(?P.+?)/entries/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def entry_group_path(project: str,location: str,entry_group: str,) -> str: + """Returns a fully-qualified entry_group string.""" + return "projects/{project}/locations/{location}/entryGroups/{entry_group}".format(project=project, location=location, entry_group=entry_group, ) + + @staticmethod + def parse_entry_group_path(path: str) -> Dict[str,str]: + """Parses a entry_group path into its component segments.""" + m = re.match(r"^projects/(?P.+?)/locations/(?P.+?)/entryGroups/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def entry_type_path(project: str,location: str,entry_type: str,) -> str: + """Returns a fully-qualified entry_type string.""" + return "projects/{project}/locations/{location}/entryTypes/{entry_type}".format(project=project, location=location, entry_type=entry_type, ) + + @staticmethod + def parse_entry_type_path(path: str) -> Dict[str,str]: + """Parses a entry_type path into its component segments.""" + m = re.match(r"^projects/(?P.+?)/locations/(?P.+?)/entryTypes/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def metadata_job_path(project: str,location: str,metadataJob: str,) -> str: + """Returns a fully-qualified metadata_job string.""" + return "projects/{project}/locations/{location}/metadataJobs/{metadataJob}".format(project=project, location=location, metadataJob=metadataJob, ) + + @staticmethod + def parse_metadata_job_path(path: str) -> Dict[str,str]: + """Parses a metadata_job path into its component segments.""" + m = re.match(r"^projects/(?P.+?)/locations/(?P.+?)/metadataJobs/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_billing_account_path(billing_account: str, ) -> str: + """Returns a fully-qualified billing_account string.""" + return "billingAccounts/{billing_account}".format(billing_account=billing_account, ) + + @staticmethod + def parse_common_billing_account_path(path: str) -> 
+        """Parse a billing_account path into its component segments."""
+        m = re.match(r"^billingAccounts/(?P<billing_account>.+?)$", path)
+        return m.groupdict() if m else {}
+
+    @staticmethod
+    def common_folder_path(folder: str, ) -> str:
+        """Returns a fully-qualified folder string."""
+        return "folders/{folder}".format(folder=folder, )
+
+    @staticmethod
+    def parse_common_folder_path(path: str) -> Dict[str,str]:
+        """Parse a folder path into its component segments."""
+        m = re.match(r"^folders/(?P<folder>.+?)$", path)
+        return m.groupdict() if m else {}
+
+    @staticmethod
+    def common_organization_path(organization: str, ) -> str:
+        """Returns a fully-qualified organization string."""
+        return "organizations/{organization}".format(organization=organization, )
+
+    @staticmethod
+    def parse_common_organization_path(path: str) -> Dict[str,str]:
+        """Parse an organization path into its component segments."""
+        m = re.match(r"^organizations/(?P<organization>.+?)$", path)
+        return m.groupdict() if m else {}
+
+    @staticmethod
+    def common_project_path(project: str, ) -> str:
+        """Returns a fully-qualified project string."""
+        return "projects/{project}".format(project=project, )
+
+    @staticmethod
+    def parse_common_project_path(path: str) -> Dict[str,str]:
+        """Parse a project path into its component segments."""
+        m = re.match(r"^projects/(?P<project>.+?)$", path)
+        return m.groupdict() if m else {}
+
+    @staticmethod
+    def common_location_path(project: str, location: str, ) -> str:
+        """Returns a fully-qualified location string."""
+        return "projects/{project}/locations/{location}".format(project=project, location=location, )
+
+    @staticmethod
+    def parse_common_location_path(path: str) -> Dict[str,str]:
+        """Parse a location path into its component segments."""
+        m = re.match(r"^projects/(?P<project>.+?)/locations/(?P<location>.+?)$", path)
+        return m.groupdict() if m else {}
+
+    @classmethod
+    def get_mtls_endpoint_and_cert_source(cls, client_options: Optional[client_options_lib.ClientOptions] = None):
+        """Deprecated. Return the API endpoint and client cert source for mutual TLS.
+
+        The client cert source is determined in the following order:
+        (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the
+        client cert source is None.
+        (2) if `client_options.client_cert_source` is provided, use the provided one; if the
+        default client cert source exists, use the default one; otherwise the client cert
+        source is None.
+
+        The API endpoint is determined in the following order:
+        (1) if `client_options.api_endpoint` is provided, use the provided one.
+        (2) if `GOOGLE_API_USE_MTLS_ENDPOINT` environment variable is "always", use the
+        default mTLS endpoint; if the environment variable is "never", use the default API
+        endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise
+        use the default API endpoint.
+
+        More details can be found at https://google.aip.dev/auth/4114.
+
+        Args:
+            client_options (google.api_core.client_options.ClientOptions): Custom options for the
+                client. Only the `api_endpoint` and `client_cert_source` properties may be used
+                in this method.
+
+        Returns:
+            Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the
+                client cert source to use.
+
+        Raises:
+            google.auth.exceptions.MutualTLSChannelError: If any errors happen.
+        """
+
+        warnings.warn("get_mtls_endpoint_and_cert_source is deprecated. Use the api_endpoint property instead.",
+            DeprecationWarning)
+        if client_options is None:
+            client_options = client_options_lib.ClientOptions()
+        use_client_cert = os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false")
+        use_mtls_endpoint = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto")
+        if use_client_cert not in ("true", "false"):
+            raise ValueError("Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`")
+        if use_mtls_endpoint not in ("auto", "never", "always"):
+            raise MutualTLSChannelError("Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`")
+
+        # Figure out the client cert source to use.
+        client_cert_source = None
+        if use_client_cert == "true":
+            if client_options.client_cert_source:
+                client_cert_source = client_options.client_cert_source
+            elif mtls.has_default_client_cert_source():
+                client_cert_source = mtls.default_client_cert_source()
+
+        # Figure out which api endpoint to use.
+        if client_options.api_endpoint is not None:
+            api_endpoint = client_options.api_endpoint
+        elif use_mtls_endpoint == "always" or (use_mtls_endpoint == "auto" and client_cert_source):
+            api_endpoint = cls.DEFAULT_MTLS_ENDPOINT
+        else:
+            api_endpoint = cls.DEFAULT_ENDPOINT
+
+        return api_endpoint, client_cert_source
+
+    @staticmethod
+    def _read_environment_variables():
+        """Returns the environment variables used by the client.
+
+        Returns:
+            Tuple[bool, str, str]: returns the GOOGLE_API_USE_CLIENT_CERTIFICATE,
+            GOOGLE_API_USE_MTLS_ENDPOINT, and GOOGLE_CLOUD_UNIVERSE_DOMAIN environment variables.
+
+        Raises:
+            ValueError: If GOOGLE_API_USE_CLIENT_CERTIFICATE is not
+                any of ["true", "false"].
+            google.auth.exceptions.MutualTLSChannelError: If GOOGLE_API_USE_MTLS_ENDPOINT
+                is not any of ["auto", "never", "always"].
+        """
+        use_client_cert = os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false").lower()
+        use_mtls_endpoint = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto").lower()
+        universe_domain_env = os.getenv("GOOGLE_CLOUD_UNIVERSE_DOMAIN")
+        if use_client_cert not in ("true", "false"):
+            raise ValueError("Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`")
+        if use_mtls_endpoint not in ("auto", "never", "always"):
+            raise MutualTLSChannelError("Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`")
+        return use_client_cert == "true", use_mtls_endpoint, universe_domain_env
+
+    @staticmethod
+    def _get_client_cert_source(provided_cert_source, use_cert_flag):
+        """Return the client cert source to be used by the client.
+
+        Args:
+            provided_cert_source (bytes): The client certificate source provided.
+            use_cert_flag (bool): A flag indicating whether to use the client certificate.
+
+        Returns:
+            bytes or None: The client cert source to be used by the client.
+        """
+        client_cert_source = None
+        if use_cert_flag:
+            if provided_cert_source:
+                client_cert_source = provided_cert_source
+            elif mtls.has_default_client_cert_source():
+                client_cert_source = mtls.default_client_cert_source()
+        return client_cert_source
+
+    @staticmethod
+    def _get_api_endpoint(api_override, client_cert_source, universe_domain, use_mtls_endpoint):
+        """Return the API endpoint used by the client.
+
+        Args:
+            api_override (str): The API endpoint override. If specified, this is always
+                the return value of this function and the other arguments are not used.
+            client_cert_source (bytes): The client certificate source used by the client.
+ universe_domain (str): The universe domain used by the client. + use_mtls_endpoint (str): How to use the mTLS endpoint, which depends also on the other parameters. + Possible values are "always", "auto", or "never". + + Returns: + str: The API endpoint to be used by the client. + """ + if api_override is not None: + api_endpoint = api_override + elif use_mtls_endpoint == "always" or (use_mtls_endpoint == "auto" and client_cert_source): + _default_universe = CatalogServiceClient._DEFAULT_UNIVERSE + if universe_domain != _default_universe: + raise MutualTLSChannelError(f"mTLS is not supported in any universe other than {_default_universe}.") + api_endpoint = CatalogServiceClient.DEFAULT_MTLS_ENDPOINT + else: + api_endpoint = CatalogServiceClient._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=universe_domain) + return api_endpoint + + @staticmethod + def _get_universe_domain(client_universe_domain: Optional[str], universe_domain_env: Optional[str]) -> str: + """Return the universe domain used by the client. + + Args: + client_universe_domain (Optional[str]): The universe domain configured via the client options. + universe_domain_env (Optional[str]): The universe domain configured via the "GOOGLE_CLOUD_UNIVERSE_DOMAIN" environment variable. + + Returns: + str: The universe domain to be used by the client. + + Raises: + ValueError: If the universe domain is an empty string. + """ + universe_domain = CatalogServiceClient._DEFAULT_UNIVERSE + if client_universe_domain is not None: + universe_domain = client_universe_domain + elif universe_domain_env is not None: + universe_domain = universe_domain_env + if len(universe_domain.strip()) == 0: + raise ValueError("Universe Domain cannot be an empty string.") + return universe_domain + + def _validate_universe_domain(self): + """Validates client's and credentials' universe domains are consistent. + + Returns: + bool: True iff the configured universe domain is valid. + + Raises: + ValueError: If the configured universe domain is not valid. + """ + + # NOTE (b/349488459): universe validation is disabled until further notice. + return True + + @property + def api_endpoint(self): + """Return the API endpoint used by the client instance. + + Returns: + str: The API endpoint used by the client instance. + """ + return self._api_endpoint + + @property + def universe_domain(self) -> str: + """Return the universe domain used by the client instance. + + Returns: + str: The universe domain used by the client instance. + """ + return self._universe_domain + + def __init__(self, *, + credentials: Optional[ga_credentials.Credentials] = None, + transport: Optional[Union[str, CatalogServiceTransport, Callable[..., CatalogServiceTransport]]] = None, + client_options: Optional[Union[client_options_lib.ClientOptions, dict]] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + ) -> None: + """Instantiates the catalog service client. + + Args: + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + transport (Optional[Union[str,CatalogServiceTransport,Callable[..., CatalogServiceTransport]]]): + The transport to use, or a Callable that constructs and returns a new transport. + If a Callable is given, it will be called with the same set of initialization + arguments as used in the CatalogServiceTransport constructor. 
+                If set to None, a transport is chosen automatically.
+            client_options (Optional[Union[google.api_core.client_options.ClientOptions, dict]]):
+                Custom options for the client.
+
+                1. The ``api_endpoint`` property can be used to override the
+                default endpoint provided by the client when ``transport`` is
+                not explicitly provided. Only if this property is not set and
+                ``transport`` was not explicitly provided, the endpoint is
+                determined by the GOOGLE_API_USE_MTLS_ENDPOINT environment
+                variable, which can have one of the following values:
+                "always" (always use the default mTLS endpoint), "never" (always
+                use the default regular endpoint) and "auto" (auto-switch to the
+                default mTLS endpoint if client certificate is present; this is
+                the default value).
+
+                2. If the GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable
+                is "true", then the ``client_cert_source`` property can be used
+                to provide a client certificate for mTLS transport. If
+                not provided, the default SSL client certificate will be used if
+                present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not
+                set, no client certificate will be used.
+
+                3. The ``universe_domain`` property can be used to override the
+                default "googleapis.com" universe. Note that the ``api_endpoint``
+                property still takes precedence; and ``universe_domain`` is
+                currently not supported for mTLS.
+
+            client_info (google.api_core.gapic_v1.client_info.ClientInfo):
+                The client info used to send a user-agent string along with
+                API requests. If ``None``, then default info will be used.
+                Generally, you only need to set this if you're developing
+                your own client library.
+
+        Raises:
+            google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport
+                creation failed for any reason.
+        """
+        self._client_options = client_options
+        if isinstance(self._client_options, dict):
+            self._client_options = client_options_lib.from_dict(self._client_options)
+        if self._client_options is None:
+            self._client_options = client_options_lib.ClientOptions()
+        self._client_options = cast(client_options_lib.ClientOptions, self._client_options)
+
+        universe_domain_opt = getattr(self._client_options, 'universe_domain', None)
+
+        self._use_client_cert, self._use_mtls_endpoint, self._universe_domain_env = CatalogServiceClient._read_environment_variables()
+        self._client_cert_source = CatalogServiceClient._get_client_cert_source(self._client_options.client_cert_source, self._use_client_cert)
+        self._universe_domain = CatalogServiceClient._get_universe_domain(universe_domain_opt, self._universe_domain_env)
+        self._api_endpoint = None  # updated below, depending on `transport`
+
+        # Initialize the universe domain validation.
+        self._is_universe_domain_valid = False
+
+        api_key_value = getattr(self._client_options, "api_key", None)
+        if api_key_value and credentials:
+            raise ValueError("client_options.api_key and credentials are mutually exclusive")
+
+        # Save or instantiate the transport.
+        # Ordinarily, we provide the transport, but allowing a custom transport
+        # instance provides an extensibility point for unusual situations.
+        transport_provided = isinstance(transport, CatalogServiceTransport)
+        if transport_provided:
+            # transport is a CatalogServiceTransport instance.
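+            # A pre-built transport instance already carries its own channel
+            # and credentials, so competing credential options would be
+            # silently ignored; reject them explicitly instead.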
+ if credentials or self._client_options.credentials_file or api_key_value: + raise ValueError("When providing a transport instance, " + "provide its credentials directly.") + if self._client_options.scopes: + raise ValueError( + "When providing a transport instance, provide its scopes " + "directly." + ) + self._transport = cast(CatalogServiceTransport, transport) + self._api_endpoint = self._transport.host + + self._api_endpoint = (self._api_endpoint or + CatalogServiceClient._get_api_endpoint( + self._client_options.api_endpoint, + self._client_cert_source, + self._universe_domain, + self._use_mtls_endpoint)) + + if not transport_provided: + import google.auth._default # type: ignore + + if api_key_value and hasattr(google.auth._default, "get_api_key_credentials"): + credentials = google.auth._default.get_api_key_credentials(api_key_value) + + transport_init: Union[Type[CatalogServiceTransport], Callable[..., CatalogServiceTransport]] = ( + CatalogServiceClient.get_transport_class(transport) + if isinstance(transport, str) or transport is None + else cast(Callable[..., CatalogServiceTransport], transport) + ) + # initialize with the provided callable or the passed in class + self._transport = transport_init( + credentials=credentials, + credentials_file=self._client_options.credentials_file, + host=self._api_endpoint, + scopes=self._client_options.scopes, + client_cert_source_for_mtls=self._client_cert_source, + quota_project_id=self._client_options.quota_project_id, + client_info=client_info, + always_use_jwt_access=True, + api_audience=self._client_options.api_audience, + ) + + def create_entry_type(self, + request: Optional[Union[catalog.CreateEntryTypeRequest, dict]] = None, + *, + parent: Optional[str] = None, + entry_type: Optional[catalog.EntryType] = None, + entry_type_id: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation.Operation: + r"""Creates an EntryType. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dataplex_v1 + + def sample_create_entry_type(): + # Create a client + client = dataplex_v1.CatalogServiceClient() + + # Initialize request argument(s) + request = dataplex_v1.CreateEntryTypeRequest( + parent="parent_value", + entry_type_id="entry_type_id_value", + ) + + # Make the request + operation = client.create_entry_type(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.dataplex_v1.types.CreateEntryTypeRequest, dict]): + The request object. Create EntryType Request. + parent (str): + Required. The resource name of the EntryType, of the + form: projects/{project_number}/locations/{location_id} + where ``location_id`` refers to a Google Cloud region. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + entry_type (google.cloud.dataplex_v1.types.EntryType): + Required. EntryType Resource. 
+ This corresponds to the ``entry_type`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + entry_type_id (str): + Required. EntryType identifier. + This corresponds to the ``entry_type_id`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation.Operation: + An object representing a long-running operation. + + The result type for the operation will be + :class:`google.cloud.dataplex_v1.types.EntryType` Entry + Type is a template for creating Entries. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent, entry_type, entry_type_id]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, catalog.CreateEntryTypeRequest): + request = catalog.CreateEntryTypeRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + if entry_type is not None: + request.entry_type = entry_type + if entry_type_id is not None: + request.entry_type_id = entry_type_id + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.create_entry_type] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("parent", request.parent), + )), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation.from_gapic( + response, + self._transport.operations_client, + catalog.EntryType, + metadata_type=service.OperationMetadata, + ) + + # Done; return the response. + return response + + def update_entry_type(self, + request: Optional[Union[catalog.UpdateEntryTypeRequest, dict]] = None, + *, + entry_type: Optional[catalog.EntryType] = None, + update_mask: Optional[field_mask_pb2.FieldMask] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation.Operation: + r"""Updates an EntryType. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dataplex_v1 + + def sample_update_entry_type(): + # Create a client + client = dataplex_v1.CatalogServiceClient() + + # Initialize request argument(s) + request = dataplex_v1.UpdateEntryTypeRequest( + ) + + # Make the request + operation = client.update_entry_type(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.dataplex_v1.types.UpdateEntryTypeRequest, dict]): + The request object. Update EntryType Request. + entry_type (google.cloud.dataplex_v1.types.EntryType): + Required. EntryType Resource. + This corresponds to the ``entry_type`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + update_mask (google.protobuf.field_mask_pb2.FieldMask): + Required. Mask of fields to update. + This corresponds to the ``update_mask`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation.Operation: + An object representing a long-running operation. + + The result type for the operation will be + :class:`google.cloud.dataplex_v1.types.EntryType` Entry + Type is a template for creating Entries. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([entry_type, update_mask]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, catalog.UpdateEntryTypeRequest): + request = catalog.UpdateEntryTypeRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if entry_type is not None: + request.entry_type = entry_type + if update_mask is not None: + request.update_mask = update_mask + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.update_entry_type] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("entry_type.name", request.entry_type.name), + )), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation.from_gapic( + response, + self._transport.operations_client, + catalog.EntryType, + metadata_type=service.OperationMetadata, + ) + + # Done; return the response. 
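+ # Illustrative usage sketch (not generated code; the resource name and
+ # field values are hypothetical). The generated sample above leaves
+ # update_mask unset; in practice the mask selects which EntryType
+ # fields the service should overwrite:
+ #
+ #     from google.protobuf import field_mask_pb2
+ #     entry_type = dataplex_v1.EntryType(
+ #         name="projects/123/locations/us-central1/entryTypes/my-type",
+ #         description="Updated description",
+ #     )
+ #     operation = client.update_entry_type(
+ #         entry_type=entry_type,
+ #         update_mask=field_mask_pb2.FieldMask(paths=["description"]),
+ #     )
+ #     updated = operation.result()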
+ return response
+
+ def delete_entry_type(self,
+ request: Optional[Union[catalog.DeleteEntryTypeRequest, dict]] = None,
+ *,
+ name: Optional[str] = None,
+ retry: OptionalRetry = gapic_v1.method.DEFAULT,
+ timeout: Union[float, object] = gapic_v1.method.DEFAULT,
+ metadata: Sequence[Tuple[str, str]] = (),
+ ) -> operation.Operation:
+ r"""Deletes an EntryType.
+
+ .. code-block:: python
+
+ # This snippet has been automatically generated and should be regarded as a
+ # code template only.
+ # It will require modifications to work:
+ # - It may require correct/in-range values for request initialization.
+ # - It may require specifying regional endpoints when creating the service
+ # client as shown in:
+ # https://googleapis.dev/python/google-api-core/latest/client_options.html
+ from google.cloud import dataplex_v1
+
+ def sample_delete_entry_type():
+ # Create a client
+ client = dataplex_v1.CatalogServiceClient()
+
+ # Initialize request argument(s)
+ request = dataplex_v1.DeleteEntryTypeRequest(
+ name="name_value",
+ )
+
+ # Make the request
+ operation = client.delete_entry_type(request=request)
+
+ print("Waiting for operation to complete...")
+
+ response = operation.result()
+
+ # Handle the response
+ print(response)
+
+ Args:
+ request (Union[google.cloud.dataplex_v1.types.DeleteEntryTypeRequest, dict]):
+ The request object. Delete EntryType Request.
+ name (str):
+ Required. The resource name of the EntryType:
+ ``projects/{project_number}/locations/{location_id}/entryTypes/{entry_type_id}``.
+
+ This corresponds to the ``name`` field
+ on the ``request`` instance; if ``request`` is provided, this
+ should not be set.
+ retry (google.api_core.retry.Retry): Designation of what errors, if any,
+ should be retried.
+ timeout (float): The timeout for this request.
+ metadata (Sequence[Tuple[str, str]]): Strings which should be
+ sent along with the request as metadata.
+
+ Returns:
+ google.api_core.operation.Operation:
+ An object representing a long-running operation.
+
+ The result type for the operation will be :class:`google.protobuf.empty_pb2.Empty` A generic empty message that you can re-use to avoid defining duplicated
+ empty messages in your APIs. A typical example is to
+ use it as the request or the response type of an API
+ method. For instance:
+
+ service Foo {
+ rpc Bar(google.protobuf.Empty) returns
+ (google.protobuf.Empty);
+
+ }
+
+ """
+ # Create or coerce a protobuf request object.
+ # - Quick check: If we got a request object, we should *not* have
+ # gotten any keyword arguments that map to the request.
+ has_flattened_params = any([name])
+ if request is not None and has_flattened_params:
+ raise ValueError('If the `request` argument is set, then none of '
+ 'the individual field arguments should be set.')
+
+ # - Use the request object if provided (there's no risk of modifying the input as
+ # there are no flattened fields), or create one.
+ if not isinstance(request, catalog.DeleteEntryTypeRequest):
+ request = catalog.DeleteEntryTypeRequest(request)
+ # If we have keyword arguments corresponding to fields on the
+ # request, apply these.
+ if name is not None:
+ request.name = name
+
+ # Wrap the RPC method; this adds retry and timeout information,
+ # and friendly error handling.
+ rpc = self._transport._wrapped_methods[self._transport.delete_entry_type]
+
+ # Certain fields should be provided within the metadata header;
+ # add these here.
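+ # A brief illustration (the value shown is a placeholder, not
+ # generated code): to_grpc_metadata() below yields a pair such as
+ # ("x-goog-request-params",
+ #  "name=projects/123/locations/us-central1/entryTypes/my-type"),
+ # which is how the resource name reaches the service's request router.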
+ metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation.from_gapic( + response, + self._transport.operations_client, + empty_pb2.Empty, + metadata_type=service.OperationMetadata, + ) + + # Done; return the response. + return response + + def list_entry_types(self, + request: Optional[Union[catalog.ListEntryTypesRequest, dict]] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListEntryTypesPager: + r"""Lists EntryType resources in a project and location. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dataplex_v1 + + def sample_list_entry_types(): + # Create a client + client = dataplex_v1.CatalogServiceClient() + + # Initialize request argument(s) + request = dataplex_v1.ListEntryTypesRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_entry_types(request=request) + + # Handle the response + for response in page_result: + print(response) + + Args: + request (Union[google.cloud.dataplex_v1.types.ListEntryTypesRequest, dict]): + The request object. List EntryTypes request + parent (str): + Required. The resource name of the EntryType location, + of the form: + ``projects/{project_number}/locations/{location_id}`` + where ``location_id`` refers to a Google Cloud region. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.dataplex_v1.services.catalog_service.pagers.ListEntryTypesPager: + List EntryTypes response. + + Iterating over this object will yield + results and resolve additional pages + automatically. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, catalog.ListEntryTypesRequest): + request = catalog.ListEntryTypesRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. 
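+ # Illustrative pagination sketch (not generated code; the parent value
+ # is a placeholder). The pager returned by this method fetches pages
+ # lazily; page-wise access is available via `.pages`:
+ #
+ #     pager = client.list_entry_types(
+ #         parent="projects/123/locations/us-central1")
+ #     for page in pager.pages:
+ #         for entry_type in page.entry_types:
+ #             print(entry_type.name)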
+ if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.list_entry_types] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("parent", request.parent), + )), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__iter__` convenience method. + response = pagers.ListEntryTypesPager( + method=rpc, + request=request, + response=response, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def get_entry_type(self, + request: Optional[Union[catalog.GetEntryTypeRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> catalog.EntryType: + r"""Gets an EntryType. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dataplex_v1 + + def sample_get_entry_type(): + # Create a client + client = dataplex_v1.CatalogServiceClient() + + # Initialize request argument(s) + request = dataplex_v1.GetEntryTypeRequest( + name="name_value", + ) + + # Make the request + response = client.get_entry_type(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.dataplex_v1.types.GetEntryTypeRequest, dict]): + The request object. Get EntryType request. + name (str): + Required. The resource name of the EntryType: + ``projects/{project_number}/locations/{location_id}/entryTypes/{entry_type_id}``. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.dataplex_v1.types.EntryType: + Entry Type is a template for creating + Entries. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, catalog.GetEntryTypeRequest): + request = catalog.GetEntryTypeRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. 
+ if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.get_entry_type] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def create_aspect_type(self, + request: Optional[Union[catalog.CreateAspectTypeRequest, dict]] = None, + *, + parent: Optional[str] = None, + aspect_type: Optional[catalog.AspectType] = None, + aspect_type_id: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation.Operation: + r"""Creates an AspectType. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dataplex_v1 + + def sample_create_aspect_type(): + # Create a client + client = dataplex_v1.CatalogServiceClient() + + # Initialize request argument(s) + aspect_type = dataplex_v1.AspectType() + aspect_type.metadata_template.name = "name_value" + aspect_type.metadata_template.type_ = "type__value" + + request = dataplex_v1.CreateAspectTypeRequest( + parent="parent_value", + aspect_type_id="aspect_type_id_value", + aspect_type=aspect_type, + ) + + # Make the request + operation = client.create_aspect_type(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.dataplex_v1.types.CreateAspectTypeRequest, dict]): + The request object. Create AspectType Request. + parent (str): + Required. The resource name of the AspectType, of the + form: projects/{project_number}/locations/{location_id} + where ``location_id`` refers to a Google Cloud region. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + aspect_type (google.cloud.dataplex_v1.types.AspectType): + Required. AspectType Resource. + This corresponds to the ``aspect_type`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + aspect_type_id (str): + Required. AspectType identifier. + This corresponds to the ``aspect_type_id`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation.Operation: + An object representing a long-running operation. 
+ + The result type for the operation will be :class:`google.cloud.dataplex_v1.types.AspectType` AspectType is a template for creating Aspects, and represents the + JSON-schema for a given Entry, for example, BigQuery + Table Schema. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent, aspect_type, aspect_type_id]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, catalog.CreateAspectTypeRequest): + request = catalog.CreateAspectTypeRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + if aspect_type is not None: + request.aspect_type = aspect_type + if aspect_type_id is not None: + request.aspect_type_id = aspect_type_id + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.create_aspect_type] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("parent", request.parent), + )), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation.from_gapic( + response, + self._transport.operations_client, + catalog.AspectType, + metadata_type=service.OperationMetadata, + ) + + # Done; return the response. + return response + + def update_aspect_type(self, + request: Optional[Union[catalog.UpdateAspectTypeRequest, dict]] = None, + *, + aspect_type: Optional[catalog.AspectType] = None, + update_mask: Optional[field_mask_pb2.FieldMask] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation.Operation: + r"""Updates an AspectType. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dataplex_v1 + + def sample_update_aspect_type(): + # Create a client + client = dataplex_v1.CatalogServiceClient() + + # Initialize request argument(s) + aspect_type = dataplex_v1.AspectType() + aspect_type.metadata_template.name = "name_value" + aspect_type.metadata_template.type_ = "type__value" + + request = dataplex_v1.UpdateAspectTypeRequest( + aspect_type=aspect_type, + ) + + # Make the request + operation = client.update_aspect_type(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.dataplex_v1.types.UpdateAspectTypeRequest, dict]): + The request object. Update AspectType Request + aspect_type (google.cloud.dataplex_v1.types.AspectType): + Required. AspectType Resource + This corresponds to the ``aspect_type`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + update_mask (google.protobuf.field_mask_pb2.FieldMask): + Required. Mask of fields to update. + This corresponds to the ``update_mask`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation.Operation: + An object representing a long-running operation. + + The result type for the operation will be :class:`google.cloud.dataplex_v1.types.AspectType` AspectType is a template for creating Aspects, and represents the + JSON-schema for a given Entry, for example, BigQuery + Table Schema. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([aspect_type, update_mask]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, catalog.UpdateAspectTypeRequest): + request = catalog.UpdateAspectTypeRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if aspect_type is not None: + request.aspect_type = aspect_type + if update_mask is not None: + request.update_mask = update_mask + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.update_aspect_type] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("aspect_type.name", request.aspect_type.name), + )), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. 
+ response = operation.from_gapic(
+ response,
+ self._transport.operations_client,
+ catalog.AspectType,
+ metadata_type=service.OperationMetadata,
+ )
+
+ # Done; return the response.
+ return response
+
+ def delete_aspect_type(self,
+ request: Optional[Union[catalog.DeleteAspectTypeRequest, dict]] = None,
+ *,
+ name: Optional[str] = None,
+ retry: OptionalRetry = gapic_v1.method.DEFAULT,
+ timeout: Union[float, object] = gapic_v1.method.DEFAULT,
+ metadata: Sequence[Tuple[str, str]] = (),
+ ) -> operation.Operation:
+ r"""Deletes an AspectType.
+
+ .. code-block:: python
+
+ # This snippet has been automatically generated and should be regarded as a
+ # code template only.
+ # It will require modifications to work:
+ # - It may require correct/in-range values for request initialization.
+ # - It may require specifying regional endpoints when creating the service
+ # client as shown in:
+ # https://googleapis.dev/python/google-api-core/latest/client_options.html
+ from google.cloud import dataplex_v1
+
+ def sample_delete_aspect_type():
+ # Create a client
+ client = dataplex_v1.CatalogServiceClient()
+
+ # Initialize request argument(s)
+ request = dataplex_v1.DeleteAspectTypeRequest(
+ name="name_value",
+ )
+
+ # Make the request
+ operation = client.delete_aspect_type(request=request)
+
+ print("Waiting for operation to complete...")
+
+ response = operation.result()
+
+ # Handle the response
+ print(response)
+
+ Args:
+ request (Union[google.cloud.dataplex_v1.types.DeleteAspectTypeRequest, dict]):
+ The request object. Delete AspectType Request.
+ name (str):
+ Required. The resource name of the AspectType:
+ ``projects/{project_number}/locations/{location_id}/aspectTypes/{aspect_type_id}``.
+
+ This corresponds to the ``name`` field
+ on the ``request`` instance; if ``request`` is provided, this
+ should not be set.
+ retry (google.api_core.retry.Retry): Designation of what errors, if any,
+ should be retried.
+ timeout (float): The timeout for this request.
+ metadata (Sequence[Tuple[str, str]]): Strings which should be
+ sent along with the request as metadata.
+
+ Returns:
+ google.api_core.operation.Operation:
+ An object representing a long-running operation.
+
+ The result type for the operation will be :class:`google.protobuf.empty_pb2.Empty` A generic empty message that you can re-use to avoid defining duplicated
+ empty messages in your APIs. A typical example is to
+ use it as the request or the response type of an API
+ method. For instance:
+
+ service Foo {
+ rpc Bar(google.protobuf.Empty) returns
+ (google.protobuf.Empty);
+
+ }
+
+ """
+ # Create or coerce a protobuf request object.
+ # - Quick check: If we got a request object, we should *not* have
+ # gotten any keyword arguments that map to the request.
+ has_flattened_params = any([name])
+ if request is not None and has_flattened_params:
+ raise ValueError('If the `request` argument is set, then none of '
+ 'the individual field arguments should be set.')
+
+ # - Use the request object if provided (there's no risk of modifying the input as
+ # there are no flattened fields), or create one.
+ if not isinstance(request, catalog.DeleteAspectTypeRequest):
+ request = catalog.DeleteAspectTypeRequest(request)
+ # If we have keyword arguments corresponding to fields on the
+ # request, apply these.
+ if name is not None:
+ request.name = name
+
+ # Wrap the RPC method; this adds retry and timeout information,
+ # and friendly error handling.
+ rpc = self._transport._wrapped_methods[self._transport.delete_aspect_type] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation.from_gapic( + response, + self._transport.operations_client, + empty_pb2.Empty, + metadata_type=service.OperationMetadata, + ) + + # Done; return the response. + return response + + def list_aspect_types(self, + request: Optional[Union[catalog.ListAspectTypesRequest, dict]] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListAspectTypesPager: + r"""Lists AspectType resources in a project and location. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dataplex_v1 + + def sample_list_aspect_types(): + # Create a client + client = dataplex_v1.CatalogServiceClient() + + # Initialize request argument(s) + request = dataplex_v1.ListAspectTypesRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_aspect_types(request=request) + + # Handle the response + for response in page_result: + print(response) + + Args: + request (Union[google.cloud.dataplex_v1.types.ListAspectTypesRequest, dict]): + The request object. List AspectTypes request. + parent (str): + Required. The resource name of the AspectType location, + of the form: + ``projects/{project_number}/locations/{location_id}`` + where ``location_id`` refers to a Google Cloud region. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.dataplex_v1.services.catalog_service.pagers.ListAspectTypesPager: + List AspectTypes response. + + Iterating over this object will yield + results and resolve additional pages + automatically. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. 
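+ # Illustrative sketch of the dict-to-message coercion performed just
+ # below (not generated code; field values are placeholders). A plain
+ # dict with matching field names is accepted wherever a request
+ # message is expected:
+ #
+ #     pager = client.list_aspect_types(request={
+ #         "parent": "projects/123/locations/us-central1",
+ #         "page_size": 50,
+ #     })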
+ if not isinstance(request, catalog.ListAspectTypesRequest): + request = catalog.ListAspectTypesRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.list_aspect_types] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("parent", request.parent), + )), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__iter__` convenience method. + response = pagers.ListAspectTypesPager( + method=rpc, + request=request, + response=response, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def get_aspect_type(self, + request: Optional[Union[catalog.GetAspectTypeRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> catalog.AspectType: + r"""Gets an AspectType. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dataplex_v1 + + def sample_get_aspect_type(): + # Create a client + client = dataplex_v1.CatalogServiceClient() + + # Initialize request argument(s) + request = dataplex_v1.GetAspectTypeRequest( + name="name_value", + ) + + # Make the request + response = client.get_aspect_type(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.dataplex_v1.types.GetAspectTypeRequest, dict]): + The request object. Get AspectType request. + name (str): + Required. The resource name of the AspectType: + ``projects/{project_number}/locations/{location_id}/aspectTypes/{aspect_type_id}``. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.dataplex_v1.types.AspectType: + AspectType is a template for creating + Aspects, and represents the JSON-schema + for a given Entry, for example, BigQuery + Table Schema. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
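+ # Illustrative sketch of the check below (not generated code; the
+ # resource names are placeholders): `request` and flattened arguments
+ # are mutually exclusive, so this call raises ValueError:
+ #
+ #     client.get_aspect_type(
+ #         request=dataplex_v1.GetAspectTypeRequest(
+ #             name="projects/123/locations/us-central1/aspectTypes/t"),
+ #         name="projects/123/locations/us-central1/aspectTypes/t",
+ #     )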
+ has_flattened_params = any([name])
+ if request is not None and has_flattened_params:
+ raise ValueError('If the `request` argument is set, then none of '
+ 'the individual field arguments should be set.')
+
+ # - Use the request object if provided (there's no risk of modifying the input as
+ # there are no flattened fields), or create one.
+ if not isinstance(request, catalog.GetAspectTypeRequest):
+ request = catalog.GetAspectTypeRequest(request)
+ # If we have keyword arguments corresponding to fields on the
+ # request, apply these.
+ if name is not None:
+ request.name = name
+
+ # Wrap the RPC method; this adds retry and timeout information,
+ # and friendly error handling.
+ rpc = self._transport._wrapped_methods[self._transport.get_aspect_type]
+
+ # Certain fields should be provided within the metadata header;
+ # add these here.
+ metadata = tuple(metadata) + (
+ gapic_v1.routing_header.to_grpc_metadata((
+ ("name", request.name),
+ )),
+ )
+
+ # Validate the universe domain.
+ self._validate_universe_domain()
+
+ # Send the request.
+ response = rpc(
+ request,
+ retry=retry,
+ timeout=timeout,
+ metadata=metadata,
+ )
+
+ # Done; return the response.
+ return response
+
+ def create_entry_group(self,
+ request: Optional[Union[catalog.CreateEntryGroupRequest, dict]] = None,
+ *,
+ parent: Optional[str] = None,
+ entry_group: Optional[catalog.EntryGroup] = None,
+ entry_group_id: Optional[str] = None,
+ retry: OptionalRetry = gapic_v1.method.DEFAULT,
+ timeout: Union[float, object] = gapic_v1.method.DEFAULT,
+ metadata: Sequence[Tuple[str, str]] = (),
+ ) -> operation.Operation:
+ r"""Creates an EntryGroup.
+
+ .. code-block:: python
+
+ # This snippet has been automatically generated and should be regarded as a
+ # code template only.
+ # It will require modifications to work:
+ # - It may require correct/in-range values for request initialization.
+ # - It may require specifying regional endpoints when creating the service
+ # client as shown in:
+ # https://googleapis.dev/python/google-api-core/latest/client_options.html
+ from google.cloud import dataplex_v1
+
+ def sample_create_entry_group():
+ # Create a client
+ client = dataplex_v1.CatalogServiceClient()
+
+ # Initialize request argument(s)
+ request = dataplex_v1.CreateEntryGroupRequest(
+ parent="parent_value",
+ entry_group_id="entry_group_id_value",
+ )
+
+ # Make the request
+ operation = client.create_entry_group(request=request)
+
+ print("Waiting for operation to complete...")
+
+ response = operation.result()
+
+ # Handle the response
+ print(response)
+
+ Args:
+ request (Union[google.cloud.dataplex_v1.types.CreateEntryGroupRequest, dict]):
+ The request object. Create EntryGroup Request.
+ parent (str):
+ Required. The resource name of the entryGroup, of the
+ form: projects/{project_number}/locations/{location_id}
+ where ``location_id`` refers to a Google Cloud region.
+
+ This corresponds to the ``parent`` field
+ on the ``request`` instance; if ``request`` is provided, this
+ should not be set.
+ entry_group (google.cloud.dataplex_v1.types.EntryGroup):
+ Required. EntryGroup Resource.
+ This corresponds to the ``entry_group`` field
+ on the ``request`` instance; if ``request`` is provided, this
+ should not be set.
+ entry_group_id (str):
+ Required. EntryGroup identifier.
+ This corresponds to the ``entry_group_id`` field
+ on the ``request`` instance; if ``request`` is provided, this
+ should not be set.
+ retry (google.api_core.retry.Retry): Designation of what errors, if any,
+ should be retried.
+ timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation.Operation: + An object representing a long-running operation. + + The result type for the operation will be + :class:`google.cloud.dataplex_v1.types.EntryGroup` An + Entry Group represents a logical grouping of one or more + Entries. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent, entry_group, entry_group_id]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, catalog.CreateEntryGroupRequest): + request = catalog.CreateEntryGroupRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + if entry_group is not None: + request.entry_group = entry_group + if entry_group_id is not None: + request.entry_group_id = entry_group_id + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.create_entry_group] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("parent", request.parent), + )), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation.from_gapic( + response, + self._transport.operations_client, + catalog.EntryGroup, + metadata_type=service.OperationMetadata, + ) + + # Done; return the response. + return response + + def update_entry_group(self, + request: Optional[Union[catalog.UpdateEntryGroupRequest, dict]] = None, + *, + entry_group: Optional[catalog.EntryGroup] = None, + update_mask: Optional[field_mask_pb2.FieldMask] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation.Operation: + r"""Updates an EntryGroup. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dataplex_v1 + + def sample_update_entry_group(): + # Create a client + client = dataplex_v1.CatalogServiceClient() + + # Initialize request argument(s) + request = dataplex_v1.UpdateEntryGroupRequest( + ) + + # Make the request + operation = client.update_entry_group(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.dataplex_v1.types.UpdateEntryGroupRequest, dict]): + The request object. Update EntryGroup Request. + entry_group (google.cloud.dataplex_v1.types.EntryGroup): + Required. EntryGroup Resource. + This corresponds to the ``entry_group`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + update_mask (google.protobuf.field_mask_pb2.FieldMask): + Required. Mask of fields to update. + This corresponds to the ``update_mask`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation.Operation: + An object representing a long-running operation. + + The result type for the operation will be + :class:`google.cloud.dataplex_v1.types.EntryGroup` An + Entry Group represents a logical grouping of one or more + Entries. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([entry_group, update_mask]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, catalog.UpdateEntryGroupRequest): + request = catalog.UpdateEntryGroupRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if entry_group is not None: + request.entry_group = entry_group + if update_mask is not None: + request.update_mask = update_mask + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.update_entry_group] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("entry_group.name", request.entry_group.name), + )), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation.from_gapic( + response, + self._transport.operations_client, + catalog.EntryGroup, + metadata_type=service.OperationMetadata, + ) + + # Done; return the response. 
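+ # Illustrative long-running-operation sketch (not generated code;
+ # `eg` and `mask` stand for a prepared EntryGroup and FieldMask). The
+ # returned future exposes the usual google.api_core polling helpers:
+ #
+ #     operation = client.update_entry_group(entry_group=eg, update_mask=mask)
+ #     print(operation.metadata)                     # service.OperationMetadata
+ #     entry_group = operation.result(timeout=300)   # blocks until done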
+ return response + + def delete_entry_group(self, + request: Optional[Union[catalog.DeleteEntryGroupRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation.Operation: + r"""Deletes an EntryGroup. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dataplex_v1 + + def sample_delete_entry_group(): + # Create a client + client = dataplex_v1.CatalogServiceClient() + + # Initialize request argument(s) + request = dataplex_v1.DeleteEntryGroupRequest( + name="name_value", + ) + + # Make the request + operation = client.delete_entry_group(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.dataplex_v1.types.DeleteEntryGroupRequest, dict]): + The request object. Delete EntryGroup Request. + name (str): + Required. The resource name of the EntryGroup: + ``projects/{project_number}/locations/{location_id}/entryGroups/{entry_group_id}``. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation.Operation: + An object representing a long-running operation. + + The result type for the operation will be :class:`google.protobuf.empty_pb2.Empty` A generic empty message that you can re-use to avoid defining duplicated + empty messages in your APIs. A typical example is to + use it as the request or the response type of an API + method. For instance: + + service Foo { + rpc Bar(google.protobuf.Empty) returns + (google.protobuf.Empty); + + } + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, catalog.DeleteEntryGroupRequest): + request = catalog.DeleteEntryGroupRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.delete_entry_group] + + # Certain fields should be provided within the metadata header; + # add these here. 
+ metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation.from_gapic( + response, + self._transport.operations_client, + empty_pb2.Empty, + metadata_type=service.OperationMetadata, + ) + + # Done; return the response. + return response + + def list_entry_groups(self, + request: Optional[Union[catalog.ListEntryGroupsRequest, dict]] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListEntryGroupsPager: + r"""Lists EntryGroup resources in a project and location. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dataplex_v1 + + def sample_list_entry_groups(): + # Create a client + client = dataplex_v1.CatalogServiceClient() + + # Initialize request argument(s) + request = dataplex_v1.ListEntryGroupsRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_entry_groups(request=request) + + # Handle the response + for response in page_result: + print(response) + + Args: + request (Union[google.cloud.dataplex_v1.types.ListEntryGroupsRequest, dict]): + The request object. List entryGroups request. + parent (str): + Required. The resource name of the entryGroup location, + of the form: + ``projects/{project_number}/locations/{location_id}`` + where ``location_id`` refers to a Google Cloud region. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.dataplex_v1.services.catalog_service.pagers.ListEntryGroupsPager: + List entry groups response. + + Iterating over this object will yield + results and resolve additional pages + automatically. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, catalog.ListEntryGroupsRequest): + request = catalog.ListEntryGroupsRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. 
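+ # Illustrative per-call retry/timeout sketch (not generated code; the
+ # parameter values are placeholders and assume a recent
+ # google-api-core). Explicit arguments override the defaults baked
+ # into the transport's wrapped methods:
+ #
+ #     from google.api_core import retry as retries
+ #     pager = client.list_entry_groups(
+ #         parent="projects/123/locations/us-central1",
+ #         retry=retries.Retry(initial=1.0, maximum=10.0, multiplier=2.0, timeout=60.0),
+ #         timeout=30.0,
+ #     )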
+ if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.list_entry_groups] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("parent", request.parent), + )), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__iter__` convenience method. + response = pagers.ListEntryGroupsPager( + method=rpc, + request=request, + response=response, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def get_entry_group(self, + request: Optional[Union[catalog.GetEntryGroupRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> catalog.EntryGroup: + r"""Gets an EntryGroup. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dataplex_v1 + + def sample_get_entry_group(): + # Create a client + client = dataplex_v1.CatalogServiceClient() + + # Initialize request argument(s) + request = dataplex_v1.GetEntryGroupRequest( + name="name_value", + ) + + # Make the request + response = client.get_entry_group(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.dataplex_v1.types.GetEntryGroupRequest, dict]): + The request object. Get EntryGroup request. + name (str): + Required. The resource name of the EntryGroup: + ``projects/{project_number}/locations/{location_id}/entryGroups/{entry_group_id}``. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.dataplex_v1.types.EntryGroup: + An Entry Group represents a logical + grouping of one or more Entries. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. 
+        if not isinstance(request, catalog.GetEntryGroupRequest):
+            request = catalog.GetEntryGroupRequest(request)
+        # If we have keyword arguments corresponding to fields on the
+        # request, apply these.
+        if name is not None:
+            request.name = name
+
+        # Wrap the RPC method; this adds retry and timeout information,
+        # and friendly error handling.
+        rpc = self._transport._wrapped_methods[self._transport.get_entry_group]
+
+        # Certain fields should be provided within the metadata header;
+        # add these here.
+        metadata = tuple(metadata) + (
+            gapic_v1.routing_header.to_grpc_metadata((
+                ("name", request.name),
+            )),
+        )
+
+        # Validate the universe domain.
+        self._validate_universe_domain()
+
+        # Send the request.
+        response = rpc(
+            request,
+            retry=retry,
+            timeout=timeout,
+            metadata=metadata,
+        )
+
+        # Done; return the response.
+        return response
+
+    def create_entry(self,
+            request: Optional[Union[catalog.CreateEntryRequest, dict]] = None,
+            *,
+            parent: Optional[str] = None,
+            entry: Optional[catalog.Entry] = None,
+            entry_id: Optional[str] = None,
+            retry: OptionalRetry = gapic_v1.method.DEFAULT,
+            timeout: Union[float, object] = gapic_v1.method.DEFAULT,
+            metadata: Sequence[Tuple[str, str]] = (),
+            ) -> catalog.Entry:
+        r"""Creates an Entry.
+
+        .. code-block:: python
+
+            # This snippet has been automatically generated and should be regarded as a
+            # code template only.
+            # It will require modifications to work:
+            # - It may require correct/in-range values for request initialization.
+            # - It may require specifying regional endpoints when creating the service
+            #   client as shown in:
+            #   https://googleapis.dev/python/google-api-core/latest/client_options.html
+            from google.cloud import dataplex_v1
+
+            def sample_create_entry():
+                # Create a client
+                client = dataplex_v1.CatalogServiceClient()
+
+                # Initialize request argument(s)
+                entry = dataplex_v1.Entry()
+                entry.entry_type = "entry_type_value"
+
+                request = dataplex_v1.CreateEntryRequest(
+                    parent="parent_value",
+                    entry_id="entry_id_value",
+                    entry=entry,
+                )
+
+                # Make the request
+                response = client.create_entry(request=request)
+
+                # Handle the response
+                print(response)
+
+        Args:
+            request (Union[google.cloud.dataplex_v1.types.CreateEntryRequest, dict]):
+                The request object. Create Entry request.
+            parent (str):
+                Required. The resource name of the parent Entry Group:
+                ``projects/{project}/locations/{location}/entryGroups/{entry_group}``.
+
+                This corresponds to the ``parent`` field
+                on the ``request`` instance; if ``request`` is provided, this
+                should not be set.
+            entry (google.cloud.dataplex_v1.types.Entry):
+                Required. Entry resource.
+                This corresponds to the ``entry`` field
+                on the ``request`` instance; if ``request`` is provided, this
+                should not be set.
+            entry_id (str):
+                Required. Entry identifier. It has to be unique within
+                an Entry Group.
+
+                Entries corresponding to Google Cloud resources use an
+                Entry ID format based on `full resource
+                names <https://cloud.google.com/apis/design/resource_names#full_resource_name>`__.
+                The format is a full resource name of the resource
+                without the prefix double slashes in the API service
+                name part of the full resource name. This allows
+                retrieval of entries using their associated resource
+                name.
+
+                For example, if the full resource name of a resource is
+                ``//library.googleapis.com/shelves/shelf1/books/book2``,
+                then the suggested entry_id is
+                ``library.googleapis.com/shelves/shelf1/books/book2``.
+ + It is also suggested to follow the same convention for + entries corresponding to resources from providers or + systems other than Google Cloud. + + The maximum size of the field is 4000 characters. + + This corresponds to the ``entry_id`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.dataplex_v1.types.Entry: + An entry is a representation of a + data resource that can be described by + various metadata. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent, entry, entry_id]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, catalog.CreateEntryRequest): + request = catalog.CreateEntryRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + if entry is not None: + request.entry = entry + if entry_id is not None: + request.entry_id = entry_id + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.create_entry] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("parent", request.parent), + )), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def update_entry(self, + request: Optional[Union[catalog.UpdateEntryRequest, dict]] = None, + *, + entry: Optional[catalog.Entry] = None, + update_mask: Optional[field_mask_pb2.FieldMask] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> catalog.Entry: + r"""Updates an Entry. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+            # - It may require specifying regional endpoints when creating the service
+            #   client as shown in:
+            #   https://googleapis.dev/python/google-api-core/latest/client_options.html
+            from google.cloud import dataplex_v1
+
+            def sample_update_entry():
+                # Create a client
+                client = dataplex_v1.CatalogServiceClient()
+
+                # Initialize request argument(s)
+                entry = dataplex_v1.Entry()
+                entry.entry_type = "entry_type_value"
+
+                request = dataplex_v1.UpdateEntryRequest(
+                    entry=entry,
+                )
+
+                # Make the request
+                response = client.update_entry(request=request)
+
+                # Handle the response
+                print(response)
+
+        Args:
+            request (Union[google.cloud.dataplex_v1.types.UpdateEntryRequest, dict]):
+                The request object. Update Entry request.
+            entry (google.cloud.dataplex_v1.types.Entry):
+                Required. Entry resource.
+                This corresponds to the ``entry`` field
+                on the ``request`` instance; if ``request`` is provided, this
+                should not be set.
+            update_mask (google.protobuf.field_mask_pb2.FieldMask):
+                Optional. Mask of fields to update. To update Aspects,
+                the update_mask must contain the value "aspects".
+
+                If the update_mask is empty, the service will update all
+                modifiable fields present in the request.
+
+                This corresponds to the ``update_mask`` field
+                on the ``request`` instance; if ``request`` is provided, this
+                should not be set.
+            retry (google.api_core.retry.Retry): Designation of what errors, if any,
+                should be retried.
+            timeout (float): The timeout for this request.
+            metadata (Sequence[Tuple[str, str]]): Strings which should be
+                sent along with the request as metadata.
+
+        Returns:
+            google.cloud.dataplex_v1.types.Entry:
+                An entry is a representation of a
+                data resource that can be described by
+                various metadata.
+
+        """
+        # Create or coerce a protobuf request object.
+        # - Quick check: If we got a request object, we should *not* have
+        #   gotten any keyword arguments that map to the request.
+        has_flattened_params = any([entry, update_mask])
+        if request is not None and has_flattened_params:
+            raise ValueError('If the `request` argument is set, then none of '
+                             'the individual field arguments should be set.')
+
+        # - Use the request object if provided (there's no risk of modifying the input as
+        #   there are no flattened fields), or create one.
+        if not isinstance(request, catalog.UpdateEntryRequest):
+            request = catalog.UpdateEntryRequest(request)
+        # If we have keyword arguments corresponding to fields on the
+        # request, apply these.
+        if entry is not None:
+            request.entry = entry
+        if update_mask is not None:
+            request.update_mask = update_mask
+
+        # Wrap the RPC method; this adds retry and timeout information,
+        # and friendly error handling.
+        rpc = self._transport._wrapped_methods[self._transport.update_entry]
+
+        # Certain fields should be provided within the metadata header;
+        # add these here.
+        metadata = tuple(metadata) + (
+            gapic_v1.routing_header.to_grpc_metadata((
+                ("entry.name", request.entry.name),
+            )),
+        )
+
+        # Validate the universe domain.
+        self._validate_universe_domain()
+
+        # Send the request.
+        response = rpc(
+            request,
+            retry=retry,
+            timeout=timeout,
+            metadata=metadata,
+        )
+
+        # Done; return the response.
+        return response
+
+    def delete_entry(self,
+            request: Optional[Union[catalog.DeleteEntryRequest, dict]] = None,
+            *,
+            name: Optional[str] = None,
+            retry: OptionalRetry = gapic_v1.method.DEFAULT,
+            timeout: Union[float, object] = gapic_v1.method.DEFAULT,
+            metadata: Sequence[Tuple[str, str]] = (),
+            ) -> catalog.Entry:
+        r"""Deletes an Entry.
+
+        .. code-block:: python
+
+            # This snippet has been automatically generated and should be regarded as a
+            # code template only.
+            # It will require modifications to work:
+            # - It may require correct/in-range values for request initialization.
+            # - It may require specifying regional endpoints when creating the service
+            #   client as shown in:
+            #   https://googleapis.dev/python/google-api-core/latest/client_options.html
+            from google.cloud import dataplex_v1
+
+            def sample_delete_entry():
+                # Create a client
+                client = dataplex_v1.CatalogServiceClient()
+
+                # Initialize request argument(s)
+                request = dataplex_v1.DeleteEntryRequest(
+                    name="name_value",
+                )
+
+                # Make the request
+                response = client.delete_entry(request=request)
+
+                # Handle the response
+                print(response)
+
+        Args:
+            request (Union[google.cloud.dataplex_v1.types.DeleteEntryRequest, dict]):
+                The request object. Delete Entry request.
+            name (str):
+                Required. The resource name of the Entry:
+                ``projects/{project}/locations/{location}/entryGroups/{entry_group}/entries/{entry}``.
+
+                This corresponds to the ``name`` field
+                on the ``request`` instance; if ``request`` is provided, this
+                should not be set.
+            retry (google.api_core.retry.Retry): Designation of what errors, if any,
+                should be retried.
+            timeout (float): The timeout for this request.
+            metadata (Sequence[Tuple[str, str]]): Strings which should be
+                sent along with the request as metadata.
+
+        Returns:
+            google.cloud.dataplex_v1.types.Entry:
+                An entry is a representation of a
+                data resource that can be described by
+                various metadata.
+
+        """
+        # Create or coerce a protobuf request object.
+        # - Quick check: If we got a request object, we should *not* have
+        #   gotten any keyword arguments that map to the request.
+        has_flattened_params = any([name])
+        if request is not None and has_flattened_params:
+            raise ValueError('If the `request` argument is set, then none of '
+                             'the individual field arguments should be set.')
+
+        # - Use the request object if provided (there's no risk of modifying the input as
+        #   there are no flattened fields), or create one.
+        if not isinstance(request, catalog.DeleteEntryRequest):
+            request = catalog.DeleteEntryRequest(request)
+        # If we have keyword arguments corresponding to fields on the
+        # request, apply these.
+        if name is not None:
+            request.name = name
+
+        # Wrap the RPC method; this adds retry and timeout information,
+        # and friendly error handling.
+        rpc = self._transport._wrapped_methods[self._transport.delete_entry]
+
+        # Certain fields should be provided within the metadata header;
+        # add these here.
+        metadata = tuple(metadata) + (
+            gapic_v1.routing_header.to_grpc_metadata((
+                ("name", request.name),
+            )),
+        )
+
+        # Validate the universe domain.
+        self._validate_universe_domain()
+
+        # Send the request.
+        response = rpc(
+            request,
+            retry=retry,
+            timeout=timeout,
+            metadata=metadata,
+        )
+
+        # Done; return the response.
+        return response
+
+    def list_entries(self,
+            request: Optional[Union[catalog.ListEntriesRequest, dict]] = None,
+            *,
+            parent: Optional[str] = None,
+            retry: OptionalRetry = gapic_v1.method.DEFAULT,
+            timeout: Union[float, object] = gapic_v1.method.DEFAULT,
+            metadata: Sequence[Tuple[str, str]] = (),
+            ) -> pagers.ListEntriesPager:
+        r"""Lists Entries within an EntryGroup.
+
+        .. code-block:: python
+
+            # This snippet has been automatically generated and should be regarded as a
+            # code template only.
+ # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dataplex_v1 + + def sample_list_entries(): + # Create a client + client = dataplex_v1.CatalogServiceClient() + + # Initialize request argument(s) + request = dataplex_v1.ListEntriesRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_entries(request=request) + + # Handle the response + for response in page_result: + print(response) + + Args: + request (Union[google.cloud.dataplex_v1.types.ListEntriesRequest, dict]): + The request object. List Entries request. + parent (str): + Required. The resource name of the parent Entry Group: + ``projects/{project}/locations/{location}/entryGroups/{entry_group}``. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.dataplex_v1.services.catalog_service.pagers.ListEntriesPager: + List Entries response. + + Iterating over this object will yield + results and resolve additional pages + automatically. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, catalog.ListEntriesRequest): + request = catalog.ListEntriesRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.list_entries] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("parent", request.parent), + )), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__iter__` convenience method. + response = pagers.ListEntriesPager( + method=rpc, + request=request, + response=response, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def get_entry(self, + request: Optional[Union[catalog.GetEntryRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> catalog.Entry: + r"""Gets an Entry. 
+ + **Caution**: The BigQuery metadata that is stored in Dataplex + Catalog is changing. For more information, see `Changes to + BigQuery metadata stored in Dataplex + Catalog `__. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dataplex_v1 + + def sample_get_entry(): + # Create a client + client = dataplex_v1.CatalogServiceClient() + + # Initialize request argument(s) + request = dataplex_v1.GetEntryRequest( + name="name_value", + ) + + # Make the request + response = client.get_entry(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.dataplex_v1.types.GetEntryRequest, dict]): + The request object. Get Entry request. + name (str): + Required. The resource name of the Entry: + ``projects/{project}/locations/{location}/entryGroups/{entry_group}/entries/{entry}``. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.dataplex_v1.types.Entry: + An entry is a representation of a + data resource that can be described by + various metadata. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, catalog.GetEntryRequest): + request = catalog.GetEntryRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.get_entry] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def lookup_entry(self, + request: Optional[Union[catalog.LookupEntryRequest, dict]] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> catalog.Entry: + r"""Looks up a single Entry by name using the permission on the + source system. 
+ + **Caution**: The BigQuery metadata that is stored in Dataplex + Catalog is changing. For more information, see `Changes to + BigQuery metadata stored in Dataplex + Catalog `__. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dataplex_v1 + + def sample_lookup_entry(): + # Create a client + client = dataplex_v1.CatalogServiceClient() + + # Initialize request argument(s) + request = dataplex_v1.LookupEntryRequest( + name="name_value", + entry="entry_value", + ) + + # Make the request + response = client.lookup_entry(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.dataplex_v1.types.LookupEntryRequest, dict]): + The request object. Lookup Entry request using + permissions in the source system. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.dataplex_v1.types.Entry: + An entry is a representation of a + data resource that can be described by + various metadata. + + """ + # Create or coerce a protobuf request object. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, catalog.LookupEntryRequest): + request = catalog.LookupEntryRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.lookup_entry] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def search_entries(self, + request: Optional[Union[catalog.SearchEntriesRequest, dict]] = None, + *, + name: Optional[str] = None, + query: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.SearchEntriesPager: + r"""Searches for Entries matching the given query and + scope. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dataplex_v1 + + def sample_search_entries(): + # Create a client + client = dataplex_v1.CatalogServiceClient() + + # Initialize request argument(s) + request = dataplex_v1.SearchEntriesRequest( + name="name_value", + query="query_value", + ) + + # Make the request + page_result = client.search_entries(request=request) + + # Handle the response + for response in page_result: + print(response) + + Args: + request (Union[google.cloud.dataplex_v1.types.SearchEntriesRequest, dict]): + The request object. + name (str): + Required. The project to which the request should be + attributed in the following form: + ``projects/{project}/locations/{location}``. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + query (str): + Required. The query against which + entries in scope should be matched. + + This corresponds to the ``query`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.dataplex_v1.services.catalog_service.pagers.SearchEntriesPager: + Iterating over this object will yield + results and resolve additional pages + automatically. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name, query]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, catalog.SearchEntriesRequest): + request = catalog.SearchEntriesRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + if query is not None: + request.query = query + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.search_entries] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__iter__` convenience method. + response = pagers.SearchEntriesPager( + method=rpc, + request=request, + response=response, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
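+        # Editorial note (illustrative, not generated code): the pager returned
+        # here is lazy, so iterating it issues follow-up SearchEntries RPCs on
+        # demand, e.g. with a hypothetical project and free-text query:
+        #
+        #   for result in client.search_entries(
+        #           name="projects/my-project/locations/global",
+        #           query="my-table"):
+        #       print(result)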
+ return response + + def create_metadata_job(self, + request: Optional[Union[catalog.CreateMetadataJobRequest, dict]] = None, + *, + parent: Optional[str] = None, + metadata_job: Optional[catalog.MetadataJob] = None, + metadata_job_id: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation.Operation: + r"""Creates a metadata job. For example, use a metadata + job to import Dataplex Catalog entries and aspects from + a third-party system into Dataplex. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dataplex_v1 + + def sample_create_metadata_job(): + # Create a client + client = dataplex_v1.CatalogServiceClient() + + # Initialize request argument(s) + metadata_job = dataplex_v1.MetadataJob() + metadata_job.import_spec.scope.entry_groups = ['entry_groups_value1', 'entry_groups_value2'] + metadata_job.import_spec.scope.entry_types = ['entry_types_value1', 'entry_types_value2'] + metadata_job.import_spec.entry_sync_mode = "INCREMENTAL" + metadata_job.import_spec.aspect_sync_mode = "INCREMENTAL" + metadata_job.type_ = "IMPORT" + + request = dataplex_v1.CreateMetadataJobRequest( + parent="parent_value", + metadata_job=metadata_job, + ) + + # Make the request + operation = client.create_metadata_job(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.dataplex_v1.types.CreateMetadataJobRequest, dict]): + The request object. Create metadata job request. + parent (str): + Required. The resource name of the parent location, in + the format + ``projects/{project_id_or_number}/locations/{location_id}`` + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + metadata_job (google.cloud.dataplex_v1.types.MetadataJob): + Required. The metadata job resource. + This corresponds to the ``metadata_job`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + metadata_job_id (str): + Optional. The metadata job ID. If not provided, a unique + ID is generated with the prefix ``metadata-job-``. + + This corresponds to the ``metadata_job_id`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation.Operation: + An object representing a long-running operation. + + The result type for the operation will be + :class:`google.cloud.dataplex_v1.types.MetadataJob` A + metadata job resource. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
+ has_flattened_params = any([parent, metadata_job, metadata_job_id]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, catalog.CreateMetadataJobRequest): + request = catalog.CreateMetadataJobRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + if metadata_job is not None: + request.metadata_job = metadata_job + if metadata_job_id is not None: + request.metadata_job_id = metadata_job_id + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.create_metadata_job] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("parent", request.parent), + )), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation.from_gapic( + response, + self._transport.operations_client, + catalog.MetadataJob, + metadata_type=service.OperationMetadata, + ) + + # Done; return the response. + return response + + def get_metadata_job(self, + request: Optional[Union[catalog.GetMetadataJobRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> catalog.MetadataJob: + r"""Gets a metadata job. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dataplex_v1 + + def sample_get_metadata_job(): + # Create a client + client = dataplex_v1.CatalogServiceClient() + + # Initialize request argument(s) + request = dataplex_v1.GetMetadataJobRequest( + name="name_value", + ) + + # Make the request + response = client.get_metadata_job(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.dataplex_v1.types.GetMetadataJobRequest, dict]): + The request object. Get metadata job request. + name (str): + Required. The resource name of the metadata job, in the + format + ``projects/{project_id_or_number}/locations/{location_id}/metadataJobs/{metadata_job_id}``. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.dataplex_v1.types.MetadataJob: + A metadata job resource. 
+ """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, catalog.GetMetadataJobRequest): + request = catalog.GetMetadataJobRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.get_metadata_job] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def list_metadata_jobs(self, + request: Optional[Union[catalog.ListMetadataJobsRequest, dict]] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListMetadataJobsPager: + r"""Lists metadata jobs. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dataplex_v1 + + def sample_list_metadata_jobs(): + # Create a client + client = dataplex_v1.CatalogServiceClient() + + # Initialize request argument(s) + request = dataplex_v1.ListMetadataJobsRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_metadata_jobs(request=request) + + # Handle the response + for response in page_result: + print(response) + + Args: + request (Union[google.cloud.dataplex_v1.types.ListMetadataJobsRequest, dict]): + The request object. List metadata jobs request. + parent (str): + Required. The resource name of the parent location, in + the format + ``projects/{project_id_or_number}/locations/{location_id}`` + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.dataplex_v1.services.catalog_service.pagers.ListMetadataJobsPager: + List metadata jobs response. + + Iterating over this object will yield + results and resolve additional pages + automatically. + + """ + # Create or coerce a protobuf request object. 
+ # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, catalog.ListMetadataJobsRequest): + request = catalog.ListMetadataJobsRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.list_metadata_jobs] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("parent", request.parent), + )), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__iter__` convenience method. + response = pagers.ListMetadataJobsPager( + method=rpc, + request=request, + response=response, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def cancel_metadata_job(self, + request: Optional[Union[catalog.CancelMetadataJobRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Cancels a metadata job. + + If you cancel a metadata import job that is in progress, + the changes in the job might be partially applied. We + recommend that you reset the state of the entry groups + in your project by running another metadata job that + reverts the changes from the canceled job. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dataplex_v1 + + def sample_cancel_metadata_job(): + # Create a client + client = dataplex_v1.CatalogServiceClient() + + # Initialize request argument(s) + request = dataplex_v1.CancelMetadataJobRequest( + name="name_value", + ) + + # Make the request + client.cancel_metadata_job(request=request) + + Args: + request (Union[google.cloud.dataplex_v1.types.CancelMetadataJobRequest, dict]): + The request object. Cancel metadata job request. + name (str): + Required. The resource name of the job, in the format + ``projects/{project_id_or_number}/locations/{location_id}/metadataJobs/{metadata_job_id}`` + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. 
+ timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, catalog.CancelMetadataJobRequest): + request = catalog.CancelMetadataJobRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.cancel_metadata_job] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + def __enter__(self) -> "CatalogServiceClient": + return self + + def __exit__(self, type, value, traceback): + """Releases underlying transport's resources. + + .. warning:: + ONLY use as a context manager if the transport is NOT shared + with other clients! Exiting the with block will CLOSE the transport + and may cause errors in other clients! + """ + self.transport.close() + + def list_operations( + self, + request: Optional[operations_pb2.ListOperationsRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.ListOperationsResponse: + r"""Lists operations that match the specified filter in the request. + + Args: + request (:class:`~.operations_pb2.ListOperationsRequest`): + The request object. Request message for + `ListOperations` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + ~.operations_pb2.ListOperationsResponse: + Response message for ``ListOperations`` method. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.ListOperationsRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.list_operations] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("name", request.name),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. 
+ response = rpc( + request, retry=retry, timeout=timeout, metadata=metadata,) + + # Done; return the response. + return response + + def get_operation( + self, + request: Optional[operations_pb2.GetOperationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.Operation: + r"""Gets the latest state of a long-running operation. + + Args: + request (:class:`~.operations_pb2.GetOperationRequest`): + The request object. Request message for + `GetOperation` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + ~.operations_pb2.Operation: + An ``Operation`` object. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.GetOperationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.get_operation] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("name", request.name),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, retry=retry, timeout=timeout, metadata=metadata,) + + # Done; return the response. + return response + + def delete_operation( + self, + request: Optional[operations_pb2.DeleteOperationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Deletes a long-running operation. + + This method indicates that the client is no longer interested + in the operation result. It does not cancel the operation. + If the server doesn't support this method, it returns + `google.rpc.Code.UNIMPLEMENTED`. + + Args: + request (:class:`~.operations_pb2.DeleteOperationRequest`): + The request object. Request message for + `DeleteOperation` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + None + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.DeleteOperationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.delete_operation] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("name", request.name),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. 
+ rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + + def cancel_operation( + self, + request: Optional[operations_pb2.CancelOperationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Starts asynchronous cancellation on a long-running operation. + + The server makes a best effort to cancel the operation, but success + is not guaranteed. If the server doesn't support this method, it returns + `google.rpc.Code.UNIMPLEMENTED`. + + Args: + request (:class:`~.operations_pb2.CancelOperationRequest`): + The request object. Request message for + `CancelOperation` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + None + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.CancelOperationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.cancel_operation] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("name", request.name),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + + def get_location( + self, + request: Optional[locations_pb2.GetLocationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> locations_pb2.Location: + r"""Gets information about a location. + + Args: + request (:class:`~.location_pb2.GetLocationRequest`): + The request object. Request message for + `GetLocation` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + ~.location_pb2.Location: + Location object. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = locations_pb2.GetLocationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.get_location] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("name", request.name),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, retry=retry, timeout=timeout, metadata=metadata,) + + # Done; return the response. 
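+        # Editorial note (illustrative, not generated code): because the raw
+        # protobuf request is keyword-expanded above, this mixin also accepts
+        # a plain dict, e.g.
+        #
+        #   client.get_location({"name": "projects/my-project/locations/us-central1"})
+        #
+        # (project and location are hypothetical).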
+ return response + + def list_locations( + self, + request: Optional[locations_pb2.ListLocationsRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> locations_pb2.ListLocationsResponse: + r"""Lists information about the supported locations for this service. + + Args: + request (:class:`~.location_pb2.ListLocationsRequest`): + The request object. Request message for + `ListLocations` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + ~.location_pb2.ListLocationsResponse: + Response message for ``ListLocations`` method. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = locations_pb2.ListLocationsRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.list_locations] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("name", request.name),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, retry=retry, timeout=timeout, metadata=metadata,) + + # Done; return the response. + return response + + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(gapic_version=package_version.__version__) + + +__all__ = ( + "CatalogServiceClient", +) diff --git a/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/catalog_service/pagers.py b/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/catalog_service/pagers.py new file mode 100644 index 000000000000..224b831f513a --- /dev/null +++ b/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/catalog_service/pagers.py @@ -0,0 +1,837 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
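+#
+# Editorial note (illustrative, not part of the generated module): each pager
+# below wraps a list/search RPC so that plain iteration fetches follow-up
+# pages transparently, e.g. (hypothetical parent):
+#
+#   pager = client.list_entry_types(
+#       parent="projects/my-project/locations/us-central1")
+#   for entry_type in pager:  # issues further ListEntryTypes RPCs as needed
+#       print(entry_type.name)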
+# +from google.api_core import gapic_v1 +from google.api_core import retry as retries +from google.api_core import retry_async as retries_async +from typing import Any, AsyncIterator, Awaitable, Callable, Sequence, Tuple, Optional, Iterator, Union +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault, None] + OptionalAsyncRetry = Union[retries_async.AsyncRetry, gapic_v1.method._MethodDefault, None] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object, None] # type: ignore + OptionalAsyncRetry = Union[retries_async.AsyncRetry, object, None] # type: ignore + +from google.cloud.dataplex_v1.types import catalog + + +class ListEntryTypesPager: + """A pager for iterating through ``list_entry_types`` requests. + + This class thinly wraps an initial + :class:`google.cloud.dataplex_v1.types.ListEntryTypesResponse` object, and + provides an ``__iter__`` method to iterate through its + ``entry_types`` field. + + If there are more pages, the ``__iter__`` method will make additional + ``ListEntryTypes`` requests and continue to iterate + through the ``entry_types`` field on the + corresponding responses. + + All the usual :class:`google.cloud.dataplex_v1.types.ListEntryTypesResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + def __init__(self, + method: Callable[..., catalog.ListEntryTypesResponse], + request: catalog.ListEntryTypesRequest, + response: catalog.ListEntryTypesResponse, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = ()): + """Instantiate the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.dataplex_v1.types.ListEntryTypesRequest): + The initial request object. + response (google.cloud.dataplex_v1.types.ListEntryTypesResponse): + The initial response object. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + self._method = method + self._request = catalog.ListEntryTypesRequest(request) + self._response = response + self._retry = retry + self._timeout = timeout + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + def pages(self) -> Iterator[catalog.ListEntryTypesResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = self._method(self._request, retry=self._retry, timeout=self._timeout, metadata=self._metadata) + yield self._response + + def __iter__(self) -> Iterator[catalog.EntryType]: + for page in self.pages: + yield from page.entry_types + + def __repr__(self) -> str: + return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) + + +class ListEntryTypesAsyncPager: + """A pager for iterating through ``list_entry_types`` requests. + + This class thinly wraps an initial + :class:`google.cloud.dataplex_v1.types.ListEntryTypesResponse` object, and + provides an ``__aiter__`` method to iterate through its + ``entry_types`` field. 
+ + If there are more pages, the ``__aiter__`` method will make additional + ``ListEntryTypes`` requests and continue to iterate + through the ``entry_types`` field on the + corresponding responses. + + All the usual :class:`google.cloud.dataplex_v1.types.ListEntryTypesResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + def __init__(self, + method: Callable[..., Awaitable[catalog.ListEntryTypesResponse]], + request: catalog.ListEntryTypesRequest, + response: catalog.ListEntryTypesResponse, + *, + retry: OptionalAsyncRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = ()): + """Instantiates the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.dataplex_v1.types.ListEntryTypesRequest): + The initial request object. + response (google.cloud.dataplex_v1.types.ListEntryTypesResponse): + The initial response object. + retry (google.api_core.retry.AsyncRetry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + self._method = method + self._request = catalog.ListEntryTypesRequest(request) + self._response = response + self._retry = retry + self._timeout = timeout + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + async def pages(self) -> AsyncIterator[catalog.ListEntryTypesResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = await self._method(self._request, retry=self._retry, timeout=self._timeout, metadata=self._metadata) + yield self._response + def __aiter__(self) -> AsyncIterator[catalog.EntryType]: + async def async_generator(): + async for page in self.pages: + for response in page.entry_types: + yield response + + return async_generator() + + def __repr__(self) -> str: + return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) + + +class ListAspectTypesPager: + """A pager for iterating through ``list_aspect_types`` requests. + + This class thinly wraps an initial + :class:`google.cloud.dataplex_v1.types.ListAspectTypesResponse` object, and + provides an ``__iter__`` method to iterate through its + ``aspect_types`` field. + + If there are more pages, the ``__iter__`` method will make additional + ``ListAspectTypes`` requests and continue to iterate + through the ``aspect_types`` field on the + corresponding responses. + + All the usual :class:`google.cloud.dataplex_v1.types.ListAspectTypesResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + def __init__(self, + method: Callable[..., catalog.ListAspectTypesResponse], + request: catalog.ListAspectTypesRequest, + response: catalog.ListAspectTypesResponse, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = ()): + """Instantiate the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. 
+ request (google.cloud.dataplex_v1.types.ListAspectTypesRequest): + The initial request object. + response (google.cloud.dataplex_v1.types.ListAspectTypesResponse): + The initial response object. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + self._method = method + self._request = catalog.ListAspectTypesRequest(request) + self._response = response + self._retry = retry + self._timeout = timeout + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + def pages(self) -> Iterator[catalog.ListAspectTypesResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = self._method(self._request, retry=self._retry, timeout=self._timeout, metadata=self._metadata) + yield self._response + + def __iter__(self) -> Iterator[catalog.AspectType]: + for page in self.pages: + yield from page.aspect_types + + def __repr__(self) -> str: + return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) + + +class ListAspectTypesAsyncPager: + """A pager for iterating through ``list_aspect_types`` requests. + + This class thinly wraps an initial + :class:`google.cloud.dataplex_v1.types.ListAspectTypesResponse` object, and + provides an ``__aiter__`` method to iterate through its + ``aspect_types`` field. + + If there are more pages, the ``__aiter__`` method will make additional + ``ListAspectTypes`` requests and continue to iterate + through the ``aspect_types`` field on the + corresponding responses. + + All the usual :class:`google.cloud.dataplex_v1.types.ListAspectTypesResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + def __init__(self, + method: Callable[..., Awaitable[catalog.ListAspectTypesResponse]], + request: catalog.ListAspectTypesRequest, + response: catalog.ListAspectTypesResponse, + *, + retry: OptionalAsyncRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = ()): + """Instantiates the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.dataplex_v1.types.ListAspectTypesRequest): + The initial request object. + response (google.cloud.dataplex_v1.types.ListAspectTypesResponse): + The initial response object. + retry (google.api_core.retry.AsyncRetry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ """ + self._method = method + self._request = catalog.ListAspectTypesRequest(request) + self._response = response + self._retry = retry + self._timeout = timeout + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + async def pages(self) -> AsyncIterator[catalog.ListAspectTypesResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = await self._method(self._request, retry=self._retry, timeout=self._timeout, metadata=self._metadata) + yield self._response + def __aiter__(self) -> AsyncIterator[catalog.AspectType]: + async def async_generator(): + async for page in self.pages: + for response in page.aspect_types: + yield response + + return async_generator() + + def __repr__(self) -> str: + return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) + + +class ListEntryGroupsPager: + """A pager for iterating through ``list_entry_groups`` requests. + + This class thinly wraps an initial + :class:`google.cloud.dataplex_v1.types.ListEntryGroupsResponse` object, and + provides an ``__iter__`` method to iterate through its + ``entry_groups`` field. + + If there are more pages, the ``__iter__`` method will make additional + ``ListEntryGroups`` requests and continue to iterate + through the ``entry_groups`` field on the + corresponding responses. + + All the usual :class:`google.cloud.dataplex_v1.types.ListEntryGroupsResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + def __init__(self, + method: Callable[..., catalog.ListEntryGroupsResponse], + request: catalog.ListEntryGroupsRequest, + response: catalog.ListEntryGroupsResponse, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = ()): + """Instantiate the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.dataplex_v1.types.ListEntryGroupsRequest): + The initial request object. + response (google.cloud.dataplex_v1.types.ListEntryGroupsResponse): + The initial response object. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + self._method = method + self._request = catalog.ListEntryGroupsRequest(request) + self._response = response + self._retry = retry + self._timeout = timeout + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + def pages(self) -> Iterator[catalog.ListEntryGroupsResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = self._method(self._request, retry=self._retry, timeout=self._timeout, metadata=self._metadata) + yield self._response + + def __iter__(self) -> Iterator[catalog.EntryGroup]: + for page in self.pages: + yield from page.entry_groups + + def __repr__(self) -> str: + return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) + + +class ListEntryGroupsAsyncPager: + """A pager for iterating through ``list_entry_groups`` requests. 
+ + This class thinly wraps an initial + :class:`google.cloud.dataplex_v1.types.ListEntryGroupsResponse` object, and + provides an ``__aiter__`` method to iterate through its + ``entry_groups`` field. + + If there are more pages, the ``__aiter__`` method will make additional + ``ListEntryGroups`` requests and continue to iterate + through the ``entry_groups`` field on the + corresponding responses. + + All the usual :class:`google.cloud.dataplex_v1.types.ListEntryGroupsResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + def __init__(self, + method: Callable[..., Awaitable[catalog.ListEntryGroupsResponse]], + request: catalog.ListEntryGroupsRequest, + response: catalog.ListEntryGroupsResponse, + *, + retry: OptionalAsyncRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = ()): + """Instantiates the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.dataplex_v1.types.ListEntryGroupsRequest): + The initial request object. + response (google.cloud.dataplex_v1.types.ListEntryGroupsResponse): + The initial response object. + retry (google.api_core.retry.AsyncRetry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + self._method = method + self._request = catalog.ListEntryGroupsRequest(request) + self._response = response + self._retry = retry + self._timeout = timeout + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + async def pages(self) -> AsyncIterator[catalog.ListEntryGroupsResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = await self._method(self._request, retry=self._retry, timeout=self._timeout, metadata=self._metadata) + yield self._response + def __aiter__(self) -> AsyncIterator[catalog.EntryGroup]: + async def async_generator(): + async for page in self.pages: + for response in page.entry_groups: + yield response + + return async_generator() + + def __repr__(self) -> str: + return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) + + +class ListEntriesPager: + """A pager for iterating through ``list_entries`` requests. + + This class thinly wraps an initial + :class:`google.cloud.dataplex_v1.types.ListEntriesResponse` object, and + provides an ``__iter__`` method to iterate through its + ``entries`` field. + + If there are more pages, the ``__iter__`` method will make additional + ``ListEntries`` requests and continue to iterate + through the ``entries`` field on the + corresponding responses. + + All the usual :class:`google.cloud.dataplex_v1.types.ListEntriesResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. 
+ """ + def __init__(self, + method: Callable[..., catalog.ListEntriesResponse], + request: catalog.ListEntriesRequest, + response: catalog.ListEntriesResponse, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = ()): + """Instantiate the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.dataplex_v1.types.ListEntriesRequest): + The initial request object. + response (google.cloud.dataplex_v1.types.ListEntriesResponse): + The initial response object. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + self._method = method + self._request = catalog.ListEntriesRequest(request) + self._response = response + self._retry = retry + self._timeout = timeout + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + def pages(self) -> Iterator[catalog.ListEntriesResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = self._method(self._request, retry=self._retry, timeout=self._timeout, metadata=self._metadata) + yield self._response + + def __iter__(self) -> Iterator[catalog.Entry]: + for page in self.pages: + yield from page.entries + + def __repr__(self) -> str: + return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) + + +class ListEntriesAsyncPager: + """A pager for iterating through ``list_entries`` requests. + + This class thinly wraps an initial + :class:`google.cloud.dataplex_v1.types.ListEntriesResponse` object, and + provides an ``__aiter__`` method to iterate through its + ``entries`` field. + + If there are more pages, the ``__aiter__`` method will make additional + ``ListEntries`` requests and continue to iterate + through the ``entries`` field on the + corresponding responses. + + All the usual :class:`google.cloud.dataplex_v1.types.ListEntriesResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + def __init__(self, + method: Callable[..., Awaitable[catalog.ListEntriesResponse]], + request: catalog.ListEntriesRequest, + response: catalog.ListEntriesResponse, + *, + retry: OptionalAsyncRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = ()): + """Instantiates the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.dataplex_v1.types.ListEntriesRequest): + The initial request object. + response (google.cloud.dataplex_v1.types.ListEntriesResponse): + The initial response object. + retry (google.api_core.retry.AsyncRetry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ """ + self._method = method + self._request = catalog.ListEntriesRequest(request) + self._response = response + self._retry = retry + self._timeout = timeout + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + async def pages(self) -> AsyncIterator[catalog.ListEntriesResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = await self._method(self._request, retry=self._retry, timeout=self._timeout, metadata=self._metadata) + yield self._response + def __aiter__(self) -> AsyncIterator[catalog.Entry]: + async def async_generator(): + async for page in self.pages: + for response in page.entries: + yield response + + return async_generator() + + def __repr__(self) -> str: + return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) + + +class SearchEntriesPager: + """A pager for iterating through ``search_entries`` requests. + + This class thinly wraps an initial + :class:`google.cloud.dataplex_v1.types.SearchEntriesResponse` object, and + provides an ``__iter__`` method to iterate through its + ``results`` field. + + If there are more pages, the ``__iter__`` method will make additional + ``SearchEntries`` requests and continue to iterate + through the ``results`` field on the + corresponding responses. + + All the usual :class:`google.cloud.dataplex_v1.types.SearchEntriesResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + def __init__(self, + method: Callable[..., catalog.SearchEntriesResponse], + request: catalog.SearchEntriesRequest, + response: catalog.SearchEntriesResponse, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = ()): + """Instantiate the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.dataplex_v1.types.SearchEntriesRequest): + The initial request object. + response (google.cloud.dataplex_v1.types.SearchEntriesResponse): + The initial response object. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + self._method = method + self._request = catalog.SearchEntriesRequest(request) + self._response = response + self._retry = retry + self._timeout = timeout + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + def pages(self) -> Iterator[catalog.SearchEntriesResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = self._method(self._request, retry=self._retry, timeout=self._timeout, metadata=self._metadata) + yield self._response + + def __iter__(self) -> Iterator[catalog.SearchEntriesResult]: + for page in self.pages: + yield from page.results + + def __repr__(self) -> str: + return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) + + +class SearchEntriesAsyncPager: + """A pager for iterating through ``search_entries`` requests. 
+ + This class thinly wraps an initial + :class:`google.cloud.dataplex_v1.types.SearchEntriesResponse` object, and + provides an ``__aiter__`` method to iterate through its + ``results`` field. + + If there are more pages, the ``__aiter__`` method will make additional + ``SearchEntries`` requests and continue to iterate + through the ``results`` field on the + corresponding responses. + + All the usual :class:`google.cloud.dataplex_v1.types.SearchEntriesResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + def __init__(self, + method: Callable[..., Awaitable[catalog.SearchEntriesResponse]], + request: catalog.SearchEntriesRequest, + response: catalog.SearchEntriesResponse, + *, + retry: OptionalAsyncRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = ()): + """Instantiates the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.dataplex_v1.types.SearchEntriesRequest): + The initial request object. + response (google.cloud.dataplex_v1.types.SearchEntriesResponse): + The initial response object. + retry (google.api_core.retry.AsyncRetry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + self._method = method + self._request = catalog.SearchEntriesRequest(request) + self._response = response + self._retry = retry + self._timeout = timeout + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + async def pages(self) -> AsyncIterator[catalog.SearchEntriesResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = await self._method(self._request, retry=self._retry, timeout=self._timeout, metadata=self._metadata) + yield self._response + def __aiter__(self) -> AsyncIterator[catalog.SearchEntriesResult]: + async def async_generator(): + async for page in self.pages: + for response in page.results: + yield response + + return async_generator() + + def __repr__(self) -> str: + return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) + + +class ListMetadataJobsPager: + """A pager for iterating through ``list_metadata_jobs`` requests. + + This class thinly wraps an initial + :class:`google.cloud.dataplex_v1.types.ListMetadataJobsResponse` object, and + provides an ``__iter__`` method to iterate through its + ``metadata_jobs`` field. + + If there are more pages, the ``__iter__`` method will make additional + ``ListMetadataJobs`` requests and continue to iterate + through the ``metadata_jobs`` field on the + corresponding responses. + + All the usual :class:`google.cloud.dataplex_v1.types.ListMetadataJobsResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. 
+ """ + def __init__(self, + method: Callable[..., catalog.ListMetadataJobsResponse], + request: catalog.ListMetadataJobsRequest, + response: catalog.ListMetadataJobsResponse, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = ()): + """Instantiate the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.dataplex_v1.types.ListMetadataJobsRequest): + The initial request object. + response (google.cloud.dataplex_v1.types.ListMetadataJobsResponse): + The initial response object. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + self._method = method + self._request = catalog.ListMetadataJobsRequest(request) + self._response = response + self._retry = retry + self._timeout = timeout + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + def pages(self) -> Iterator[catalog.ListMetadataJobsResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = self._method(self._request, retry=self._retry, timeout=self._timeout, metadata=self._metadata) + yield self._response + + def __iter__(self) -> Iterator[catalog.MetadataJob]: + for page in self.pages: + yield from page.metadata_jobs + + def __repr__(self) -> str: + return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) + + +class ListMetadataJobsAsyncPager: + """A pager for iterating through ``list_metadata_jobs`` requests. + + This class thinly wraps an initial + :class:`google.cloud.dataplex_v1.types.ListMetadataJobsResponse` object, and + provides an ``__aiter__`` method to iterate through its + ``metadata_jobs`` field. + + If there are more pages, the ``__aiter__`` method will make additional + ``ListMetadataJobs`` requests and continue to iterate + through the ``metadata_jobs`` field on the + corresponding responses. + + All the usual :class:`google.cloud.dataplex_v1.types.ListMetadataJobsResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + def __init__(self, + method: Callable[..., Awaitable[catalog.ListMetadataJobsResponse]], + request: catalog.ListMetadataJobsRequest, + response: catalog.ListMetadataJobsResponse, + *, + retry: OptionalAsyncRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = ()): + """Instantiates the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.dataplex_v1.types.ListMetadataJobsRequest): + The initial request object. + response (google.cloud.dataplex_v1.types.ListMetadataJobsResponse): + The initial response object. + retry (google.api_core.retry.AsyncRetry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ """ + self._method = method + self._request = catalog.ListMetadataJobsRequest(request) + self._response = response + self._retry = retry + self._timeout = timeout + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + async def pages(self) -> AsyncIterator[catalog.ListMetadataJobsResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = await self._method(self._request, retry=self._retry, timeout=self._timeout, metadata=self._metadata) + yield self._response + def __aiter__(self) -> AsyncIterator[catalog.MetadataJob]: + async def async_generator(): + async for page in self.pages: + for response in page.metadata_jobs: + yield response + + return async_generator() + + def __repr__(self) -> str: + return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) diff --git a/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/catalog_service/transports/README.rst b/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/catalog_service/transports/README.rst new file mode 100644 index 000000000000..c14dcbeef235 --- /dev/null +++ b/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/catalog_service/transports/README.rst @@ -0,0 +1,9 @@ + +transport inheritance structure +_______________________________ + +`CatalogServiceTransport` is the ABC for all transports. +- public child `CatalogServiceGrpcTransport` for sync gRPC transport (defined in `grpc.py`). +- public child `CatalogServiceGrpcAsyncIOTransport` for async gRPC transport (defined in `grpc_asyncio.py`). +- private child `_BaseCatalogServiceRestTransport` for base REST transport with inner classes `_BaseMETHOD` (defined in `rest_base.py`). +- public child `CatalogServiceRestTransport` for sync REST transport with inner classes `METHOD` derived from the parent's corresponding `_BaseMETHOD` classes (defined in `rest.py`). diff --git a/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/catalog_service/transports/__init__.py b/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/catalog_service/transports/__init__.py new file mode 100644 index 000000000000..7e6b046ff29f --- /dev/null +++ b/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/catalog_service/transports/__init__.py @@ -0,0 +1,33 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from collections import OrderedDict +from typing import Dict, Type + +from .base import CatalogServiceTransport +from .grpc import CatalogServiceGrpcTransport +from .grpc_asyncio import CatalogServiceGrpcAsyncIOTransport + + +# Compile a registry of transports. 
+_transport_registry = OrderedDict() # type: Dict[str, Type[CatalogServiceTransport]] +_transport_registry['grpc'] = CatalogServiceGrpcTransport +_transport_registry['grpc_asyncio'] = CatalogServiceGrpcAsyncIOTransport + +__all__ = ( + 'CatalogServiceTransport', + 'CatalogServiceGrpcTransport', + 'CatalogServiceGrpcAsyncIOTransport', +) diff --git a/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/catalog_service/transports/base.py b/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/catalog_service/transports/base.py new file mode 100644 index 000000000000..dc0255a4f11a --- /dev/null +++ b/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/catalog_service/transports/base.py @@ -0,0 +1,707 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +import abc +from typing import Awaitable, Callable, Dict, Optional, Sequence, Union + +from google.cloud.dataplex_v1 import gapic_version as package_version + +import google.auth # type: ignore +import google.api_core +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry as retries +from google.api_core import operations_v1 +from google.auth import credentials as ga_credentials # type: ignore +from google.oauth2 import service_account # type: ignore + +from google.cloud.dataplex_v1.types import catalog +from google.cloud.location import locations_pb2 # type: ignore +from google.iam.v1 import iam_policy_pb2 # type: ignore +from google.iam.v1 import policy_pb2 # type: ignore +from google.longrunning import operations_pb2 # type: ignore +from google.protobuf import empty_pb2 # type: ignore + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(gapic_version=package_version.__version__) + + +class CatalogServiceTransport(abc.ABC): + """Abstract transport class for CatalogService.""" + + AUTH_SCOPES = ( + 'https://www.googleapis.com/auth/cloud-platform', + ) + + DEFAULT_HOST: str = 'dataplex.googleapis.com' + def __init__( + self, *, + host: str = DEFAULT_HOST, + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + api_audience: Optional[str] = None, + **kwargs, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to (default: 'dataplex.googleapis.com'). + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. 
+ credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is mutually exclusive with credentials. + scopes (Optional[Sequence[str]]): A list of scopes. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + """ + + scopes_kwargs = {"scopes": scopes, "default_scopes": self.AUTH_SCOPES} + + # Save the scopes. + self._scopes = scopes + if not hasattr(self, "_ignore_credentials"): + self._ignore_credentials: bool = False + + # If no credentials are provided, then determine the appropriate + # defaults. + if credentials and credentials_file: + raise core_exceptions.DuplicateCredentialArgs("'credentials_file' and 'credentials' are mutually exclusive") + + if credentials_file is not None: + credentials, _ = google.auth.load_credentials_from_file( + credentials_file, + **scopes_kwargs, + quota_project_id=quota_project_id + ) + elif credentials is None and not self._ignore_credentials: + credentials, _ = google.auth.default(**scopes_kwargs, quota_project_id=quota_project_id) + # Don't apply audience if the credentials file passed from user. + if hasattr(credentials, "with_gdch_audience"): + credentials = credentials.with_gdch_audience(api_audience if api_audience else host) + + # If the credentials are service account credentials, then always try to use self signed JWT. + if always_use_jwt_access and isinstance(credentials, service_account.Credentials) and hasattr(service_account.Credentials, "with_always_use_jwt_access"): + credentials = credentials.with_always_use_jwt_access(True) + + # Save the credentials. + self._credentials = credentials + + # Save the hostname. Default to port 443 (HTTPS) if none is specified. + if ':' not in host: + host += ':443' + self._host = host + + @property + def host(self): + return self._host + + def _prep_wrapped_messages(self, client_info): + # Precompute the wrapped methods. 
+ self._wrapped_methods = { + self.create_entry_type: gapic_v1.method.wrap_method( + self.create_entry_type, + default_timeout=60.0, + client_info=client_info, + ), + self.update_entry_type: gapic_v1.method.wrap_method( + self.update_entry_type, + default_timeout=60.0, + client_info=client_info, + ), + self.delete_entry_type: gapic_v1.method.wrap_method( + self.delete_entry_type, + default_timeout=60.0, + client_info=client_info, + ), + self.list_entry_types: gapic_v1.method.wrap_method( + self.list_entry_types, + default_retry=retries.Retry( + initial=1.0, + maximum=10.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ResourceExhausted, + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=client_info, + ), + self.get_entry_type: gapic_v1.method.wrap_method( + self.get_entry_type, + default_retry=retries.Retry( + initial=1.0, + maximum=10.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ResourceExhausted, + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=client_info, + ), + self.create_aspect_type: gapic_v1.method.wrap_method( + self.create_aspect_type, + default_timeout=60.0, + client_info=client_info, + ), + self.update_aspect_type: gapic_v1.method.wrap_method( + self.update_aspect_type, + default_timeout=60.0, + client_info=client_info, + ), + self.delete_aspect_type: gapic_v1.method.wrap_method( + self.delete_aspect_type, + default_timeout=60.0, + client_info=client_info, + ), + self.list_aspect_types: gapic_v1.method.wrap_method( + self.list_aspect_types, + default_retry=retries.Retry( + initial=1.0, + maximum=10.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ResourceExhausted, + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=client_info, + ), + self.get_aspect_type: gapic_v1.method.wrap_method( + self.get_aspect_type, + default_retry=retries.Retry( + initial=1.0, + maximum=10.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ResourceExhausted, + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=client_info, + ), + self.create_entry_group: gapic_v1.method.wrap_method( + self.create_entry_group, + default_timeout=60.0, + client_info=client_info, + ), + self.update_entry_group: gapic_v1.method.wrap_method( + self.update_entry_group, + default_timeout=60.0, + client_info=client_info, + ), + self.delete_entry_group: gapic_v1.method.wrap_method( + self.delete_entry_group, + default_timeout=60.0, + client_info=client_info, + ), + self.list_entry_groups: gapic_v1.method.wrap_method( + self.list_entry_groups, + default_retry=retries.Retry( + initial=1.0, + maximum=10.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ResourceExhausted, + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=client_info, + ), + self.get_entry_group: gapic_v1.method.wrap_method( + self.get_entry_group, + default_retry=retries.Retry( + initial=1.0, + maximum=10.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ResourceExhausted, + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=client_info, + ), + self.create_entry: gapic_v1.method.wrap_method( + self.create_entry, + default_timeout=60.0, + client_info=client_info, + ), + self.update_entry: 
gapic_v1.method.wrap_method( + self.update_entry, + default_retry=retries.Retry( + initial=1.0, + maximum=10.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ResourceExhausted, + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=client_info, + ), + self.delete_entry: gapic_v1.method.wrap_method( + self.delete_entry, + default_timeout=60.0, + client_info=client_info, + ), + self.list_entries: gapic_v1.method.wrap_method( + self.list_entries, + default_retry=retries.Retry( + initial=1.0, + maximum=10.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ResourceExhausted, + core_exceptions.ServiceUnavailable, + ), + deadline=20.0, + ), + default_timeout=20.0, + client_info=client_info, + ), + self.get_entry: gapic_v1.method.wrap_method( + self.get_entry, + default_retry=retries.Retry( + initial=1.0, + maximum=10.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ResourceExhausted, + core_exceptions.ServiceUnavailable, + ), + deadline=20.0, + ), + default_timeout=20.0, + client_info=client_info, + ), + self.lookup_entry: gapic_v1.method.wrap_method( + self.lookup_entry, + default_retry=retries.Retry( + initial=1.0, + maximum=10.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ResourceExhausted, + core_exceptions.ServiceUnavailable, + ), + deadline=20.0, + ), + default_timeout=20.0, + client_info=client_info, + ), + self.search_entries: gapic_v1.method.wrap_method( + self.search_entries, + default_retry=retries.Retry( + initial=1.0, + maximum=10.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ResourceExhausted, + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=client_info, + ), + self.create_metadata_job: gapic_v1.method.wrap_method( + self.create_metadata_job, + default_timeout=None, + client_info=client_info, + ), + self.get_metadata_job: gapic_v1.method.wrap_method( + self.get_metadata_job, + default_timeout=None, + client_info=client_info, + ), + self.list_metadata_jobs: gapic_v1.method.wrap_method( + self.list_metadata_jobs, + default_timeout=None, + client_info=client_info, + ), + self.cancel_metadata_job: gapic_v1.method.wrap_method( + self.cancel_metadata_job, + default_timeout=None, + client_info=client_info, + ), + self.get_location: gapic_v1.method.wrap_method( + self.get_location, + default_timeout=None, + client_info=client_info, + ), + self.list_locations: gapic_v1.method.wrap_method( + self.list_locations, + default_timeout=None, + client_info=client_info, + ), + self.cancel_operation: gapic_v1.method.wrap_method( + self.cancel_operation, + default_timeout=None, + client_info=client_info, + ), + self.delete_operation: gapic_v1.method.wrap_method( + self.delete_operation, + default_timeout=None, + client_info=client_info, + ), + self.get_operation: gapic_v1.method.wrap_method( + self.get_operation, + default_timeout=None, + client_info=client_info, + ), + self.list_operations: gapic_v1.method.wrap_method( + self.list_operations, + default_timeout=None, + client_info=client_info, + ), + } + + def close(self): + """Closes resources associated with the transport. + + .. warning:: + Only call this method if the transport is NOT shared + with other clients - this may cause errors in other clients! 
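For reference, a policy such as ``retries.Retry(initial=1.0, maximum=10.0, multiplier=1.3, deadline=60.0)`` used throughout ``_prep_wrapped_methods`` above bounds each sleep by an exponentially growing cap. A small illustrative sketch of those caps (``google.api_core`` additionally jitters each actual sleep uniformly below the cap):

def backoff_caps(initial=1.0, maximum=10.0, multiplier=1.3, attempts=8):
    """Yield the per-attempt delay ceilings implied by the policy."""
    cap = initial
    for _ in range(attempts):
        yield min(cap, maximum)
        cap *= multiplier

print([round(c, 3) for c in backoff_caps()])
# [1.0, 1.3, 1.69, 2.197, 2.856, 3.713, 4.827, 6.275] -- capped at 10.0,
# with retrying abandoned once the 60.0s deadline elapses.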
+ """ + raise NotImplementedError() + + @property + def operations_client(self): + """Return the client designed to process long-running operations.""" + raise NotImplementedError() + + @property + def create_entry_type(self) -> Callable[ + [catalog.CreateEntryTypeRequest], + Union[ + operations_pb2.Operation, + Awaitable[operations_pb2.Operation] + ]]: + raise NotImplementedError() + + @property + def update_entry_type(self) -> Callable[ + [catalog.UpdateEntryTypeRequest], + Union[ + operations_pb2.Operation, + Awaitable[operations_pb2.Operation] + ]]: + raise NotImplementedError() + + @property + def delete_entry_type(self) -> Callable[ + [catalog.DeleteEntryTypeRequest], + Union[ + operations_pb2.Operation, + Awaitable[operations_pb2.Operation] + ]]: + raise NotImplementedError() + + @property + def list_entry_types(self) -> Callable[ + [catalog.ListEntryTypesRequest], + Union[ + catalog.ListEntryTypesResponse, + Awaitable[catalog.ListEntryTypesResponse] + ]]: + raise NotImplementedError() + + @property + def get_entry_type(self) -> Callable[ + [catalog.GetEntryTypeRequest], + Union[ + catalog.EntryType, + Awaitable[catalog.EntryType] + ]]: + raise NotImplementedError() + + @property + def create_aspect_type(self) -> Callable[ + [catalog.CreateAspectTypeRequest], + Union[ + operations_pb2.Operation, + Awaitable[operations_pb2.Operation] + ]]: + raise NotImplementedError() + + @property + def update_aspect_type(self) -> Callable[ + [catalog.UpdateAspectTypeRequest], + Union[ + operations_pb2.Operation, + Awaitable[operations_pb2.Operation] + ]]: + raise NotImplementedError() + + @property + def delete_aspect_type(self) -> Callable[ + [catalog.DeleteAspectTypeRequest], + Union[ + operations_pb2.Operation, + Awaitable[operations_pb2.Operation] + ]]: + raise NotImplementedError() + + @property + def list_aspect_types(self) -> Callable[ + [catalog.ListAspectTypesRequest], + Union[ + catalog.ListAspectTypesResponse, + Awaitable[catalog.ListAspectTypesResponse] + ]]: + raise NotImplementedError() + + @property + def get_aspect_type(self) -> Callable[ + [catalog.GetAspectTypeRequest], + Union[ + catalog.AspectType, + Awaitable[catalog.AspectType] + ]]: + raise NotImplementedError() + + @property + def create_entry_group(self) -> Callable[ + [catalog.CreateEntryGroupRequest], + Union[ + operations_pb2.Operation, + Awaitable[operations_pb2.Operation] + ]]: + raise NotImplementedError() + + @property + def update_entry_group(self) -> Callable[ + [catalog.UpdateEntryGroupRequest], + Union[ + operations_pb2.Operation, + Awaitable[operations_pb2.Operation] + ]]: + raise NotImplementedError() + + @property + def delete_entry_group(self) -> Callable[ + [catalog.DeleteEntryGroupRequest], + Union[ + operations_pb2.Operation, + Awaitable[operations_pb2.Operation] + ]]: + raise NotImplementedError() + + @property + def list_entry_groups(self) -> Callable[ + [catalog.ListEntryGroupsRequest], + Union[ + catalog.ListEntryGroupsResponse, + Awaitable[catalog.ListEntryGroupsResponse] + ]]: + raise NotImplementedError() + + @property + def get_entry_group(self) -> Callable[ + [catalog.GetEntryGroupRequest], + Union[ + catalog.EntryGroup, + Awaitable[catalog.EntryGroup] + ]]: + raise NotImplementedError() + + @property + def create_entry(self) -> Callable[ + [catalog.CreateEntryRequest], + Union[ + catalog.Entry, + Awaitable[catalog.Entry] + ]]: + raise NotImplementedError() + + @property + def update_entry(self) -> Callable[ + [catalog.UpdateEntryRequest], + Union[ + catalog.Entry, + Awaitable[catalog.Entry] + 
]]: + raise NotImplementedError() + + @property + def delete_entry(self) -> Callable[ + [catalog.DeleteEntryRequest], + Union[ + catalog.Entry, + Awaitable[catalog.Entry] + ]]: + raise NotImplementedError() + + @property + def list_entries(self) -> Callable[ + [catalog.ListEntriesRequest], + Union[ + catalog.ListEntriesResponse, + Awaitable[catalog.ListEntriesResponse] + ]]: + raise NotImplementedError() + + @property + def get_entry(self) -> Callable[ + [catalog.GetEntryRequest], + Union[ + catalog.Entry, + Awaitable[catalog.Entry] + ]]: + raise NotImplementedError() + + @property + def lookup_entry(self) -> Callable[ + [catalog.LookupEntryRequest], + Union[ + catalog.Entry, + Awaitable[catalog.Entry] + ]]: + raise NotImplementedError() + + @property + def search_entries(self) -> Callable[ + [catalog.SearchEntriesRequest], + Union[ + catalog.SearchEntriesResponse, + Awaitable[catalog.SearchEntriesResponse] + ]]: + raise NotImplementedError() + + @property + def create_metadata_job(self) -> Callable[ + [catalog.CreateMetadataJobRequest], + Union[ + operations_pb2.Operation, + Awaitable[operations_pb2.Operation] + ]]: + raise NotImplementedError() + + @property + def get_metadata_job(self) -> Callable[ + [catalog.GetMetadataJobRequest], + Union[ + catalog.MetadataJob, + Awaitable[catalog.MetadataJob] + ]]: + raise NotImplementedError() + + @property + def list_metadata_jobs(self) -> Callable[ + [catalog.ListMetadataJobsRequest], + Union[ + catalog.ListMetadataJobsResponse, + Awaitable[catalog.ListMetadataJobsResponse] + ]]: + raise NotImplementedError() + + @property + def cancel_metadata_job(self) -> Callable[ + [catalog.CancelMetadataJobRequest], + Union[ + empty_pb2.Empty, + Awaitable[empty_pb2.Empty] + ]]: + raise NotImplementedError() + + @property + def list_operations( + self, + ) -> Callable[ + [operations_pb2.ListOperationsRequest], + Union[operations_pb2.ListOperationsResponse, Awaitable[operations_pb2.ListOperationsResponse]], + ]: + raise NotImplementedError() + + @property + def get_operation( + self, + ) -> Callable[ + [operations_pb2.GetOperationRequest], + Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], + ]: + raise NotImplementedError() + + @property + def cancel_operation( + self, + ) -> Callable[ + [operations_pb2.CancelOperationRequest], + None, + ]: + raise NotImplementedError() + + @property + def delete_operation( + self, + ) -> Callable[ + [operations_pb2.DeleteOperationRequest], + None, + ]: + raise NotImplementedError() + + @property + def get_location(self, + ) -> Callable[ + [locations_pb2.GetLocationRequest], + Union[locations_pb2.Location, Awaitable[locations_pb2.Location]], + ]: + raise NotImplementedError() + + @property + def list_locations(self, + ) -> Callable[ + [locations_pb2.ListLocationsRequest], + Union[locations_pb2.ListLocationsResponse, Awaitable[locations_pb2.ListLocationsResponse]], + ]: + raise NotImplementedError() + + @property + def kind(self) -> str: + raise NotImplementedError() + + +__all__ = ( + 'CatalogServiceTransport', +) diff --git a/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/catalog_service/transports/grpc.py b/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/catalog_service/transports/grpc.py new file mode 100644 index 000000000000..72fef5ad3155 --- /dev/null +++ b/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/catalog_service/transports/grpc.py @@ -0,0 +1,1076 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# 
Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +import warnings +from typing import Callable, Dict, Optional, Sequence, Tuple, Union + +from google.api_core import grpc_helpers +from google.api_core import operations_v1 +from google.api_core import gapic_v1 +import google.auth # type: ignore +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore + +import grpc # type: ignore + +from google.cloud.dataplex_v1.types import catalog +from google.cloud.location import locations_pb2 # type: ignore +from google.iam.v1 import iam_policy_pb2 # type: ignore +from google.iam.v1 import policy_pb2 # type: ignore +from google.longrunning import operations_pb2 # type: ignore +from google.protobuf import empty_pb2 # type: ignore +from .base import CatalogServiceTransport, DEFAULT_CLIENT_INFO + + +class CatalogServiceGrpcTransport(CatalogServiceTransport): + """gRPC backend transport for CatalogService. + + The primary resources offered by this service are + EntryGroups, EntryTypes, AspectTypes, and Entries. They + collectively let data administrators organize, manage, secure, + and catalog data located across cloud projects in their + organization in a variety of storage systems, including Cloud + Storage and BigQuery. + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. + + It sends protocol buffers over the wire using gRPC (which is built on + top of HTTP/2); the ``grpcio`` package must be installed. + """ + _stubs: Dict[str, Callable] + + def __init__(self, *, + host: str = 'dataplex.googleapis.com', + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + channel: Optional[Union[grpc.Channel, Callable[..., grpc.Channel]]] = None, + api_mtls_endpoint: Optional[str] = None, + client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None, + client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + api_audience: Optional[str] = None, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to (default: 'dataplex.googleapis.com'). + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + This argument is ignored if a ``channel`` instance is provided. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. 
+ This argument is ignored if a ``channel`` instance is provided. + scopes (Optional(Sequence[str])): A list of scopes. This argument is + ignored if a ``channel`` instance is provided. + channel (Optional[Union[grpc.Channel, Callable[..., grpc.Channel]]]): + A ``Channel`` instance through which to make calls, or a Callable + that constructs and returns one. If set to None, ``self.create_channel`` + is used to create the channel. If a Callable is given, it will be called + with the same arguments as used in ``self.create_channel``. + api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint. + If provided, it overrides the ``host`` argument and tries to create + a mutual TLS channel with client SSL credentials from + ``client_cert_source`` or application default SSL credentials. + client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]): + Deprecated. A callback to provide client SSL certificate bytes and + private key bytes, both in PEM format. It is ignored if + ``api_mtls_endpoint`` is None. + ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials + for the grpc channel. It is ignored if a ``channel`` instance is provided. + client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]): + A callback to provide client certificate bytes and private key bytes, + both in PEM format. It is used to configure a mutual TLS channel. It is + ignored if a ``channel`` instance or ``ssl_channel_credentials`` is provided. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport + creation failed for any reason. + google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` + and ``credentials_file`` are passed. + """ + self._grpc_channel = None + self._ssl_channel_credentials = ssl_channel_credentials + self._stubs: Dict[str, Callable] = {} + self._operations_client: Optional[operations_v1.OperationsClient] = None + + if api_mtls_endpoint: + warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning) + if client_cert_source: + warnings.warn("client_cert_source is deprecated", DeprecationWarning) + + if isinstance(channel, grpc.Channel): + # Ignore credentials if a channel was passed. + credentials = None + self._ignore_credentials = True + # If a channel was explicitly provided, set it. + self._grpc_channel = channel + self._ssl_channel_credentials = None + + else: + if api_mtls_endpoint: + host = api_mtls_endpoint + + # Create SSL credentials with client_cert_source or application + # default SSL credentials. 
+ if client_cert_source: + cert, key = client_cert_source() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + else: + self._ssl_channel_credentials = SslCredentials().ssl_credentials + + else: + if client_cert_source_for_mtls and not ssl_channel_credentials: + cert, key = client_cert_source_for_mtls() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + + # The base transport sets the host, credentials and scopes + super().__init__( + host=host, + credentials=credentials, + credentials_file=credentials_file, + scopes=scopes, + quota_project_id=quota_project_id, + client_info=client_info, + always_use_jwt_access=always_use_jwt_access, + api_audience=api_audience, + ) + + if not self._grpc_channel: + # initialize with the provided callable or the default channel + channel_init = channel or type(self).create_channel + self._grpc_channel = channel_init( + self._host, + # use the credentials which are saved + credentials=self._credentials, + # Set ``credentials_file`` to ``None`` here as + # the credentials that we saved earlier should be used. + credentials_file=None, + scopes=self._scopes, + ssl_credentials=self._ssl_channel_credentials, + quota_project_id=quota_project_id, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + # Wrap messages. This must be done after self._grpc_channel exists + self._prep_wrapped_messages(client_info) + + @classmethod + def create_channel(cls, + host: str = 'dataplex.googleapis.com', + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + **kwargs) -> grpc.Channel: + """Create and return a gRPC channel object. + Args: + host (Optional[str]): The host for the channel to use. + credentials (Optional[~.Credentials]): The + authorization credentials to attach to requests. These + credentials identify this application to the service. If + none are specified, the client will attempt to ascertain + the credentials from the environment. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is mutually exclusive with credentials. + scopes (Optional[Sequence[str]]): A optional list of scopes needed for this + service. These are only used when credentials are not specified and + are passed to :func:`google.auth.default`. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + kwargs (Optional[dict]): Keyword arguments, which are passed to the + channel creation. + Returns: + grpc.Channel: A gRPC channel object. + + Raises: + google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` + and ``credentials_file`` are passed. + """ + + return grpc_helpers.create_channel( + host, + credentials=credentials, + credentials_file=credentials_file, + quota_project_id=quota_project_id, + default_scopes=cls.AUTH_SCOPES, + scopes=scopes, + default_host=cls.DEFAULT_HOST, + **kwargs + ) + + @property + def grpc_channel(self) -> grpc.Channel: + """Return the channel designed to connect to this service. + """ + return self._grpc_channel + + @property + def operations_client(self) -> operations_v1.OperationsClient: + """Create the client designed to process long-running operations. 
+ + This property caches on the instance; repeated calls return the same + client. + """ + # Quick check: Only create a new client if we do not already have one. + if self._operations_client is None: + self._operations_client = operations_v1.OperationsClient( + self.grpc_channel + ) + + # Return the client from cache. + return self._operations_client + + @property + def create_entry_type(self) -> Callable[ + [catalog.CreateEntryTypeRequest], + operations_pb2.Operation]: + r"""Return a callable for the create entry type method over gRPC. + + Creates an EntryType. + + Returns: + Callable[[~.CreateEntryTypeRequest], + ~.Operation]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'create_entry_type' not in self._stubs: + self._stubs['create_entry_type'] = self.grpc_channel.unary_unary( + '/google.cloud.dataplex.v1.CatalogService/CreateEntryType', + request_serializer=catalog.CreateEntryTypeRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs['create_entry_type'] + + @property + def update_entry_type(self) -> Callable[ + [catalog.UpdateEntryTypeRequest], + operations_pb2.Operation]: + r"""Return a callable for the update entry type method over gRPC. + + Updates an EntryType. + + Returns: + Callable[[~.UpdateEntryTypeRequest], + ~.Operation]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'update_entry_type' not in self._stubs: + self._stubs['update_entry_type'] = self.grpc_channel.unary_unary( + '/google.cloud.dataplex.v1.CatalogService/UpdateEntryType', + request_serializer=catalog.UpdateEntryTypeRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs['update_entry_type'] + + @property + def delete_entry_type(self) -> Callable[ + [catalog.DeleteEntryTypeRequest], + operations_pb2.Operation]: + r"""Return a callable for the delete entry type method over gRPC. + + Deletes an EntryType. + + Returns: + Callable[[~.DeleteEntryTypeRequest], + ~.Operation]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'delete_entry_type' not in self._stubs: + self._stubs['delete_entry_type'] = self.grpc_channel.unary_unary( + '/google.cloud.dataplex.v1.CatalogService/DeleteEntryType', + request_serializer=catalog.DeleteEntryTypeRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs['delete_entry_type'] + + @property + def list_entry_types(self) -> Callable[ + [catalog.ListEntryTypesRequest], + catalog.ListEntryTypesResponse]: + r"""Return a callable for the list entry types method over gRPC. + + Lists EntryType resources in a project and location. + + Returns: + Callable[[~.ListEntryTypesRequest], + ~.ListEntryTypesResponse]: + A function that, when called, will call the underlying RPC + on the server. 
+ """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'list_entry_types' not in self._stubs: + self._stubs['list_entry_types'] = self.grpc_channel.unary_unary( + '/google.cloud.dataplex.v1.CatalogService/ListEntryTypes', + request_serializer=catalog.ListEntryTypesRequest.serialize, + response_deserializer=catalog.ListEntryTypesResponse.deserialize, + ) + return self._stubs['list_entry_types'] + + @property + def get_entry_type(self) -> Callable[ + [catalog.GetEntryTypeRequest], + catalog.EntryType]: + r"""Return a callable for the get entry type method over gRPC. + + Gets an EntryType. + + Returns: + Callable[[~.GetEntryTypeRequest], + ~.EntryType]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'get_entry_type' not in self._stubs: + self._stubs['get_entry_type'] = self.grpc_channel.unary_unary( + '/google.cloud.dataplex.v1.CatalogService/GetEntryType', + request_serializer=catalog.GetEntryTypeRequest.serialize, + response_deserializer=catalog.EntryType.deserialize, + ) + return self._stubs['get_entry_type'] + + @property + def create_aspect_type(self) -> Callable[ + [catalog.CreateAspectTypeRequest], + operations_pb2.Operation]: + r"""Return a callable for the create aspect type method over gRPC. + + Creates an AspectType. + + Returns: + Callable[[~.CreateAspectTypeRequest], + ~.Operation]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'create_aspect_type' not in self._stubs: + self._stubs['create_aspect_type'] = self.grpc_channel.unary_unary( + '/google.cloud.dataplex.v1.CatalogService/CreateAspectType', + request_serializer=catalog.CreateAspectTypeRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs['create_aspect_type'] + + @property + def update_aspect_type(self) -> Callable[ + [catalog.UpdateAspectTypeRequest], + operations_pb2.Operation]: + r"""Return a callable for the update aspect type method over gRPC. + + Updates an AspectType. + + Returns: + Callable[[~.UpdateAspectTypeRequest], + ~.Operation]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'update_aspect_type' not in self._stubs: + self._stubs['update_aspect_type'] = self.grpc_channel.unary_unary( + '/google.cloud.dataplex.v1.CatalogService/UpdateAspectType', + request_serializer=catalog.UpdateAspectTypeRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs['update_aspect_type'] + + @property + def delete_aspect_type(self) -> Callable[ + [catalog.DeleteAspectTypeRequest], + operations_pb2.Operation]: + r"""Return a callable for the delete aspect type method over gRPC. + + Deletes an AspectType. 
+ + Returns: + Callable[[~.DeleteAspectTypeRequest], + ~.Operation]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'delete_aspect_type' not in self._stubs: + self._stubs['delete_aspect_type'] = self.grpc_channel.unary_unary( + '/google.cloud.dataplex.v1.CatalogService/DeleteAspectType', + request_serializer=catalog.DeleteAspectTypeRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs['delete_aspect_type'] + + @property + def list_aspect_types(self) -> Callable[ + [catalog.ListAspectTypesRequest], + catalog.ListAspectTypesResponse]: + r"""Return a callable for the list aspect types method over gRPC. + + Lists AspectType resources in a project and location. + + Returns: + Callable[[~.ListAspectTypesRequest], + ~.ListAspectTypesResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'list_aspect_types' not in self._stubs: + self._stubs['list_aspect_types'] = self.grpc_channel.unary_unary( + '/google.cloud.dataplex.v1.CatalogService/ListAspectTypes', + request_serializer=catalog.ListAspectTypesRequest.serialize, + response_deserializer=catalog.ListAspectTypesResponse.deserialize, + ) + return self._stubs['list_aspect_types'] + + @property + def get_aspect_type(self) -> Callable[ + [catalog.GetAspectTypeRequest], + catalog.AspectType]: + r"""Return a callable for the get aspect type method over gRPC. + + Gets an AspectType. + + Returns: + Callable[[~.GetAspectTypeRequest], + ~.AspectType]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'get_aspect_type' not in self._stubs: + self._stubs['get_aspect_type'] = self.grpc_channel.unary_unary( + '/google.cloud.dataplex.v1.CatalogService/GetAspectType', + request_serializer=catalog.GetAspectTypeRequest.serialize, + response_deserializer=catalog.AspectType.deserialize, + ) + return self._stubs['get_aspect_type'] + + @property + def create_entry_group(self) -> Callable[ + [catalog.CreateEntryGroupRequest], + operations_pb2.Operation]: + r"""Return a callable for the create entry group method over gRPC. + + Creates an EntryGroup. + + Returns: + Callable[[~.CreateEntryGroupRequest], + ~.Operation]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
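+        # Like every RPC property on this transport, the stub below is built
+        # lazily on first access and cached in ``self._stubs``, so repeated
+        # attribute access reuses the same ``unary_unary`` callable. A rough
+        # usage sketch, kept as a comment; ``transport`` and the field values
+        # are hypothetical:
+        #
+        #   op = transport.create_entry_group(catalog.CreateEntryGroupRequest(
+        #       parent="projects/my-project/locations/us-central1",
+        #       entry_group_id="my-entry-group"))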
+ if 'create_entry_group' not in self._stubs: + self._stubs['create_entry_group'] = self.grpc_channel.unary_unary( + '/google.cloud.dataplex.v1.CatalogService/CreateEntryGroup', + request_serializer=catalog.CreateEntryGroupRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs['create_entry_group'] + + @property + def update_entry_group(self) -> Callable[ + [catalog.UpdateEntryGroupRequest], + operations_pb2.Operation]: + r"""Return a callable for the update entry group method over gRPC. + + Updates an EntryGroup. + + Returns: + Callable[[~.UpdateEntryGroupRequest], + ~.Operation]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'update_entry_group' not in self._stubs: + self._stubs['update_entry_group'] = self.grpc_channel.unary_unary( + '/google.cloud.dataplex.v1.CatalogService/UpdateEntryGroup', + request_serializer=catalog.UpdateEntryGroupRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs['update_entry_group'] + + @property + def delete_entry_group(self) -> Callable[ + [catalog.DeleteEntryGroupRequest], + operations_pb2.Operation]: + r"""Return a callable for the delete entry group method over gRPC. + + Deletes an EntryGroup. + + Returns: + Callable[[~.DeleteEntryGroupRequest], + ~.Operation]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'delete_entry_group' not in self._stubs: + self._stubs['delete_entry_group'] = self.grpc_channel.unary_unary( + '/google.cloud.dataplex.v1.CatalogService/DeleteEntryGroup', + request_serializer=catalog.DeleteEntryGroupRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs['delete_entry_group'] + + @property + def list_entry_groups(self) -> Callable[ + [catalog.ListEntryGroupsRequest], + catalog.ListEntryGroupsResponse]: + r"""Return a callable for the list entry groups method over gRPC. + + Lists EntryGroup resources in a project and location. + + Returns: + Callable[[~.ListEntryGroupsRequest], + ~.ListEntryGroupsResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'list_entry_groups' not in self._stubs: + self._stubs['list_entry_groups'] = self.grpc_channel.unary_unary( + '/google.cloud.dataplex.v1.CatalogService/ListEntryGroups', + request_serializer=catalog.ListEntryGroupsRequest.serialize, + response_deserializer=catalog.ListEntryGroupsResponse.deserialize, + ) + return self._stubs['list_entry_groups'] + + @property + def get_entry_group(self) -> Callable[ + [catalog.GetEntryGroupRequest], + catalog.EntryGroup]: + r"""Return a callable for the get entry group method over gRPC. + + Gets an EntryGroup. + + Returns: + Callable[[~.GetEntryGroupRequest], + ~.EntryGroup]: + A function that, when called, will call the underlying RPC + on the server. 
+ """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'get_entry_group' not in self._stubs: + self._stubs['get_entry_group'] = self.grpc_channel.unary_unary( + '/google.cloud.dataplex.v1.CatalogService/GetEntryGroup', + request_serializer=catalog.GetEntryGroupRequest.serialize, + response_deserializer=catalog.EntryGroup.deserialize, + ) + return self._stubs['get_entry_group'] + + @property + def create_entry(self) -> Callable[ + [catalog.CreateEntryRequest], + catalog.Entry]: + r"""Return a callable for the create entry method over gRPC. + + Creates an Entry. + + Returns: + Callable[[~.CreateEntryRequest], + ~.Entry]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'create_entry' not in self._stubs: + self._stubs['create_entry'] = self.grpc_channel.unary_unary( + '/google.cloud.dataplex.v1.CatalogService/CreateEntry', + request_serializer=catalog.CreateEntryRequest.serialize, + response_deserializer=catalog.Entry.deserialize, + ) + return self._stubs['create_entry'] + + @property + def update_entry(self) -> Callable[ + [catalog.UpdateEntryRequest], + catalog.Entry]: + r"""Return a callable for the update entry method over gRPC. + + Updates an Entry. + + Returns: + Callable[[~.UpdateEntryRequest], + ~.Entry]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'update_entry' not in self._stubs: + self._stubs['update_entry'] = self.grpc_channel.unary_unary( + '/google.cloud.dataplex.v1.CatalogService/UpdateEntry', + request_serializer=catalog.UpdateEntryRequest.serialize, + response_deserializer=catalog.Entry.deserialize, + ) + return self._stubs['update_entry'] + + @property + def delete_entry(self) -> Callable[ + [catalog.DeleteEntryRequest], + catalog.Entry]: + r"""Return a callable for the delete entry method over gRPC. + + Deletes an Entry. + + Returns: + Callable[[~.DeleteEntryRequest], + ~.Entry]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'delete_entry' not in self._stubs: + self._stubs['delete_entry'] = self.grpc_channel.unary_unary( + '/google.cloud.dataplex.v1.CatalogService/DeleteEntry', + request_serializer=catalog.DeleteEntryRequest.serialize, + response_deserializer=catalog.Entry.deserialize, + ) + return self._stubs['delete_entry'] + + @property + def list_entries(self) -> Callable[ + [catalog.ListEntriesRequest], + catalog.ListEntriesResponse]: + r"""Return a callable for the list entries method over gRPC. + + Lists Entries within an EntryGroup. + + Returns: + Callable[[~.ListEntriesRequest], + ~.ListEntriesResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. 
+ # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'list_entries' not in self._stubs: + self._stubs['list_entries'] = self.grpc_channel.unary_unary( + '/google.cloud.dataplex.v1.CatalogService/ListEntries', + request_serializer=catalog.ListEntriesRequest.serialize, + response_deserializer=catalog.ListEntriesResponse.deserialize, + ) + return self._stubs['list_entries'] + + @property + def get_entry(self) -> Callable[ + [catalog.GetEntryRequest], + catalog.Entry]: + r"""Return a callable for the get entry method over gRPC. + + Gets an Entry. + + **Caution**: The BigQuery metadata that is stored in Dataplex + Catalog is changing. For more information, see `Changes to + BigQuery metadata stored in Dataplex + Catalog `__. + + Returns: + Callable[[~.GetEntryRequest], + ~.Entry]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'get_entry' not in self._stubs: + self._stubs['get_entry'] = self.grpc_channel.unary_unary( + '/google.cloud.dataplex.v1.CatalogService/GetEntry', + request_serializer=catalog.GetEntryRequest.serialize, + response_deserializer=catalog.Entry.deserialize, + ) + return self._stubs['get_entry'] + + @property + def lookup_entry(self) -> Callable[ + [catalog.LookupEntryRequest], + catalog.Entry]: + r"""Return a callable for the lookup entry method over gRPC. + + Looks up a single Entry by name using the permission on the + source system. + + **Caution**: The BigQuery metadata that is stored in Dataplex + Catalog is changing. For more information, see `Changes to + BigQuery metadata stored in Dataplex + Catalog `__. + + Returns: + Callable[[~.LookupEntryRequest], + ~.Entry]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'lookup_entry' not in self._stubs: + self._stubs['lookup_entry'] = self.grpc_channel.unary_unary( + '/google.cloud.dataplex.v1.CatalogService/LookupEntry', + request_serializer=catalog.LookupEntryRequest.serialize, + response_deserializer=catalog.Entry.deserialize, + ) + return self._stubs['lookup_entry'] + + @property + def search_entries(self) -> Callable[ + [catalog.SearchEntriesRequest], + catalog.SearchEntriesResponse]: + r"""Return a callable for the search entries method over gRPC. + + Searches for Entries matching the given query and + scope. + + Returns: + Callable[[~.SearchEntriesRequest], + ~.SearchEntriesResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
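+        # Comment-only sketch of calling this stub directly; the request field
+        # names come from ``catalog.SearchEntriesRequest`` in this package, but
+        # the values and query syntax are assumptions:
+        #
+        #   resp = transport.search_entries(catalog.SearchEntriesRequest(
+        #       name="projects/my-project/locations/global",
+        #       query="displayname:orders"))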
+ if 'search_entries' not in self._stubs: + self._stubs['search_entries'] = self.grpc_channel.unary_unary( + '/google.cloud.dataplex.v1.CatalogService/SearchEntries', + request_serializer=catalog.SearchEntriesRequest.serialize, + response_deserializer=catalog.SearchEntriesResponse.deserialize, + ) + return self._stubs['search_entries'] + + @property + def create_metadata_job(self) -> Callable[ + [catalog.CreateMetadataJobRequest], + operations_pb2.Operation]: + r"""Return a callable for the create metadata job method over gRPC. + + Creates a metadata job. For example, use a metadata + job to import Dataplex Catalog entries and aspects from + a third-party system into Dataplex. + + Returns: + Callable[[~.CreateMetadataJobRequest], + ~.Operation]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'create_metadata_job' not in self._stubs: + self._stubs['create_metadata_job'] = self.grpc_channel.unary_unary( + '/google.cloud.dataplex.v1.CatalogService/CreateMetadataJob', + request_serializer=catalog.CreateMetadataJobRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs['create_metadata_job'] + + @property + def get_metadata_job(self) -> Callable[ + [catalog.GetMetadataJobRequest], + catalog.MetadataJob]: + r"""Return a callable for the get metadata job method over gRPC. + + Gets a metadata job. + + Returns: + Callable[[~.GetMetadataJobRequest], + ~.MetadataJob]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'get_metadata_job' not in self._stubs: + self._stubs['get_metadata_job'] = self.grpc_channel.unary_unary( + '/google.cloud.dataplex.v1.CatalogService/GetMetadataJob', + request_serializer=catalog.GetMetadataJobRequest.serialize, + response_deserializer=catalog.MetadataJob.deserialize, + ) + return self._stubs['get_metadata_job'] + + @property + def list_metadata_jobs(self) -> Callable[ + [catalog.ListMetadataJobsRequest], + catalog.ListMetadataJobsResponse]: + r"""Return a callable for the list metadata jobs method over gRPC. + + Lists metadata jobs. + + Returns: + Callable[[~.ListMetadataJobsRequest], + ~.ListMetadataJobsResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'list_metadata_jobs' not in self._stubs: + self._stubs['list_metadata_jobs'] = self.grpc_channel.unary_unary( + '/google.cloud.dataplex.v1.CatalogService/ListMetadataJobs', + request_serializer=catalog.ListMetadataJobsRequest.serialize, + response_deserializer=catalog.ListMetadataJobsResponse.deserialize, + ) + return self._stubs['list_metadata_jobs'] + + @property + def cancel_metadata_job(self) -> Callable[ + [catalog.CancelMetadataJobRequest], + empty_pb2.Empty]: + r"""Return a callable for the cancel metadata job method over gRPC. + + Cancels a metadata job. + + If you cancel a metadata import job that is in progress, + the changes in the job might be partially applied. 
We + recommend that you reset the state of the entry groups + in your project by running another metadata job that + reverts the changes from the canceled job. + + Returns: + Callable[[~.CancelMetadataJobRequest], + ~.Empty]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'cancel_metadata_job' not in self._stubs: + self._stubs['cancel_metadata_job'] = self.grpc_channel.unary_unary( + '/google.cloud.dataplex.v1.CatalogService/CancelMetadataJob', + request_serializer=catalog.CancelMetadataJobRequest.serialize, + response_deserializer=empty_pb2.Empty.FromString, + ) + return self._stubs['cancel_metadata_job'] + + def close(self): + self.grpc_channel.close() + + @property + def delete_operation( + self, + ) -> Callable[[operations_pb2.DeleteOperationRequest], None]: + r"""Return a callable for the delete_operation method over gRPC. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "delete_operation" not in self._stubs: + self._stubs["delete_operation"] = self.grpc_channel.unary_unary( + "/google.longrunning.Operations/DeleteOperation", + request_serializer=operations_pb2.DeleteOperationRequest.SerializeToString, + response_deserializer=None, + ) + return self._stubs["delete_operation"] + + @property + def cancel_operation( + self, + ) -> Callable[[operations_pb2.CancelOperationRequest], None]: + r"""Return a callable for the cancel_operation method over gRPC. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "cancel_operation" not in self._stubs: + self._stubs["cancel_operation"] = self.grpc_channel.unary_unary( + "/google.longrunning.Operations/CancelOperation", + request_serializer=operations_pb2.CancelOperationRequest.SerializeToString, + response_deserializer=None, + ) + return self._stubs["cancel_operation"] + + @property + def get_operation( + self, + ) -> Callable[[operations_pb2.GetOperationRequest], operations_pb2.Operation]: + r"""Return a callable for the get_operation method over gRPC. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_operation" not in self._stubs: + self._stubs["get_operation"] = self.grpc_channel.unary_unary( + "/google.longrunning.Operations/GetOperation", + request_serializer=operations_pb2.GetOperationRequest.SerializeToString, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["get_operation"] + + @property + def list_operations( + self, + ) -> Callable[[operations_pb2.ListOperationsRequest], operations_pb2.ListOperationsResponse]: + r"""Return a callable for the list_operations method over gRPC. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
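+        # The ``google.longrunning.Operations`` stubs below share the catalog
+        # channel; a comment-only sketch of checking an operation returned by
+        # one of the long-running RPCs above (``lro_name`` is hypothetical):
+        #
+        #   op = transport.operations_client.get_operation(lro_name)
+        #   finished = op.done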
+        if "list_operations" not in self._stubs:
+            self._stubs["list_operations"] = self.grpc_channel.unary_unary(
+                "/google.longrunning.Operations/ListOperations",
+                request_serializer=operations_pb2.ListOperationsRequest.SerializeToString,
+                response_deserializer=operations_pb2.ListOperationsResponse.FromString,
+            )
+        return self._stubs["list_operations"]
+
+    @property
+    def list_locations(
+        self,
+    ) -> Callable[[locations_pb2.ListLocationsRequest], locations_pb2.ListLocationsResponse]:
+        r"""Return a callable for the list locations method over gRPC.
+        """
+        # Generate a "stub function" on-the-fly which will actually make
+        # the request.
+        # gRPC handles serialization and deserialization, so we just need
+        # to pass in the functions for each.
+        if "list_locations" not in self._stubs:
+            self._stubs["list_locations"] = self.grpc_channel.unary_unary(
+                "/google.cloud.location.Locations/ListLocations",
+                request_serializer=locations_pb2.ListLocationsRequest.SerializeToString,
+                response_deserializer=locations_pb2.ListLocationsResponse.FromString,
+            )
+        return self._stubs["list_locations"]
+
+    @property
+    def get_location(
+        self,
+    ) -> Callable[[locations_pb2.GetLocationRequest], locations_pb2.Location]:
+        r"""Return a callable for the get location method over gRPC.
+        """
+        # Generate a "stub function" on-the-fly which will actually make
+        # the request.
+        # gRPC handles serialization and deserialization, so we just need
+        # to pass in the functions for each.
+        if "get_location" not in self._stubs:
+            self._stubs["get_location"] = self.grpc_channel.unary_unary(
+                "/google.cloud.location.Locations/GetLocation",
+                request_serializer=locations_pb2.GetLocationRequest.SerializeToString,
+                response_deserializer=locations_pb2.Location.FromString,
+            )
+        return self._stubs["get_location"]
+
+    @property
+    def kind(self) -> str:
+        return "grpc"
+
+
+__all__ = (
+    'CatalogServiceGrpcTransport',
+)
diff --git a/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/catalog_service/transports/grpc_asyncio.py b/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/catalog_service/transports/grpc_asyncio.py
new file mode 100644
index 000000000000..20c5361626d6
--- /dev/null
+++ b/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/catalog_service/transports/grpc_asyncio.py
@@ -0,0 +1,1362 @@
+# -*- coding: utf-8 -*-
+# Copyright 2024 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
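+#
+# A minimal construction sketch for the transport defined in this module,
+# kept as a comment because it is illustrative only (assumes application
+# default credentials and a running asyncio event loop):
+#
+#   transport = CatalogServiceGrpcAsyncIOTransport(
+#       host='dataplex.googleapis.com')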
+#
+import inspect
+import warnings
+from typing import Awaitable, Callable, Dict, Optional, Sequence, Tuple, Union
+
+from google.api_core import gapic_v1
+from google.api_core import grpc_helpers_async
+from google.api_core import exceptions as core_exceptions
+from google.api_core import retry_async as retries
+from google.api_core import operations_v1
+from google.auth import credentials as ga_credentials  # type: ignore
+from google.auth.transport.grpc import SslCredentials  # type: ignore
+
+import grpc  # type: ignore
+from grpc.experimental import aio  # type: ignore
+
+from google.cloud.dataplex_v1.types import catalog
+from google.cloud.location import locations_pb2  # type: ignore
+from google.iam.v1 import iam_policy_pb2  # type: ignore
+from google.iam.v1 import policy_pb2  # type: ignore
+from google.longrunning import operations_pb2  # type: ignore
+from google.protobuf import empty_pb2  # type: ignore
+from .base import CatalogServiceTransport, DEFAULT_CLIENT_INFO
+from .grpc import CatalogServiceGrpcTransport
+
+
+class CatalogServiceGrpcAsyncIOTransport(CatalogServiceTransport):
+    """gRPC AsyncIO backend transport for CatalogService.
+
+    The primary resources offered by this service are
+    EntryGroups, EntryTypes, AspectTypes, and Entries. They
+    collectively let data administrators organize, manage, secure,
+    and catalog data located across cloud projects in their
+    organization in a variety of storage systems, including Cloud
+    Storage and BigQuery.
+
+    This class defines the same methods as the primary client, so the
+    primary client can load the underlying transport implementation
+    and call it.
+
+    It sends protocol buffers over the wire using gRPC (which is built on
+    top of HTTP/2); the ``grpcio`` package must be installed.
+    """
+
+    _grpc_channel: aio.Channel
+    _stubs: Dict[str, Callable] = {}
+
+    @classmethod
+    def create_channel(cls,
+                       host: str = 'dataplex.googleapis.com',
+                       credentials: Optional[ga_credentials.Credentials] = None,
+                       credentials_file: Optional[str] = None,
+                       scopes: Optional[Sequence[str]] = None,
+                       quota_project_id: Optional[str] = None,
+                       **kwargs) -> aio.Channel:
+        """Create and return a gRPC AsyncIO channel object.
+        Args:
+            host (Optional[str]): The host for the channel to use.
+            credentials (Optional[~.Credentials]): The
+                authorization credentials to attach to requests. These
+                credentials identify this application to the service. If
+                none are specified, the client will attempt to ascertain
+                the credentials from the environment.
+            credentials_file (Optional[str]): A file with credentials that can
+                be loaded with :func:`google.auth.load_credentials_from_file`.
+            scopes (Optional[Sequence[str]]): An optional list of scopes needed for this
+                service. These are only used when credentials are not specified and
+                are passed to :func:`google.auth.default`.
+            quota_project_id (Optional[str]): An optional project to use for billing
+                and quota.
+            kwargs (Optional[dict]): Keyword arguments, which are passed to the
+                channel creation.
+        Returns:
+            aio.Channel: A gRPC AsyncIO channel object.
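+
+        Raises:
+            google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials``
+                and ``credentials_file`` are passed.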
+        """
+
+        return grpc_helpers_async.create_channel(
+            host,
+            credentials=credentials,
+            credentials_file=credentials_file,
+            quota_project_id=quota_project_id,
+            default_scopes=cls.AUTH_SCOPES,
+            scopes=scopes,
+            default_host=cls.DEFAULT_HOST,
+            **kwargs
+        )
+
+    def __init__(self, *,
+            host: str = 'dataplex.googleapis.com',
+            credentials: Optional[ga_credentials.Credentials] = None,
+            credentials_file: Optional[str] = None,
+            scopes: Optional[Sequence[str]] = None,
+            channel: Optional[Union[aio.Channel, Callable[..., aio.Channel]]] = None,
+            api_mtls_endpoint: Optional[str] = None,
+            client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None,
+            ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None,
+            client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None,
+            quota_project_id: Optional[str] = None,
+            client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO,
+            always_use_jwt_access: Optional[bool] = False,
+            api_audience: Optional[str] = None,
+            ) -> None:
+        """Instantiate the transport.
+
+        Args:
+            host (Optional[str]):
+                The hostname to connect to (default: 'dataplex.googleapis.com').
+            credentials (Optional[google.auth.credentials.Credentials]): The
+                authorization credentials to attach to requests. These
+                credentials identify the application to the service; if none
+                are specified, the client will attempt to ascertain the
+                credentials from the environment.
+                This argument is ignored if a ``channel`` instance is provided.
+            credentials_file (Optional[str]): A file with credentials that can
+                be loaded with :func:`google.auth.load_credentials_from_file`.
+                This argument is ignored if a ``channel`` instance is provided.
+            scopes (Optional[Sequence[str]]): An optional list of scopes needed for this
+                service. These are only used when credentials are not specified and
+                are passed to :func:`google.auth.default`.
+            channel (Optional[Union[aio.Channel, Callable[..., aio.Channel]]]):
+                A ``Channel`` instance through which to make calls, or a Callable
+                that constructs and returns one. If set to None, ``self.create_channel``
+                is used to create the channel. If a Callable is given, it will be called
+                with the same arguments as used in ``self.create_channel``.
+            api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint.
+                If provided, it overrides the ``host`` argument and tries to create
+                a mutual TLS channel with client SSL credentials from
+                ``client_cert_source`` or application default SSL credentials.
+            client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]):
+                Deprecated. A callback to provide client SSL certificate bytes and
+                private key bytes, both in PEM format. It is ignored if
+                ``api_mtls_endpoint`` is None.
+            ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials
+                for the grpc channel. It is ignored if a ``channel`` instance is provided.
+            client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]):
+                A callback to provide client certificate bytes and private key bytes,
+                both in PEM format. It is used to configure a mutual TLS channel. It is
+                ignored if a ``channel`` instance or ``ssl_channel_credentials`` is provided.
+            quota_project_id (Optional[str]): An optional project to use for billing
+                and quota.
+            client_info (google.api_core.gapic_v1.client_info.ClientInfo):
+                The client info used to send a user-agent string along with
+                API requests. If ``None``, then default info will be used.
+                Generally, you only need to set this if you're developing
+                your own client library.
+            always_use_jwt_access (Optional[bool]): Whether a self-signed JWT should
+                be used for service account credentials.
+
+        Raises:
+            google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport
+                creation failed for any reason.
+            google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials``
+                and ``credentials_file`` are passed.
+        """
+        self._grpc_channel = None
+        self._ssl_channel_credentials = ssl_channel_credentials
+        self._stubs: Dict[str, Callable] = {}
+        self._operations_client: Optional[operations_v1.OperationsAsyncClient] = None
+
+        if api_mtls_endpoint:
+            warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning)
+        if client_cert_source:
+            warnings.warn("client_cert_source is deprecated", DeprecationWarning)
+
+        if isinstance(channel, aio.Channel):
+            # Ignore credentials if a channel was passed.
+            credentials = None
+            self._ignore_credentials = True
+            # If a channel was explicitly provided, set it.
+            self._grpc_channel = channel
+            self._ssl_channel_credentials = None
+        else:
+            if api_mtls_endpoint:
+                host = api_mtls_endpoint
+
+                # Create SSL credentials with client_cert_source or application
+                # default SSL credentials.
+                if client_cert_source:
+                    cert, key = client_cert_source()
+                    self._ssl_channel_credentials = grpc.ssl_channel_credentials(
+                        certificate_chain=cert, private_key=key
+                    )
+                else:
+                    self._ssl_channel_credentials = SslCredentials().ssl_credentials
+
+            else:
+                if client_cert_source_for_mtls and not ssl_channel_credentials:
+                    cert, key = client_cert_source_for_mtls()
+                    self._ssl_channel_credentials = grpc.ssl_channel_credentials(
+                        certificate_chain=cert, private_key=key
+                    )
+
+        # The base transport sets the host, credentials and scopes
+        super().__init__(
+            host=host,
+            credentials=credentials,
+            credentials_file=credentials_file,
+            scopes=scopes,
+            quota_project_id=quota_project_id,
+            client_info=client_info,
+            always_use_jwt_access=always_use_jwt_access,
+            api_audience=api_audience,
+        )
+
+        if not self._grpc_channel:
+            # initialize with the provided callable or the default channel
+            channel_init = channel or type(self).create_channel
+            self._grpc_channel = channel_init(
+                self._host,
+                # use the credentials which are saved
+                credentials=self._credentials,
+                # Set ``credentials_file`` to ``None`` here as
+                # the credentials that we saved earlier should be used.
+                credentials_file=None,
+                scopes=self._scopes,
+                ssl_credentials=self._ssl_channel_credentials,
+                quota_project_id=quota_project_id,
+                options=[
+                    ("grpc.max_send_message_length", -1),
+                    ("grpc.max_receive_message_length", -1),
+                ],
+            )
+
+        # Wrap messages. This must be done after self._grpc_channel exists
+        self._wrap_with_kind = "kind" in inspect.signature(gapic_v1.method_async.wrap_method).parameters
+        self._prep_wrapped_messages(client_info)
+
+    @property
+    def grpc_channel(self) -> aio.Channel:
+        """Create the channel designed to connect to this service.
+
+        This property caches on the instance; repeated calls return
+        the same channel.
+        """
+        # Return the channel from cache.
+        return self._grpc_channel
+
+    @property
+    def operations_client(self) -> operations_v1.OperationsAsyncClient:
+        """Create the client designed to process long-running operations.
+
+        This property caches on the instance; repeated calls return the same
+        client.
+        """
+        # Quick check: Only create a new client if we do not already have one.
+ if self._operations_client is None: + self._operations_client = operations_v1.OperationsAsyncClient( + self.grpc_channel + ) + + # Return the client from cache. + return self._operations_client + + @property + def create_entry_type(self) -> Callable[ + [catalog.CreateEntryTypeRequest], + Awaitable[operations_pb2.Operation]]: + r"""Return a callable for the create entry type method over gRPC. + + Creates an EntryType. + + Returns: + Callable[[~.CreateEntryTypeRequest], + Awaitable[~.Operation]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'create_entry_type' not in self._stubs: + self._stubs['create_entry_type'] = self.grpc_channel.unary_unary( + '/google.cloud.dataplex.v1.CatalogService/CreateEntryType', + request_serializer=catalog.CreateEntryTypeRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs['create_entry_type'] + + @property + def update_entry_type(self) -> Callable[ + [catalog.UpdateEntryTypeRequest], + Awaitable[operations_pb2.Operation]]: + r"""Return a callable for the update entry type method over gRPC. + + Updates an EntryType. + + Returns: + Callable[[~.UpdateEntryTypeRequest], + Awaitable[~.Operation]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'update_entry_type' not in self._stubs: + self._stubs['update_entry_type'] = self.grpc_channel.unary_unary( + '/google.cloud.dataplex.v1.CatalogService/UpdateEntryType', + request_serializer=catalog.UpdateEntryTypeRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs['update_entry_type'] + + @property + def delete_entry_type(self) -> Callable[ + [catalog.DeleteEntryTypeRequest], + Awaitable[operations_pb2.Operation]]: + r"""Return a callable for the delete entry type method over gRPC. + + Deletes an EntryType. + + Returns: + Callable[[~.DeleteEntryTypeRequest], + Awaitable[~.Operation]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'delete_entry_type' not in self._stubs: + self._stubs['delete_entry_type'] = self.grpc_channel.unary_unary( + '/google.cloud.dataplex.v1.CatalogService/DeleteEntryType', + request_serializer=catalog.DeleteEntryTypeRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs['delete_entry_type'] + + @property + def list_entry_types(self) -> Callable[ + [catalog.ListEntryTypesRequest], + Awaitable[catalog.ListEntryTypesResponse]]: + r"""Return a callable for the list entry types method over gRPC. + + Lists EntryType resources in a project and location. + + Returns: + Callable[[~.ListEntryTypesRequest], + Awaitable[~.ListEntryTypesResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. 
+ # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'list_entry_types' not in self._stubs: + self._stubs['list_entry_types'] = self.grpc_channel.unary_unary( + '/google.cloud.dataplex.v1.CatalogService/ListEntryTypes', + request_serializer=catalog.ListEntryTypesRequest.serialize, + response_deserializer=catalog.ListEntryTypesResponse.deserialize, + ) + return self._stubs['list_entry_types'] + + @property + def get_entry_type(self) -> Callable[ + [catalog.GetEntryTypeRequest], + Awaitable[catalog.EntryType]]: + r"""Return a callable for the get entry type method over gRPC. + + Gets an EntryType. + + Returns: + Callable[[~.GetEntryTypeRequest], + Awaitable[~.EntryType]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'get_entry_type' not in self._stubs: + self._stubs['get_entry_type'] = self.grpc_channel.unary_unary( + '/google.cloud.dataplex.v1.CatalogService/GetEntryType', + request_serializer=catalog.GetEntryTypeRequest.serialize, + response_deserializer=catalog.EntryType.deserialize, + ) + return self._stubs['get_entry_type'] + + @property + def create_aspect_type(self) -> Callable[ + [catalog.CreateAspectTypeRequest], + Awaitable[operations_pb2.Operation]]: + r"""Return a callable for the create aspect type method over gRPC. + + Creates an AspectType. + + Returns: + Callable[[~.CreateAspectTypeRequest], + Awaitable[~.Operation]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'create_aspect_type' not in self._stubs: + self._stubs['create_aspect_type'] = self.grpc_channel.unary_unary( + '/google.cloud.dataplex.v1.CatalogService/CreateAspectType', + request_serializer=catalog.CreateAspectTypeRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs['create_aspect_type'] + + @property + def update_aspect_type(self) -> Callable[ + [catalog.UpdateAspectTypeRequest], + Awaitable[operations_pb2.Operation]]: + r"""Return a callable for the update aspect type method over gRPC. + + Updates an AspectType. + + Returns: + Callable[[~.UpdateAspectTypeRequest], + Awaitable[~.Operation]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'update_aspect_type' not in self._stubs: + self._stubs['update_aspect_type'] = self.grpc_channel.unary_unary( + '/google.cloud.dataplex.v1.CatalogService/UpdateAspectType', + request_serializer=catalog.UpdateAspectTypeRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs['update_aspect_type'] + + @property + def delete_aspect_type(self) -> Callable[ + [catalog.DeleteAspectTypeRequest], + Awaitable[operations_pb2.Operation]]: + r"""Return a callable for the delete aspect type method over gRPC. + + Deletes an AspectType. 
+ + Returns: + Callable[[~.DeleteAspectTypeRequest], + Awaitable[~.Operation]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'delete_aspect_type' not in self._stubs: + self._stubs['delete_aspect_type'] = self.grpc_channel.unary_unary( + '/google.cloud.dataplex.v1.CatalogService/DeleteAspectType', + request_serializer=catalog.DeleteAspectTypeRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs['delete_aspect_type'] + + @property + def list_aspect_types(self) -> Callable[ + [catalog.ListAspectTypesRequest], + Awaitable[catalog.ListAspectTypesResponse]]: + r"""Return a callable for the list aspect types method over gRPC. + + Lists AspectType resources in a project and location. + + Returns: + Callable[[~.ListAspectTypesRequest], + Awaitable[~.ListAspectTypesResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'list_aspect_types' not in self._stubs: + self._stubs['list_aspect_types'] = self.grpc_channel.unary_unary( + '/google.cloud.dataplex.v1.CatalogService/ListAspectTypes', + request_serializer=catalog.ListAspectTypesRequest.serialize, + response_deserializer=catalog.ListAspectTypesResponse.deserialize, + ) + return self._stubs['list_aspect_types'] + + @property + def get_aspect_type(self) -> Callable[ + [catalog.GetAspectTypeRequest], + Awaitable[catalog.AspectType]]: + r"""Return a callable for the get aspect type method over gRPC. + + Gets an AspectType. + + Returns: + Callable[[~.GetAspectTypeRequest], + Awaitable[~.AspectType]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'get_aspect_type' not in self._stubs: + self._stubs['get_aspect_type'] = self.grpc_channel.unary_unary( + '/google.cloud.dataplex.v1.CatalogService/GetAspectType', + request_serializer=catalog.GetAspectTypeRequest.serialize, + response_deserializer=catalog.AspectType.deserialize, + ) + return self._stubs['get_aspect_type'] + + @property + def create_entry_group(self) -> Callable[ + [catalog.CreateEntryGroupRequest], + Awaitable[operations_pb2.Operation]]: + r"""Return a callable for the create entry group method over gRPC. + + Creates an EntryGroup. + + Returns: + Callable[[~.CreateEntryGroupRequest], + Awaitable[~.Operation]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
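+        # Unlike the synchronous transport, the cached stub here returns an
+        # awaitable; a comment-only sketch (``transport`` and ``request`` are
+        # hypothetical):
+        #
+        #   op = await transport.create_entry_group(request)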
+ if 'create_entry_group' not in self._stubs: + self._stubs['create_entry_group'] = self.grpc_channel.unary_unary( + '/google.cloud.dataplex.v1.CatalogService/CreateEntryGroup', + request_serializer=catalog.CreateEntryGroupRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs['create_entry_group'] + + @property + def update_entry_group(self) -> Callable[ + [catalog.UpdateEntryGroupRequest], + Awaitable[operations_pb2.Operation]]: + r"""Return a callable for the update entry group method over gRPC. + + Updates an EntryGroup. + + Returns: + Callable[[~.UpdateEntryGroupRequest], + Awaitable[~.Operation]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'update_entry_group' not in self._stubs: + self._stubs['update_entry_group'] = self.grpc_channel.unary_unary( + '/google.cloud.dataplex.v1.CatalogService/UpdateEntryGroup', + request_serializer=catalog.UpdateEntryGroupRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs['update_entry_group'] + + @property + def delete_entry_group(self) -> Callable[ + [catalog.DeleteEntryGroupRequest], + Awaitable[operations_pb2.Operation]]: + r"""Return a callable for the delete entry group method over gRPC. + + Deletes an EntryGroup. + + Returns: + Callable[[~.DeleteEntryGroupRequest], + Awaitable[~.Operation]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'delete_entry_group' not in self._stubs: + self._stubs['delete_entry_group'] = self.grpc_channel.unary_unary( + '/google.cloud.dataplex.v1.CatalogService/DeleteEntryGroup', + request_serializer=catalog.DeleteEntryGroupRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs['delete_entry_group'] + + @property + def list_entry_groups(self) -> Callable[ + [catalog.ListEntryGroupsRequest], + Awaitable[catalog.ListEntryGroupsResponse]]: + r"""Return a callable for the list entry groups method over gRPC. + + Lists EntryGroup resources in a project and location. + + Returns: + Callable[[~.ListEntryGroupsRequest], + Awaitable[~.ListEntryGroupsResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'list_entry_groups' not in self._stubs: + self._stubs['list_entry_groups'] = self.grpc_channel.unary_unary( + '/google.cloud.dataplex.v1.CatalogService/ListEntryGroups', + request_serializer=catalog.ListEntryGroupsRequest.serialize, + response_deserializer=catalog.ListEntryGroupsResponse.deserialize, + ) + return self._stubs['list_entry_groups'] + + @property + def get_entry_group(self) -> Callable[ + [catalog.GetEntryGroupRequest], + Awaitable[catalog.EntryGroup]]: + r"""Return a callable for the get entry group method over gRPC. + + Gets an EntryGroup. 
+ + Returns: + Callable[[~.GetEntryGroupRequest], + Awaitable[~.EntryGroup]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'get_entry_group' not in self._stubs: + self._stubs['get_entry_group'] = self.grpc_channel.unary_unary( + '/google.cloud.dataplex.v1.CatalogService/GetEntryGroup', + request_serializer=catalog.GetEntryGroupRequest.serialize, + response_deserializer=catalog.EntryGroup.deserialize, + ) + return self._stubs['get_entry_group'] + + @property + def create_entry(self) -> Callable[ + [catalog.CreateEntryRequest], + Awaitable[catalog.Entry]]: + r"""Return a callable for the create entry method over gRPC. + + Creates an Entry. + + Returns: + Callable[[~.CreateEntryRequest], + Awaitable[~.Entry]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'create_entry' not in self._stubs: + self._stubs['create_entry'] = self.grpc_channel.unary_unary( + '/google.cloud.dataplex.v1.CatalogService/CreateEntry', + request_serializer=catalog.CreateEntryRequest.serialize, + response_deserializer=catalog.Entry.deserialize, + ) + return self._stubs['create_entry'] + + @property + def update_entry(self) -> Callable[ + [catalog.UpdateEntryRequest], + Awaitable[catalog.Entry]]: + r"""Return a callable for the update entry method over gRPC. + + Updates an Entry. + + Returns: + Callable[[~.UpdateEntryRequest], + Awaitable[~.Entry]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'update_entry' not in self._stubs: + self._stubs['update_entry'] = self.grpc_channel.unary_unary( + '/google.cloud.dataplex.v1.CatalogService/UpdateEntry', + request_serializer=catalog.UpdateEntryRequest.serialize, + response_deserializer=catalog.Entry.deserialize, + ) + return self._stubs['update_entry'] + + @property + def delete_entry(self) -> Callable[ + [catalog.DeleteEntryRequest], + Awaitable[catalog.Entry]]: + r"""Return a callable for the delete entry method over gRPC. + + Deletes an Entry. + + Returns: + Callable[[~.DeleteEntryRequest], + Awaitable[~.Entry]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'delete_entry' not in self._stubs: + self._stubs['delete_entry'] = self.grpc_channel.unary_unary( + '/google.cloud.dataplex.v1.CatalogService/DeleteEntry', + request_serializer=catalog.DeleteEntryRequest.serialize, + response_deserializer=catalog.Entry.deserialize, + ) + return self._stubs['delete_entry'] + + @property + def list_entries(self) -> Callable[ + [catalog.ListEntriesRequest], + Awaitable[catalog.ListEntriesResponse]]: + r"""Return a callable for the list entries method over gRPC. + + Lists Entries within an EntryGroup. 
+ + Returns: + Callable[[~.ListEntriesRequest], + Awaitable[~.ListEntriesResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'list_entries' not in self._stubs: + self._stubs['list_entries'] = self.grpc_channel.unary_unary( + '/google.cloud.dataplex.v1.CatalogService/ListEntries', + request_serializer=catalog.ListEntriesRequest.serialize, + response_deserializer=catalog.ListEntriesResponse.deserialize, + ) + return self._stubs['list_entries'] + + @property + def get_entry(self) -> Callable[ + [catalog.GetEntryRequest], + Awaitable[catalog.Entry]]: + r"""Return a callable for the get entry method over gRPC. + + Gets an Entry. + + **Caution**: The BigQuery metadata that is stored in Dataplex + Catalog is changing. For more information, see `Changes to + BigQuery metadata stored in Dataplex + Catalog `__. + + Returns: + Callable[[~.GetEntryRequest], + Awaitable[~.Entry]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'get_entry' not in self._stubs: + self._stubs['get_entry'] = self.grpc_channel.unary_unary( + '/google.cloud.dataplex.v1.CatalogService/GetEntry', + request_serializer=catalog.GetEntryRequest.serialize, + response_deserializer=catalog.Entry.deserialize, + ) + return self._stubs['get_entry'] + + @property + def lookup_entry(self) -> Callable[ + [catalog.LookupEntryRequest], + Awaitable[catalog.Entry]]: + r"""Return a callable for the lookup entry method over gRPC. + + Looks up a single Entry by name using the permission on the + source system. + + **Caution**: The BigQuery metadata that is stored in Dataplex + Catalog is changing. For more information, see `Changes to + BigQuery metadata stored in Dataplex + Catalog `__. + + Returns: + Callable[[~.LookupEntryRequest], + Awaitable[~.Entry]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'lookup_entry' not in self._stubs: + self._stubs['lookup_entry'] = self.grpc_channel.unary_unary( + '/google.cloud.dataplex.v1.CatalogService/LookupEntry', + request_serializer=catalog.LookupEntryRequest.serialize, + response_deserializer=catalog.Entry.deserialize, + ) + return self._stubs['lookup_entry'] + + @property + def search_entries(self) -> Callable[ + [catalog.SearchEntriesRequest], + Awaitable[catalog.SearchEntriesResponse]]: + r"""Return a callable for the search entries method over gRPC. + + Searches for Entries matching the given query and + scope. + + Returns: + Callable[[~.SearchEntriesRequest], + Awaitable[~.SearchEntriesResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
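+        # Note: the stub is memoized in self._stubs, so repeated attribute
+        # access reuses a single stub per channel. A sketch of the observable
+        # behavior (hypothetical usage):
+        #
+        #     assert transport.search_entries is transport.search_entries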
+ if 'search_entries' not in self._stubs: + self._stubs['search_entries'] = self.grpc_channel.unary_unary( + '/google.cloud.dataplex.v1.CatalogService/SearchEntries', + request_serializer=catalog.SearchEntriesRequest.serialize, + response_deserializer=catalog.SearchEntriesResponse.deserialize, + ) + return self._stubs['search_entries'] + + @property + def create_metadata_job(self) -> Callable[ + [catalog.CreateMetadataJobRequest], + Awaitable[operations_pb2.Operation]]: + r"""Return a callable for the create metadata job method over gRPC. + + Creates a metadata job. For example, use a metadata + job to import Dataplex Catalog entries and aspects from + a third-party system into Dataplex. + + Returns: + Callable[[~.CreateMetadataJobRequest], + Awaitable[~.Operation]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'create_metadata_job' not in self._stubs: + self._stubs['create_metadata_job'] = self.grpc_channel.unary_unary( + '/google.cloud.dataplex.v1.CatalogService/CreateMetadataJob', + request_serializer=catalog.CreateMetadataJobRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs['create_metadata_job'] + + @property + def get_metadata_job(self) -> Callable[ + [catalog.GetMetadataJobRequest], + Awaitable[catalog.MetadataJob]]: + r"""Return a callable for the get metadata job method over gRPC. + + Gets a metadata job. + + Returns: + Callable[[~.GetMetadataJobRequest], + Awaitable[~.MetadataJob]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'get_metadata_job' not in self._stubs: + self._stubs['get_metadata_job'] = self.grpc_channel.unary_unary( + '/google.cloud.dataplex.v1.CatalogService/GetMetadataJob', + request_serializer=catalog.GetMetadataJobRequest.serialize, + response_deserializer=catalog.MetadataJob.deserialize, + ) + return self._stubs['get_metadata_job'] + + @property + def list_metadata_jobs(self) -> Callable[ + [catalog.ListMetadataJobsRequest], + Awaitable[catalog.ListMetadataJobsResponse]]: + r"""Return a callable for the list metadata jobs method over gRPC. + + Lists metadata jobs. + + Returns: + Callable[[~.ListMetadataJobsRequest], + Awaitable[~.ListMetadataJobsResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'list_metadata_jobs' not in self._stubs: + self._stubs['list_metadata_jobs'] = self.grpc_channel.unary_unary( + '/google.cloud.dataplex.v1.CatalogService/ListMetadataJobs', + request_serializer=catalog.ListMetadataJobsRequest.serialize, + response_deserializer=catalog.ListMetadataJobsResponse.deserialize, + ) + return self._stubs['list_metadata_jobs'] + + @property + def cancel_metadata_job(self) -> Callable[ + [catalog.CancelMetadataJobRequest], + Awaitable[empty_pb2.Empty]]: + r"""Return a callable for the cancel metadata job method over gRPC. + + Cancels a metadata job. 
+ + If you cancel a metadata import job that is in progress, + the changes in the job might be partially applied. We + recommend that you reset the state of the entry groups + in your project by running another metadata job that + reverts the changes from the canceled job. + + Returns: + Callable[[~.CancelMetadataJobRequest], + Awaitable[~.Empty]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'cancel_metadata_job' not in self._stubs: + self._stubs['cancel_metadata_job'] = self.grpc_channel.unary_unary( + '/google.cloud.dataplex.v1.CatalogService/CancelMetadataJob', + request_serializer=catalog.CancelMetadataJobRequest.serialize, + response_deserializer=empty_pb2.Empty.FromString, + ) + return self._stubs['cancel_metadata_job'] + + def _prep_wrapped_messages(self, client_info): + """ Precompute the wrapped methods, overriding the base class method to use async wrappers.""" + self._wrapped_methods = { + self.create_entry_type: self._wrap_method( + self.create_entry_type, + default_timeout=60.0, + client_info=client_info, + ), + self.update_entry_type: self._wrap_method( + self.update_entry_type, + default_timeout=60.0, + client_info=client_info, + ), + self.delete_entry_type: self._wrap_method( + self.delete_entry_type, + default_timeout=60.0, + client_info=client_info, + ), + self.list_entry_types: self._wrap_method( + self.list_entry_types, + default_retry=retries.AsyncRetry( + initial=1.0, + maximum=10.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ResourceExhausted, + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=client_info, + ), + self.get_entry_type: self._wrap_method( + self.get_entry_type, + default_retry=retries.AsyncRetry( + initial=1.0, + maximum=10.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ResourceExhausted, + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=client_info, + ), + self.create_aspect_type: self._wrap_method( + self.create_aspect_type, + default_timeout=60.0, + client_info=client_info, + ), + self.update_aspect_type: self._wrap_method( + self.update_aspect_type, + default_timeout=60.0, + client_info=client_info, + ), + self.delete_aspect_type: self._wrap_method( + self.delete_aspect_type, + default_timeout=60.0, + client_info=client_info, + ), + self.list_aspect_types: self._wrap_method( + self.list_aspect_types, + default_retry=retries.AsyncRetry( + initial=1.0, + maximum=10.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ResourceExhausted, + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=client_info, + ), + self.get_aspect_type: self._wrap_method( + self.get_aspect_type, + default_retry=retries.AsyncRetry( + initial=1.0, + maximum=10.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ResourceExhausted, + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=client_info, + ), + self.create_entry_group: self._wrap_method( + self.create_entry_group, + default_timeout=60.0, + client_info=client_info, + ), + self.update_entry_group: self._wrap_method( + self.update_entry_group, + default_timeout=60.0, + 
client_info=client_info, + ), + self.delete_entry_group: self._wrap_method( + self.delete_entry_group, + default_timeout=60.0, + client_info=client_info, + ), + self.list_entry_groups: self._wrap_method( + self.list_entry_groups, + default_retry=retries.AsyncRetry( + initial=1.0, + maximum=10.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ResourceExhausted, + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=client_info, + ), + self.get_entry_group: self._wrap_method( + self.get_entry_group, + default_retry=retries.AsyncRetry( + initial=1.0, + maximum=10.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ResourceExhausted, + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=client_info, + ), + self.create_entry: self._wrap_method( + self.create_entry, + default_timeout=60.0, + client_info=client_info, + ), + self.update_entry: self._wrap_method( + self.update_entry, + default_retry=retries.AsyncRetry( + initial=1.0, + maximum=10.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ResourceExhausted, + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=client_info, + ), + self.delete_entry: self._wrap_method( + self.delete_entry, + default_timeout=60.0, + client_info=client_info, + ), + self.list_entries: self._wrap_method( + self.list_entries, + default_retry=retries.AsyncRetry( + initial=1.0, + maximum=10.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ResourceExhausted, + core_exceptions.ServiceUnavailable, + ), + deadline=20.0, + ), + default_timeout=20.0, + client_info=client_info, + ), + self.get_entry: self._wrap_method( + self.get_entry, + default_retry=retries.AsyncRetry( + initial=1.0, + maximum=10.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ResourceExhausted, + core_exceptions.ServiceUnavailable, + ), + deadline=20.0, + ), + default_timeout=20.0, + client_info=client_info, + ), + self.lookup_entry: self._wrap_method( + self.lookup_entry, + default_retry=retries.AsyncRetry( + initial=1.0, + maximum=10.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ResourceExhausted, + core_exceptions.ServiceUnavailable, + ), + deadline=20.0, + ), + default_timeout=20.0, + client_info=client_info, + ), + self.search_entries: self._wrap_method( + self.search_entries, + default_retry=retries.AsyncRetry( + initial=1.0, + maximum=10.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ResourceExhausted, + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=client_info, + ), + self.create_metadata_job: self._wrap_method( + self.create_metadata_job, + default_timeout=None, + client_info=client_info, + ), + self.get_metadata_job: self._wrap_method( + self.get_metadata_job, + default_timeout=None, + client_info=client_info, + ), + self.list_metadata_jobs: self._wrap_method( + self.list_metadata_jobs, + default_timeout=None, + client_info=client_info, + ), + self.cancel_metadata_job: self._wrap_method( + self.cancel_metadata_job, + default_timeout=None, + client_info=client_info, + ), + self.get_location: self._wrap_method( + self.get_location, + default_timeout=None, + client_info=client_info, + ), + self.list_locations: self._wrap_method( + self.list_locations, + default_timeout=None, + 
client_info=client_info, + ), + self.cancel_operation: self._wrap_method( + self.cancel_operation, + default_timeout=None, + client_info=client_info, + ), + self.delete_operation: self._wrap_method( + self.delete_operation, + default_timeout=None, + client_info=client_info, + ), + self.get_operation: self._wrap_method( + self.get_operation, + default_timeout=None, + client_info=client_info, + ), + self.list_operations: self._wrap_method( + self.list_operations, + default_timeout=None, + client_info=client_info, + ), + } + + def _wrap_method(self, func, *args, **kwargs): + if self._wrap_with_kind: # pragma: NO COVER + kwargs["kind"] = self.kind + return gapic_v1.method_async.wrap_method(func, *args, **kwargs) + + def close(self): + return self.grpc_channel.close() + + @property + def kind(self) -> str: + return "grpc_asyncio" + + @property + def delete_operation( + self, + ) -> Callable[[operations_pb2.DeleteOperationRequest], None]: + r"""Return a callable for the delete_operation method over gRPC. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "delete_operation" not in self._stubs: + self._stubs["delete_operation"] = self.grpc_channel.unary_unary( + "/google.longrunning.Operations/DeleteOperation", + request_serializer=operations_pb2.DeleteOperationRequest.SerializeToString, + response_deserializer=None, + ) + return self._stubs["delete_operation"] + + @property + def cancel_operation( + self, + ) -> Callable[[operations_pb2.CancelOperationRequest], None]: + r"""Return a callable for the cancel_operation method over gRPC. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "cancel_operation" not in self._stubs: + self._stubs["cancel_operation"] = self.grpc_channel.unary_unary( + "/google.longrunning.Operations/CancelOperation", + request_serializer=operations_pb2.CancelOperationRequest.SerializeToString, + response_deserializer=None, + ) + return self._stubs["cancel_operation"] + + @property + def get_operation( + self, + ) -> Callable[[operations_pb2.GetOperationRequest], operations_pb2.Operation]: + r"""Return a callable for the get_operation method over gRPC. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_operation" not in self._stubs: + self._stubs["get_operation"] = self.grpc_channel.unary_unary( + "/google.longrunning.Operations/GetOperation", + request_serializer=operations_pb2.GetOperationRequest.SerializeToString, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["get_operation"] + + @property + def list_operations( + self, + ) -> Callable[[operations_pb2.ListOperationsRequest], operations_pb2.ListOperationsResponse]: + r"""Return a callable for the list_operations method over gRPC. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
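+        # Note: these mixin RPCs (google.longrunning / google.cloud.location)
+        # take raw protobuf messages, so they pass SerializeToString/FromString
+        # directly instead of the proto-plus serialize/deserialize hooks used
+        # by the Dataplex request types above.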
+        if "list_operations" not in self._stubs:
+            self._stubs["list_operations"] = self.grpc_channel.unary_unary(
+                "/google.longrunning.Operations/ListOperations",
+                request_serializer=operations_pb2.ListOperationsRequest.SerializeToString,
+                response_deserializer=operations_pb2.ListOperationsResponse.FromString,
+            )
+        return self._stubs["list_operations"]
+
+    @property
+    def list_locations(
+        self,
+    ) -> Callable[[locations_pb2.ListLocationsRequest], locations_pb2.ListLocationsResponse]:
+        r"""Return a callable for the list locations method over gRPC.
+        """
+        # Generate a "stub function" on-the-fly which will actually make
+        # the request.
+        # gRPC handles serialization and deserialization, so we just need
+        # to pass in the functions for each.
+        if "list_locations" not in self._stubs:
+            self._stubs["list_locations"] = self.grpc_channel.unary_unary(
+                "/google.cloud.location.Locations/ListLocations",
+                request_serializer=locations_pb2.ListLocationsRequest.SerializeToString,
+                response_deserializer=locations_pb2.ListLocationsResponse.FromString,
+            )
+        return self._stubs["list_locations"]
+
+    @property
+    def get_location(
+        self,
+    ) -> Callable[[locations_pb2.GetLocationRequest], locations_pb2.Location]:
+        r"""Return a callable for the get location method over gRPC.
+        """
+        # Generate a "stub function" on-the-fly which will actually make
+        # the request.
+        # gRPC handles serialization and deserialization, so we just need
+        # to pass in the functions for each.
+        if "get_location" not in self._stubs:
+            self._stubs["get_location"] = self.grpc_channel.unary_unary(
+                "/google.cloud.location.Locations/GetLocation",
+                request_serializer=locations_pb2.GetLocationRequest.SerializeToString,
+                response_deserializer=locations_pb2.Location.FromString,
+            )
+        return self._stubs["get_location"]
+
+
+__all__ = (
+    'CatalogServiceGrpcAsyncIOTransport',
+)
diff --git a/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/content_service/__init__.py b/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/content_service/__init__.py
new file mode 100644
index 000000000000..d5c571190bc7
--- /dev/null
+++ b/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/content_service/__init__.py
@@ -0,0 +1,22 @@
+# -*- coding: utf-8 -*-
+# Copyright 2024 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+# +from .client import ContentServiceClient +from .async_client import ContentServiceAsyncClient + +__all__ = ( + 'ContentServiceClient', + 'ContentServiceAsyncClient', +) diff --git a/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/content_service/async_client.py b/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/content_service/async_client.py new file mode 100644 index 000000000000..0d74d046caec --- /dev/null +++ b/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/content_service/async_client.py @@ -0,0 +1,1435 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from collections import OrderedDict +import re +from typing import Dict, Callable, Mapping, MutableMapping, MutableSequence, Optional, Sequence, Tuple, Type, Union + +from google.cloud.dataplex_v1 import gapic_version as package_version + +from google.api_core.client_options import ClientOptions +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry_async as retries +from google.auth import credentials as ga_credentials # type: ignore +from google.oauth2 import service_account # type: ignore + + +try: + OptionalRetry = Union[retries.AsyncRetry, gapic_v1.method._MethodDefault, None] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.AsyncRetry, object, None] # type: ignore + +from google.cloud.dataplex_v1.services.content_service import pagers +from google.cloud.dataplex_v1.types import analyze +from google.cloud.dataplex_v1.types import content +from google.cloud.dataplex_v1.types import content as gcd_content +from google.cloud.location import locations_pb2 # type: ignore +from google.iam.v1 import iam_policy_pb2 # type: ignore +from google.iam.v1 import policy_pb2 # type: ignore +from google.longrunning import operations_pb2 # type: ignore +from google.protobuf import field_mask_pb2 # type: ignore +from google.protobuf import timestamp_pb2 # type: ignore +from .transports.base import ContentServiceTransport, DEFAULT_CLIENT_INFO +from .transports.grpc_asyncio import ContentServiceGrpcAsyncIOTransport +from .client import ContentServiceClient + + +class ContentServiceAsyncClient: + """ContentService manages Notebook and SQL Scripts for Dataplex.""" + + _client: ContentServiceClient + + # Copy defaults from the synchronous client for use here. + # Note: DEFAULT_ENDPOINT is deprecated. Use _DEFAULT_ENDPOINT_TEMPLATE instead. 
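+    # Illustrative note (assumption, not generated code): the endpoint
+    # template is typically of the form "dataplex.{UNIVERSE_DOMAIN}", which
+    # renders to "dataplex.googleapis.com" in the default universe.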
+    DEFAULT_ENDPOINT = ContentServiceClient.DEFAULT_ENDPOINT
+    DEFAULT_MTLS_ENDPOINT = ContentServiceClient.DEFAULT_MTLS_ENDPOINT
+    _DEFAULT_ENDPOINT_TEMPLATE = ContentServiceClient._DEFAULT_ENDPOINT_TEMPLATE
+    _DEFAULT_UNIVERSE = ContentServiceClient._DEFAULT_UNIVERSE
+
+    content_path = staticmethod(ContentServiceClient.content_path)
+    parse_content_path = staticmethod(ContentServiceClient.parse_content_path)
+    lake_path = staticmethod(ContentServiceClient.lake_path)
+    parse_lake_path = staticmethod(ContentServiceClient.parse_lake_path)
+    common_billing_account_path = staticmethod(ContentServiceClient.common_billing_account_path)
+    parse_common_billing_account_path = staticmethod(ContentServiceClient.parse_common_billing_account_path)
+    common_folder_path = staticmethod(ContentServiceClient.common_folder_path)
+    parse_common_folder_path = staticmethod(ContentServiceClient.parse_common_folder_path)
+    common_organization_path = staticmethod(ContentServiceClient.common_organization_path)
+    parse_common_organization_path = staticmethod(ContentServiceClient.parse_common_organization_path)
+    common_project_path = staticmethod(ContentServiceClient.common_project_path)
+    parse_common_project_path = staticmethod(ContentServiceClient.parse_common_project_path)
+    common_location_path = staticmethod(ContentServiceClient.common_location_path)
+    parse_common_location_path = staticmethod(ContentServiceClient.parse_common_location_path)
+
+    @classmethod
+    def from_service_account_info(cls, info: dict, *args, **kwargs):
+        """Creates an instance of this client using the provided credentials
+            info.
+
+        Args:
+            info (dict): The service account private key info.
+            args: Additional arguments to pass to the constructor.
+            kwargs: Additional arguments to pass to the constructor.
+
+        Returns:
+            ContentServiceAsyncClient: The constructed client.
+        """
+        return ContentServiceClient.from_service_account_info.__func__(ContentServiceAsyncClient, info, *args, **kwargs)  # type: ignore
+
+    @classmethod
+    def from_service_account_file(cls, filename: str, *args, **kwargs):
+        """Creates an instance of this client using the provided credentials
+            file.
+
+        Args:
+            filename (str): The path to the service account private key json
+                file.
+            args: Additional arguments to pass to the constructor.
+            kwargs: Additional arguments to pass to the constructor.
+
+        Returns:
+            ContentServiceAsyncClient: The constructed client.
+        """
+        return ContentServiceClient.from_service_account_file.__func__(ContentServiceAsyncClient, filename, *args, **kwargs)  # type: ignore
+
+    from_service_account_json = from_service_account_file
+
+    @classmethod
+    def get_mtls_endpoint_and_cert_source(cls, client_options: Optional[ClientOptions] = None):
+        """Return the API endpoint and client cert source for mutual TLS.
+
+        The client cert source is determined in the following order:
+        (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the
+        client cert source is None.
+        (2) if `client_options.client_cert_source` is provided, use the provided one; if the
+        default client cert source exists, use the default one; otherwise the client cert
+        source is None.
+
+        The API endpoint is determined in the following order:
+        (1) if `client_options.api_endpoint` is provided, use the provided one.
+        (2) if `GOOGLE_API_USE_MTLS_ENDPOINT` environment variable is "always", use the
+        default mTLS endpoint; if the environment variable is "never", use the default API
+        endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise
+        use the default API endpoint.
+
+        More details can be found at https://google.aip.dev/auth/4114.
+
+        Args:
+            client_options (google.api_core.client_options.ClientOptions): Custom options for the
+                client. Only the `api_endpoint` and `client_cert_source` properties may be used
+                in this method.
+
+        Returns:
+            Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the
+                client cert source to use.
+
+        Raises:
+            google.auth.exceptions.MutualTLSChannelError: If any errors happen.
+        """
+        return ContentServiceClient.get_mtls_endpoint_and_cert_source(client_options)  # type: ignore
+
+    @property
+    def transport(self) -> ContentServiceTransport:
+        """Returns the transport used by the client instance.
+
+        Returns:
+            ContentServiceTransport: The transport used by the client instance.
+        """
+        return self._client.transport
+
+    @property
+    def api_endpoint(self):
+        """Return the API endpoint used by the client instance.
+
+        Returns:
+            str: The API endpoint used by the client instance.
+        """
+        return self._client._api_endpoint
+
+    @property
+    def universe_domain(self) -> str:
+        """Return the universe domain used by the client instance.
+
+        Returns:
+            str: The universe domain used
+                by the client instance.
+        """
+        return self._client._universe_domain
+
+    get_transport_class = ContentServiceClient.get_transport_class
+
+    def __init__(self, *,
+            credentials: Optional[ga_credentials.Credentials] = None,
+            transport: Optional[Union[str, ContentServiceTransport, Callable[..., ContentServiceTransport]]] = "grpc_asyncio",
+            client_options: Optional[ClientOptions] = None,
+            client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO,
+            ) -> None:
+        """Instantiates the content service async client.
+
+        Args:
+            credentials (Optional[google.auth.credentials.Credentials]): The
+                authorization credentials to attach to requests. These
+                credentials identify the application to the service; if none
+                are specified, the client will attempt to ascertain the
+                credentials from the environment.
+            transport (Optional[Union[str,ContentServiceTransport,Callable[..., ContentServiceTransport]]]):
+                The transport to use, or a Callable that constructs and returns a new transport to use.
+                If a Callable is given, it will be called with the same set of initialization
+                arguments as used in the ContentServiceTransport constructor.
+                If set to None, a transport is chosen automatically.
+            client_options (Optional[Union[google.api_core.client_options.ClientOptions, dict]]):
+                Custom options for the client.
+
+                1. The ``api_endpoint`` property can be used to override the
+                default endpoint provided by the client when ``transport`` is
+                not explicitly provided. Only if this property is not set and
+                ``transport`` was not explicitly provided, the endpoint is
+                determined by the GOOGLE_API_USE_MTLS_ENDPOINT environment
+                variable, which can have one of the following values:
+                "always" (always use the default mTLS endpoint), "never" (always
+                use the default regular endpoint) and "auto" (auto-switch to the
+                default mTLS endpoint if client certificate is present; this is
+                the default value).
+
+                2.
If the GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable + is "true", then the ``client_cert_source`` property can be used + to provide a client certificate for mTLS transport. If + not provided, the default SSL client certificate will be used if + present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not + set, no client certificate will be used. + + 3. The ``universe_domain`` property can be used to override the + default "googleapis.com" universe. Note that ``api_endpoint`` + property still takes precedence; and ``universe_domain`` is + currently not supported for mTLS. + + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + + Raises: + google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport + creation failed for any reason. + """ + self._client = ContentServiceClient( + credentials=credentials, + transport=transport, + client_options=client_options, + client_info=client_info, + + ) + + async def create_content(self, + request: Optional[Union[gcd_content.CreateContentRequest, dict]] = None, + *, + parent: Optional[str] = None, + content: Optional[analyze.Content] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> analyze.Content: + r"""Create a content. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dataplex_v1 + + async def sample_create_content(): + # Create a client + client = dataplex_v1.ContentServiceAsyncClient() + + # Initialize request argument(s) + content = dataplex_v1.Content() + content.data_text = "data_text_value" + content.sql_script.engine = "SPARK" + content.path = "path_value" + + request = dataplex_v1.CreateContentRequest( + parent="parent_value", + content=content, + ) + + # Make the request + response = await client.create_content(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.dataplex_v1.types.CreateContentRequest, dict]]): + The request object. Create content request. + parent (:class:`str`): + Required. The resource name of the parent lake: + projects/{project_id}/locations/{location_id}/lakes/{lake_id} + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + content (:class:`google.cloud.dataplex_v1.types.Content`): + Required. Content resource. + This corresponds to the ``content`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
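+
+        For illustration only (hypothetical resource name): the flattened
+        arguments above are shorthand for populating the request yourself.
+
+        .. code-block:: python
+
+            response = await client.create_content(
+                parent="projects/my-project/locations/us-central1/lakes/my-lake",
+                content=content,
+            )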
+ + Returns: + google.cloud.dataplex_v1.types.Content: + Content represents a user-visible + notebook or a sql script + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent, content]) + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, gcd_content.CreateContentRequest): + request = gcd_content.CreateContentRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + if content is not None: + request.content = content + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[self._client._transport.create_content] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("parent", request.parent), + )), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def update_content(self, + request: Optional[Union[gcd_content.UpdateContentRequest, dict]] = None, + *, + content: Optional[analyze.Content] = None, + update_mask: Optional[field_mask_pb2.FieldMask] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> analyze.Content: + r"""Update a content. Only supports full resource update. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dataplex_v1 + + async def sample_update_content(): + # Create a client + client = dataplex_v1.ContentServiceAsyncClient() + + # Initialize request argument(s) + content = dataplex_v1.Content() + content.data_text = "data_text_value" + content.sql_script.engine = "SPARK" + content.path = "path_value" + + request = dataplex_v1.UpdateContentRequest( + content=content, + ) + + # Make the request + response = await client.update_content(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.dataplex_v1.types.UpdateContentRequest, dict]]): + The request object. Update content request. + content (:class:`google.cloud.dataplex_v1.types.Content`): + Required. Update description. Only fields specified in + ``update_mask`` are updated. + + This corresponds to the ``content`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + update_mask (:class:`google.protobuf.field_mask_pb2.FieldMask`): + Required. Mask of fields to update. 
+ This corresponds to the ``update_mask`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.dataplex_v1.types.Content: + Content represents a user-visible + notebook or a sql script + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([content, update_mask]) + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, gcd_content.UpdateContentRequest): + request = gcd_content.UpdateContentRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if content is not None: + request.content = content + if update_mask is not None: + request.update_mask = update_mask + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[self._client._transport.update_content] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("content.name", request.content.name), + )), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def delete_content(self, + request: Optional[Union[content.DeleteContentRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Delete a content. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dataplex_v1 + + async def sample_delete_content(): + # Create a client + client = dataplex_v1.ContentServiceAsyncClient() + + # Initialize request argument(s) + request = dataplex_v1.DeleteContentRequest( + name="name_value", + ) + + # Make the request + await client.delete_content(request=request) + + Args: + request (Optional[Union[google.cloud.dataplex_v1.types.DeleteContentRequest, dict]]): + The request object. Delete content request. + name (:class:`str`): + Required. The resource name of the content: + projects/{project_id}/locations/{location_id}/lakes/{lake_id}/content/{content_id} + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. 
+ retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, content.DeleteContentRequest): + request = content.DeleteContentRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[self._client._transport.delete_content] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + async def get_content(self, + request: Optional[Union[content.GetContentRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> analyze.Content: + r"""Get a content resource. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dataplex_v1 + + async def sample_get_content(): + # Create a client + client = dataplex_v1.ContentServiceAsyncClient() + + # Initialize request argument(s) + request = dataplex_v1.GetContentRequest( + name="name_value", + ) + + # Make the request + response = await client.get_content(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.dataplex_v1.types.GetContentRequest, dict]]): + The request object. Get content request. + name (:class:`str`): + Required. The resource name of the content: + projects/{project_id}/locations/{location_id}/lakes/{lake_id}/content/{content_id} + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
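+
+        For illustration only (hypothetical resource name): the same call
+        using the flattened ``name`` argument.
+
+        .. code-block:: python
+
+            response = await client.get_content(
+                name="projects/my-project/locations/us-central1/lakes/my-lake/content/my-content",
+            )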
+ + Returns: + google.cloud.dataplex_v1.types.Content: + Content represents a user-visible + notebook or a sql script + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, content.GetContentRequest): + request = content.GetContentRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[self._client._transport.get_content] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def get_iam_policy(self, + request: Optional[Union[iam_policy_pb2.GetIamPolicyRequest, dict]] = None, + *, + resource: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> policy_pb2.Policy: + r"""Gets the access control policy for a contentitem resource. A + ``NOT_FOUND`` error is returned if the resource does not exist. + An empty policy is returned if the resource exists but does not + have a policy set on it. + + Caller must have Google IAM ``dataplex.content.getIamPolicy`` + permission on the resource. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dataplex_v1 + from google.iam.v1 import iam_policy_pb2 # type: ignore + + async def sample_get_iam_policy(): + # Create a client + client = dataplex_v1.ContentServiceAsyncClient() + + # Initialize request argument(s) + request = iam_policy_pb2.GetIamPolicyRequest( + resource="resource_value", + ) + + # Make the request + response = await client.get_iam_policy(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.iam.v1.iam_policy_pb2.GetIamPolicyRequest, dict]]): + The request object. Request message for ``GetIamPolicy`` method. + resource (:class:`str`): + REQUIRED: The resource for which the + policy is being requested. See the + operation documentation for the + appropriate value for this field. + + This corresponds to the ``resource`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. 
+ retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.iam.v1.policy_pb2.Policy: + An Identity and Access Management (IAM) policy, which specifies access + controls for Google Cloud resources. + + A Policy is a collection of bindings. A binding binds + one or more members, or principals, to a single role. + Principals can be user accounts, service accounts, + Google groups, and domains (such as G Suite). A role + is a named list of permissions; each role can be an + IAM predefined role or a user-created custom role. + + For some types of Google Cloud resources, a binding + can also specify a condition, which is a logical + expression that allows access to a resource only if + the expression evaluates to true. A condition can add + constraints based on attributes of the request, the + resource, or both. To learn which resources support + conditions in their IAM policies, see the [IAM + documentation](\ https://cloud.google.com/iam/help/conditions/resource-policies). + + **JSON example:** + + :literal:`\` { "bindings": [ { "role": "roles/resourcemanager.organizationAdmin", "members": [ "user:mike@example.com", "group:admins@example.com", "domain:google.com", "serviceAccount:my-project-id@appspot.gserviceaccount.com" ] }, { "role": "roles/resourcemanager.organizationViewer", "members": [ "user:eve@example.com" ], "condition": { "title": "expirable access", "description": "Does not grant access after Sep 2020", "expression": "request.time < timestamp('2020-10-01T00:00:00.000Z')", } } ], "etag": "BwWWja0YfJA=", "version": 3 }`\ \` + + **YAML example:** + + :literal:`\` bindings: - members: - user:mike@example.com - group:admins@example.com - domain:google.com - serviceAccount:my-project-id@appspot.gserviceaccount.com role: roles/resourcemanager.organizationAdmin - members: - user:eve@example.com role: roles/resourcemanager.organizationViewer condition: title: expirable access description: Does not grant access after Sep 2020 expression: request.time < timestamp('2020-10-01T00:00:00.000Z') etag: BwWWja0YfJA= version: 3`\ \` + + For a description of IAM and its features, see the + [IAM + documentation](\ https://cloud.google.com/iam/docs/). + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([resource]) + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + # - The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = iam_policy_pb2.GetIamPolicyRequest(**request) + elif not request: + request = iam_policy_pb2.GetIamPolicyRequest(resource=resource) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[self._client._transport.get_iam_policy] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("resource", request.resource), + )), + ) + + # Validate the universe domain. 
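+        # (This guards against sending a request when the configured
+        # credentials belong to a different universe domain than the client's
+        # endpoint; on mismatch a ValueError is raised.)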
+ self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def set_iam_policy(self, + request: Optional[Union[iam_policy_pb2.SetIamPolicyRequest, dict]] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> policy_pb2.Policy: + r"""Sets the access control policy on the specified contentitem + resource. Replaces any existing policy. + + Caller must have Google IAM ``dataplex.content.setIamPolicy`` + permission on the resource. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dataplex_v1 + from google.iam.v1 import iam_policy_pb2 # type: ignore + + async def sample_set_iam_policy(): + # Create a client + client = dataplex_v1.ContentServiceAsyncClient() + + # Initialize request argument(s) + request = iam_policy_pb2.SetIamPolicyRequest( + resource="resource_value", + ) + + # Make the request + response = await client.set_iam_policy(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.iam.v1.iam_policy_pb2.SetIamPolicyRequest, dict]]): + The request object. Request message for ``SetIamPolicy`` method. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.iam.v1.policy_pb2.Policy: + An Identity and Access Management (IAM) policy, which specifies access + controls for Google Cloud resources. + + A Policy is a collection of bindings. A binding binds + one or more members, or principals, to a single role. + Principals can be user accounts, service accounts, + Google groups, and domains (such as G Suite). A role + is a named list of permissions; each role can be an + IAM predefined role or a user-created custom role. + + For some types of Google Cloud resources, a binding + can also specify a condition, which is a logical + expression that allows access to a resource only if + the expression evaluates to true. A condition can add + constraints based on attributes of the request, the + resource, or both. To learn which resources support + conditions in their IAM policies, see the [IAM + documentation](\ https://cloud.google.com/iam/help/conditions/resource-policies). 
+ + **JSON example:** + + :literal:`\` { "bindings": [ { "role": "roles/resourcemanager.organizationAdmin", "members": [ "user:mike@example.com", "group:admins@example.com", "domain:google.com", "serviceAccount:my-project-id@appspot.gserviceaccount.com" ] }, { "role": "roles/resourcemanager.organizationViewer", "members": [ "user:eve@example.com" ], "condition": { "title": "expirable access", "description": "Does not grant access after Sep 2020", "expression": "request.time < timestamp('2020-10-01T00:00:00.000Z')", } } ], "etag": "BwWWja0YfJA=", "version": 3 }`\ \` + + **YAML example:** + + :literal:`\` bindings: - members: - user:mike@example.com - group:admins@example.com - domain:google.com - serviceAccount:my-project-id@appspot.gserviceaccount.com role: roles/resourcemanager.organizationAdmin - members: - user:eve@example.com role: roles/resourcemanager.organizationViewer condition: title: expirable access description: Does not grant access after Sep 2020 expression: request.time < timestamp('2020-10-01T00:00:00.000Z') etag: BwWWja0YfJA= version: 3`\ \` + + For a description of IAM and its features, see the + [IAM + documentation](\ https://cloud.google.com/iam/docs/). + + """ + # Create or coerce a protobuf request object. + # - The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = iam_policy_pb2.SetIamPolicyRequest(**request) + elif not request: + request = iam_policy_pb2.SetIamPolicyRequest() + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[self._client._transport.set_iam_policy] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("resource", request.resource), + )), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def test_iam_permissions(self, + request: Optional[Union[iam_policy_pb2.TestIamPermissionsRequest, dict]] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> iam_policy_pb2.TestIamPermissionsResponse: + r"""Returns the caller's permissions on a resource. If the resource + does not exist, an empty set of permissions is returned (a + ``NOT_FOUND`` error is not returned). + + A caller is not required to have Google IAM permission to make + this request. + + Note: This operation is designed to be used for building + permission-aware UIs and command-line tools, not for + authorization checking. This operation may "fail open" without + warning. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dataplex_v1 + from google.iam.v1 import iam_policy_pb2 # type: ignore + + async def sample_test_iam_permissions(): + # Create a client + client = dataplex_v1.ContentServiceAsyncClient() + + # Initialize request argument(s) + request = iam_policy_pb2.TestIamPermissionsRequest( + resource="resource_value", + permissions=['permissions_value1', 'permissions_value2'], + ) + + # Make the request + response = await client.test_iam_permissions(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.iam.v1.iam_policy_pb2.TestIamPermissionsRequest, dict]]): + The request object. Request message for ``TestIamPermissions`` method. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.iam.v1.iam_policy_pb2.TestIamPermissionsResponse: + Response message for TestIamPermissions method. + """ + # Create or coerce a protobuf request object. + # - The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = iam_policy_pb2.TestIamPermissionsRequest(**request) + elif not request: + request = iam_policy_pb2.TestIamPermissionsRequest() + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[self._client._transport.test_iam_permissions] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("resource", request.resource), + )), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def list_content(self, + request: Optional[Union[content.ListContentRequest, dict]] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListContentAsyncPager: + r"""List content. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dataplex_v1 + + async def sample_list_content(): + # Create a client + client = dataplex_v1.ContentServiceAsyncClient() + + # Initialize request argument(s) + request = dataplex_v1.ListContentRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_content(request=request) + + # Handle the response + async for response in page_result: + print(response) + + Args: + request (Optional[Union[google.cloud.dataplex_v1.types.ListContentRequest, dict]]): + The request object. 
List content request. Returns the + BASIC Content view. + parent (:class:`str`): + Required. The resource name of the parent lake: + projects/{project_id}/locations/{location_id}/lakes/{lake_id} + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.dataplex_v1.services.content_service.pagers.ListContentAsyncPager: + List content response. + + Iterating over this object will yield + results and resolve additional pages + automatically. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent]) + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, content.ListContentRequest): + request = content.ListContentRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[self._client._transport.list_content] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("parent", request.parent), + )), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__aiter__` convenience method. + response = pagers.ListContentAsyncPager( + method=rpc, + request=request, + response=response, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def list_operations( + self, + request: Optional[operations_pb2.ListOperationsRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.ListOperationsResponse: + r"""Lists operations that match the specified filter in the request. + + Args: + request (:class:`~.operations_pb2.ListOperationsRequest`): + The request object. Request message for + `ListOperations` method. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + ~.operations_pb2.ListOperationsResponse: + Response message for ``ListOperations`` method. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. 
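+        # NOTE: a plain dict is also accepted here and expanded into the
+        # protobuf request via keyword arguments, e.g. (illustrative values):
+        #     await client.list_operations({"name": "projects/my-project/locations/us-central1"})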
+ if isinstance(request, dict): + request = operations_pb2.ListOperationsRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self.transport._wrapped_methods[self._client._transport.list_operations] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("name", request.name),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, retry=retry, timeout=timeout, metadata=metadata,) + + # Done; return the response. + return response + + async def get_operation( + self, + request: Optional[operations_pb2.GetOperationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.Operation: + r"""Gets the latest state of a long-running operation. + + Args: + request (:class:`~.operations_pb2.GetOperationRequest`): + The request object. Request message for + `GetOperation` method. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + ~.operations_pb2.Operation: + An ``Operation`` object. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.GetOperationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self.transport._wrapped_methods[self._client._transport.get_operation] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("name", request.name),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, retry=retry, timeout=timeout, metadata=metadata,) + + # Done; return the response. + return response + + async def delete_operation( + self, + request: Optional[operations_pb2.DeleteOperationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Deletes a long-running operation. + + This method indicates that the client is no longer interested + in the operation result. It does not cancel the operation. + If the server doesn't support this method, it returns + `google.rpc.Code.UNIMPLEMENTED`. + + Args: + request (:class:`~.operations_pb2.DeleteOperationRequest`): + The request object. Request message for + `DeleteOperation` method. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + None + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. 
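+        # NOTE: deleting an operation only removes the record of it; to
+        # attempt to stop a running operation, use `cancel_operation` instead.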
+ if isinstance(request, dict): + request = operations_pb2.DeleteOperationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self.transport._wrapped_methods[self._client._transport.delete_operation] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("name", request.name),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + await rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + + async def cancel_operation( + self, + request: Optional[operations_pb2.CancelOperationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Starts asynchronous cancellation on a long-running operation. + + The server makes a best effort to cancel the operation, but success + is not guaranteed. If the server doesn't support this method, it returns + `google.rpc.Code.UNIMPLEMENTED`. + + Args: + request (:class:`~.operations_pb2.CancelOperationRequest`): + The request object. Request message for + `CancelOperation` method. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + None + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.CancelOperationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self.transport._wrapped_methods[self._client._transport.cancel_operation] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("name", request.name),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + await rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + + async def get_location( + self, + request: Optional[locations_pb2.GetLocationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> locations_pb2.Location: + r"""Gets information about a location. + + Args: + request (:class:`~.location_pb2.GetLocationRequest`): + The request object. Request message for + `GetLocation` method. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + ~.location_pb2.Location: + Location object. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = locations_pb2.GetLocationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. 
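+        # NOTE: `request.name` is expected to be a fully qualified location,
+        # e.g. "projects/my-project/locations/us-central1" (illustrative value).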
+ rpc = self.transport._wrapped_methods[self._client._transport.get_location] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("name", request.name),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, retry=retry, timeout=timeout, metadata=metadata,) + + # Done; return the response. + return response + + async def list_locations( + self, + request: Optional[locations_pb2.ListLocationsRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> locations_pb2.ListLocationsResponse: + r"""Lists information about the supported locations for this service. + + Args: + request (:class:`~.location_pb2.ListLocationsRequest`): + The request object. Request message for + `ListLocations` method. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + ~.location_pb2.ListLocationsResponse: + Response message for ``ListLocations`` method. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = locations_pb2.ListLocationsRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self.transport._wrapped_methods[self._client._transport.list_locations] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("name", request.name),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, retry=retry, timeout=timeout, metadata=metadata,) + + # Done; return the response. + return response + + async def __aenter__(self) -> "ContentServiceAsyncClient": + return self + + async def __aexit__(self, exc_type, exc, tb): + await self.transport.close() + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(gapic_version=package_version.__version__) + + +__all__ = ( + "ContentServiceAsyncClient", +) diff --git a/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/content_service/client.py b/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/content_service/client.py new file mode 100644 index 000000000000..ff4cbd86c3cb --- /dev/null +++ b/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/content_service/client.py @@ -0,0 +1,1767 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. +# +from collections import OrderedDict +import os +import re +from typing import Dict, Callable, Mapping, MutableMapping, MutableSequence, Optional, Sequence, Tuple, Type, Union, cast +import warnings + +from google.cloud.dataplex_v1 import gapic_version as package_version + +from google.api_core import client_options as client_options_lib +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry as retries +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport import mtls # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.auth.exceptions import MutualTLSChannelError # type: ignore +from google.oauth2 import service_account # type: ignore + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault, None] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object, None] # type: ignore + +from google.cloud.dataplex_v1.services.content_service import pagers +from google.cloud.dataplex_v1.types import analyze +from google.cloud.dataplex_v1.types import content +from google.cloud.dataplex_v1.types import content as gcd_content +from google.cloud.location import locations_pb2 # type: ignore +from google.iam.v1 import iam_policy_pb2 # type: ignore +from google.iam.v1 import policy_pb2 # type: ignore +from google.longrunning import operations_pb2 # type: ignore +from google.protobuf import field_mask_pb2 # type: ignore +from google.protobuf import timestamp_pb2 # type: ignore +from .transports.base import ContentServiceTransport, DEFAULT_CLIENT_INFO +from .transports.grpc import ContentServiceGrpcTransport +from .transports.grpc_asyncio import ContentServiceGrpcAsyncIOTransport + + +class ContentServiceClientMeta(type): + """Metaclass for the ContentService client. + + This provides class-level methods for building and retrieving + support objects (e.g. transport) without polluting the client instance + objects. + """ + _transport_registry = OrderedDict() # type: Dict[str, Type[ContentServiceTransport]] + _transport_registry["grpc"] = ContentServiceGrpcTransport + _transport_registry["grpc_asyncio"] = ContentServiceGrpcAsyncIOTransport + + def get_transport_class(cls, + label: Optional[str] = None, + ) -> Type[ContentServiceTransport]: + """Returns an appropriate transport class. + + Args: + label: The name of the desired transport. If none is + provided, then the first transport in the registry is used. + + Returns: + The transport class to use. + """ + # If a specific transport is requested, return that one. + if label: + return cls._transport_registry[label] + + # No transport is requested; return the default (that is, the first one + # in the dictionary). + return next(iter(cls._transport_registry.values())) + + +class ContentServiceClient(metaclass=ContentServiceClientMeta): + """ContentService manages Notebook and SQL Scripts for Dataplex.""" + + @staticmethod + def _get_default_mtls_endpoint(api_endpoint): + """Converts api endpoint to mTLS endpoint. + + Convert "*.sandbox.googleapis.com" and "*.googleapis.com" to + "*.mtls.sandbox.googleapis.com" and "*.mtls.googleapis.com" respectively. + Args: + api_endpoint (Optional[str]): the api endpoint to convert. + Returns: + str: converted mTLS api endpoint. 
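+
+            For example (illustrative): "dataplex.googleapis.com" becomes
+            "dataplex.mtls.googleapis.com".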
+ """ + if not api_endpoint: + return api_endpoint + + mtls_endpoint_re = re.compile( + r"(?P[^.]+)(?P\.mtls)?(?P\.sandbox)?(?P\.googleapis\.com)?" + ) + + m = mtls_endpoint_re.match(api_endpoint) + name, mtls, sandbox, googledomain = m.groups() + if mtls or not googledomain: + return api_endpoint + + if sandbox: + return api_endpoint.replace( + "sandbox.googleapis.com", "mtls.sandbox.googleapis.com" + ) + + return api_endpoint.replace(".googleapis.com", ".mtls.googleapis.com") + + # Note: DEFAULT_ENDPOINT is deprecated. Use _DEFAULT_ENDPOINT_TEMPLATE instead. + DEFAULT_ENDPOINT = "dataplex.googleapis.com" + DEFAULT_MTLS_ENDPOINT = _get_default_mtls_endpoint.__func__( # type: ignore + DEFAULT_ENDPOINT + ) + + _DEFAULT_ENDPOINT_TEMPLATE = "dataplex.{UNIVERSE_DOMAIN}" + _DEFAULT_UNIVERSE = "googleapis.com" + + @classmethod + def from_service_account_info(cls, info: dict, *args, **kwargs): + """Creates an instance of this client using the provided credentials + info. + + Args: + info (dict): The service account private key info. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + ContentServiceClient: The constructed client. + """ + credentials = service_account.Credentials.from_service_account_info(info) + kwargs["credentials"] = credentials + return cls(*args, **kwargs) + + @classmethod + def from_service_account_file(cls, filename: str, *args, **kwargs): + """Creates an instance of this client using the provided credentials + file. + + Args: + filename (str): The path to the service account private key json + file. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + ContentServiceClient: The constructed client. + """ + credentials = service_account.Credentials.from_service_account_file( + filename) + kwargs["credentials"] = credentials + return cls(*args, **kwargs) + + from_service_account_json = from_service_account_file + + @property + def transport(self) -> ContentServiceTransport: + """Returns the transport used by the client instance. + + Returns: + ContentServiceTransport: The transport used by the client + instance. 
+ """ + return self._transport + + @staticmethod + def content_path(project: str,location: str,lake: str,content: str,) -> str: + """Returns a fully-qualified content string.""" + return "projects/{project}/locations/{location}/lakes/{lake}/content/{content}".format(project=project, location=location, lake=lake, content=content, ) + + @staticmethod + def parse_content_path(path: str) -> Dict[str,str]: + """Parses a content path into its component segments.""" + m = re.match(r"^projects/(?P.+?)/locations/(?P.+?)/lakes/(?P.+?)/content/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def lake_path(project: str,location: str,lake: str,) -> str: + """Returns a fully-qualified lake string.""" + return "projects/{project}/locations/{location}/lakes/{lake}".format(project=project, location=location, lake=lake, ) + + @staticmethod + def parse_lake_path(path: str) -> Dict[str,str]: + """Parses a lake path into its component segments.""" + m = re.match(r"^projects/(?P.+?)/locations/(?P.+?)/lakes/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_billing_account_path(billing_account: str, ) -> str: + """Returns a fully-qualified billing_account string.""" + return "billingAccounts/{billing_account}".format(billing_account=billing_account, ) + + @staticmethod + def parse_common_billing_account_path(path: str) -> Dict[str,str]: + """Parse a billing_account path into its component segments.""" + m = re.match(r"^billingAccounts/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_folder_path(folder: str, ) -> str: + """Returns a fully-qualified folder string.""" + return "folders/{folder}".format(folder=folder, ) + + @staticmethod + def parse_common_folder_path(path: str) -> Dict[str,str]: + """Parse a folder path into its component segments.""" + m = re.match(r"^folders/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_organization_path(organization: str, ) -> str: + """Returns a fully-qualified organization string.""" + return "organizations/{organization}".format(organization=organization, ) + + @staticmethod + def parse_common_organization_path(path: str) -> Dict[str,str]: + """Parse a organization path into its component segments.""" + m = re.match(r"^organizations/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_project_path(project: str, ) -> str: + """Returns a fully-qualified project string.""" + return "projects/{project}".format(project=project, ) + + @staticmethod + def parse_common_project_path(path: str) -> Dict[str,str]: + """Parse a project path into its component segments.""" + m = re.match(r"^projects/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_location_path(project: str, location: str, ) -> str: + """Returns a fully-qualified location string.""" + return "projects/{project}/locations/{location}".format(project=project, location=location, ) + + @staticmethod + def parse_common_location_path(path: str) -> Dict[str,str]: + """Parse a location path into its component segments.""" + m = re.match(r"^projects/(?P.+?)/locations/(?P.+?)$", path) + return m.groupdict() if m else {} + + @classmethod + def get_mtls_endpoint_and_cert_source(cls, client_options: Optional[client_options_lib.ClientOptions] = None): + """Deprecated. Return the API endpoint and client cert source for mutual TLS. 
+
+        The client cert source is determined in the following order:
+        (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the
+        client cert source is None.
+        (2) if `client_options.client_cert_source` is provided, use the provided one; if the
+        default client cert source exists, use the default one; otherwise the client cert
+        source is None.
+
+        The API endpoint is determined in the following order:
+        (1) if `client_options.api_endpoint` is provided, use the provided one.
+        (2) if `GOOGLE_API_USE_MTLS_ENDPOINT` environment variable is "always", use the
+        default mTLS endpoint; if the environment variable is "never", use the default API
+        endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise
+        use the default API endpoint.
+
+        More details can be found at https://google.aip.dev/auth/4114.
+
+        Args:
+            client_options (google.api_core.client_options.ClientOptions): Custom options for the
+                client. Only the `api_endpoint` and `client_cert_source` properties may be used
+                in this method.
+
+        Returns:
+            Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the
+                client cert source to use.
+
+        Raises:
+            google.auth.exceptions.MutualTLSChannelError: If any errors happen.
+        """
+
+        warnings.warn("get_mtls_endpoint_and_cert_source is deprecated. Use the api_endpoint property instead.",
+            DeprecationWarning)
+        if client_options is None:
+            client_options = client_options_lib.ClientOptions()
+        use_client_cert = os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false")
+        use_mtls_endpoint = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto")
+        if use_client_cert not in ("true", "false"):
+            raise ValueError("Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`")
+        if use_mtls_endpoint not in ("auto", "never", "always"):
+            raise MutualTLSChannelError("Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`")
+
+        # Figure out the client cert source to use.
+        client_cert_source = None
+        if use_client_cert == "true":
+            if client_options.client_cert_source:
+                client_cert_source = client_options.client_cert_source
+            elif mtls.has_default_client_cert_source():
+                client_cert_source = mtls.default_client_cert_source()
+
+        # Figure out which api endpoint to use.
+        if client_options.api_endpoint is not None:
+            api_endpoint = client_options.api_endpoint
+        elif use_mtls_endpoint == "always" or (use_mtls_endpoint == "auto" and client_cert_source):
+            api_endpoint = cls.DEFAULT_MTLS_ENDPOINT
+        else:
+            api_endpoint = cls.DEFAULT_ENDPOINT
+
+        return api_endpoint, client_cert_source
+
+    @staticmethod
+    def _read_environment_variables():
+        """Returns the environment variables used by the client.
+
+        Returns:
+            Tuple[bool, str, str]: returns the GOOGLE_API_USE_CLIENT_CERTIFICATE,
+            GOOGLE_API_USE_MTLS_ENDPOINT, and GOOGLE_CLOUD_UNIVERSE_DOMAIN environment variables.
+
+        Raises:
+            ValueError: If GOOGLE_API_USE_CLIENT_CERTIFICATE is not
+                any of ["true", "false"].
+            google.auth.exceptions.MutualTLSChannelError: If GOOGLE_API_USE_MTLS_ENDPOINT
+                is not any of ["auto", "never", "always"].
+ """ + use_client_cert = os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false").lower() + use_mtls_endpoint = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto").lower() + universe_domain_env = os.getenv("GOOGLE_CLOUD_UNIVERSE_DOMAIN") + if use_client_cert not in ("true", "false"): + raise ValueError("Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`") + if use_mtls_endpoint not in ("auto", "never", "always"): + raise MutualTLSChannelError("Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`") + return use_client_cert == "true", use_mtls_endpoint, universe_domain_env + + @staticmethod + def _get_client_cert_source(provided_cert_source, use_cert_flag): + """Return the client cert source to be used by the client. + + Args: + provided_cert_source (bytes): The client certificate source provided. + use_cert_flag (bool): A flag indicating whether to use the client certificate. + + Returns: + bytes or None: The client cert source to be used by the client. + """ + client_cert_source = None + if use_cert_flag: + if provided_cert_source: + client_cert_source = provided_cert_source + elif mtls.has_default_client_cert_source(): + client_cert_source = mtls.default_client_cert_source() + return client_cert_source + + @staticmethod + def _get_api_endpoint(api_override, client_cert_source, universe_domain, use_mtls_endpoint): + """Return the API endpoint used by the client. + + Args: + api_override (str): The API endpoint override. If specified, this is always + the return value of this function and the other arguments are not used. + client_cert_source (bytes): The client certificate source used by the client. + universe_domain (str): The universe domain used by the client. + use_mtls_endpoint (str): How to use the mTLS endpoint, which depends also on the other parameters. + Possible values are "always", "auto", or "never". + + Returns: + str: The API endpoint to be used by the client. + """ + if api_override is not None: + api_endpoint = api_override + elif use_mtls_endpoint == "always" or (use_mtls_endpoint == "auto" and client_cert_source): + _default_universe = ContentServiceClient._DEFAULT_UNIVERSE + if universe_domain != _default_universe: + raise MutualTLSChannelError(f"mTLS is not supported in any universe other than {_default_universe}.") + api_endpoint = ContentServiceClient.DEFAULT_MTLS_ENDPOINT + else: + api_endpoint = ContentServiceClient._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=universe_domain) + return api_endpoint + + @staticmethod + def _get_universe_domain(client_universe_domain: Optional[str], universe_domain_env: Optional[str]) -> str: + """Return the universe domain used by the client. + + Args: + client_universe_domain (Optional[str]): The universe domain configured via the client options. + universe_domain_env (Optional[str]): The universe domain configured via the "GOOGLE_CLOUD_UNIVERSE_DOMAIN" environment variable. + + Returns: + str: The universe domain to be used by the client. + + Raises: + ValueError: If the universe domain is an empty string. 
+ """ + universe_domain = ContentServiceClient._DEFAULT_UNIVERSE + if client_universe_domain is not None: + universe_domain = client_universe_domain + elif universe_domain_env is not None: + universe_domain = universe_domain_env + if len(universe_domain.strip()) == 0: + raise ValueError("Universe Domain cannot be an empty string.") + return universe_domain + + def _validate_universe_domain(self): + """Validates client's and credentials' universe domains are consistent. + + Returns: + bool: True iff the configured universe domain is valid. + + Raises: + ValueError: If the configured universe domain is not valid. + """ + + # NOTE (b/349488459): universe validation is disabled until further notice. + return True + + @property + def api_endpoint(self): + """Return the API endpoint used by the client instance. + + Returns: + str: The API endpoint used by the client instance. + """ + return self._api_endpoint + + @property + def universe_domain(self) -> str: + """Return the universe domain used by the client instance. + + Returns: + str: The universe domain used by the client instance. + """ + return self._universe_domain + + def __init__(self, *, + credentials: Optional[ga_credentials.Credentials] = None, + transport: Optional[Union[str, ContentServiceTransport, Callable[..., ContentServiceTransport]]] = None, + client_options: Optional[Union[client_options_lib.ClientOptions, dict]] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + ) -> None: + """Instantiates the content service client. + + Args: + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + transport (Optional[Union[str,ContentServiceTransport,Callable[..., ContentServiceTransport]]]): + The transport to use, or a Callable that constructs and returns a new transport. + If a Callable is given, it will be called with the same set of initialization + arguments as used in the ContentServiceTransport constructor. + If set to None, a transport is chosen automatically. + client_options (Optional[Union[google.api_core.client_options.ClientOptions, dict]]): + Custom options for the client. + + 1. The ``api_endpoint`` property can be used to override the + default endpoint provided by the client when ``transport`` is + not explicitly provided. Only if this property is not set and + ``transport`` was not explicitly provided, the endpoint is + determined by the GOOGLE_API_USE_MTLS_ENDPOINT environment + variable, which have one of the following values: + "always" (always use the default mTLS endpoint), "never" (always + use the default regular endpoint) and "auto" (auto-switch to the + default mTLS endpoint if client certificate is present; this is + the default value). + + 2. If the GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable + is "true", then the ``client_cert_source`` property can be used + to provide a client certificate for mTLS transport. If + not provided, the default SSL client certificate will be used if + present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not + set, no client certificate will be used. + + 3. The ``universe_domain`` property can be used to override the + default "googleapis.com" universe. Note that the ``api_endpoint`` + property still takes precedence; and ``universe_domain`` is + currently not supported for mTLS. 
+ + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport + creation failed for any reason. + """ + self._client_options = client_options + if isinstance(self._client_options, dict): + self._client_options = client_options_lib.from_dict(self._client_options) + if self._client_options is None: + self._client_options = client_options_lib.ClientOptions() + self._client_options = cast(client_options_lib.ClientOptions, self._client_options) + + universe_domain_opt = getattr(self._client_options, 'universe_domain', None) + + self._use_client_cert, self._use_mtls_endpoint, self._universe_domain_env = ContentServiceClient._read_environment_variables() + self._client_cert_source = ContentServiceClient._get_client_cert_source(self._client_options.client_cert_source, self._use_client_cert) + self._universe_domain = ContentServiceClient._get_universe_domain(universe_domain_opt, self._universe_domain_env) + self._api_endpoint = None # updated below, depending on `transport` + + # Initialize the universe domain validation. + self._is_universe_domain_valid = False + + api_key_value = getattr(self._client_options, "api_key", None) + if api_key_value and credentials: + raise ValueError("client_options.api_key and credentials are mutually exclusive") + + # Save or instantiate the transport. + # Ordinarily, we provide the transport, but allowing a custom transport + # instance provides an extensibility point for unusual situations. + transport_provided = isinstance(transport, ContentServiceTransport) + if transport_provided: + # transport is a ContentServiceTransport instance. + if credentials or self._client_options.credentials_file or api_key_value: + raise ValueError("When providing a transport instance, " + "provide its credentials directly.") + if self._client_options.scopes: + raise ValueError( + "When providing a transport instance, provide its scopes " + "directly." 
+ ) + self._transport = cast(ContentServiceTransport, transport) + self._api_endpoint = self._transport.host + + self._api_endpoint = (self._api_endpoint or + ContentServiceClient._get_api_endpoint( + self._client_options.api_endpoint, + self._client_cert_source, + self._universe_domain, + self._use_mtls_endpoint)) + + if not transport_provided: + import google.auth._default # type: ignore + + if api_key_value and hasattr(google.auth._default, "get_api_key_credentials"): + credentials = google.auth._default.get_api_key_credentials(api_key_value) + + transport_init: Union[Type[ContentServiceTransport], Callable[..., ContentServiceTransport]] = ( + ContentServiceClient.get_transport_class(transport) + if isinstance(transport, str) or transport is None + else cast(Callable[..., ContentServiceTransport], transport) + ) + # initialize with the provided callable or the passed in class + self._transport = transport_init( + credentials=credentials, + credentials_file=self._client_options.credentials_file, + host=self._api_endpoint, + scopes=self._client_options.scopes, + client_cert_source_for_mtls=self._client_cert_source, + quota_project_id=self._client_options.quota_project_id, + client_info=client_info, + always_use_jwt_access=True, + api_audience=self._client_options.api_audience, + ) + + def create_content(self, + request: Optional[Union[gcd_content.CreateContentRequest, dict]] = None, + *, + parent: Optional[str] = None, + content: Optional[analyze.Content] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> analyze.Content: + r"""Create a content. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dataplex_v1 + + def sample_create_content(): + # Create a client + client = dataplex_v1.ContentServiceClient() + + # Initialize request argument(s) + content = dataplex_v1.Content() + content.data_text = "data_text_value" + content.sql_script.engine = "SPARK" + content.path = "path_value" + + request = dataplex_v1.CreateContentRequest( + parent="parent_value", + content=content, + ) + + # Make the request + response = client.create_content(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.dataplex_v1.types.CreateContentRequest, dict]): + The request object. Create content request. + parent (str): + Required. The resource name of the parent lake: + projects/{project_id}/locations/{location_id}/lakes/{lake_id} + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + content (google.cloud.dataplex_v1.types.Content): + Required. Content resource. + This corresponds to the ``content`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ + Returns: + google.cloud.dataplex_v1.types.Content: + Content represents a user-visible + notebook or a sql script + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent, content]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, gcd_content.CreateContentRequest): + request = gcd_content.CreateContentRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + if content is not None: + request.content = content + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.create_content] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("parent", request.parent), + )), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def update_content(self, + request: Optional[Union[gcd_content.UpdateContentRequest, dict]] = None, + *, + content: Optional[analyze.Content] = None, + update_mask: Optional[field_mask_pb2.FieldMask] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> analyze.Content: + r"""Update a content. Only supports full resource update. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dataplex_v1 + + def sample_update_content(): + # Create a client + client = dataplex_v1.ContentServiceClient() + + # Initialize request argument(s) + content = dataplex_v1.Content() + content.data_text = "data_text_value" + content.sql_script.engine = "SPARK" + content.path = "path_value" + + request = dataplex_v1.UpdateContentRequest( + content=content, + ) + + # Make the request + response = client.update_content(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.dataplex_v1.types.UpdateContentRequest, dict]): + The request object. Update content request. + content (google.cloud.dataplex_v1.types.Content): + Required. Update description. Only fields specified in + ``update_mask`` are updated. + + This corresponds to the ``content`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + update_mask (google.protobuf.field_mask_pb2.FieldMask): + Required. Mask of fields to update. 
+ This corresponds to the ``update_mask`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.dataplex_v1.types.Content: + Content represents a user-visible + notebook or a sql script + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([content, update_mask]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, gcd_content.UpdateContentRequest): + request = gcd_content.UpdateContentRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if content is not None: + request.content = content + if update_mask is not None: + request.update_mask = update_mask + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.update_content] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("content.name", request.content.name), + )), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def delete_content(self, + request: Optional[Union[content.DeleteContentRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Delete a content. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dataplex_v1 + + def sample_delete_content(): + # Create a client + client = dataplex_v1.ContentServiceClient() + + # Initialize request argument(s) + request = dataplex_v1.DeleteContentRequest( + name="name_value", + ) + + # Make the request + client.delete_content(request=request) + + Args: + request (Union[google.cloud.dataplex_v1.types.DeleteContentRequest, dict]): + The request object. Delete content request. + name (str): + Required. The resource name of the content: + projects/{project_id}/locations/{location_id}/lakes/{lake_id}/content/{content_id} + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. 
+ retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, content.DeleteContentRequest): + request = content.DeleteContentRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.delete_content] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + def get_content(self, + request: Optional[Union[content.GetContentRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> analyze.Content: + r"""Get a content resource. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dataplex_v1 + + def sample_get_content(): + # Create a client + client = dataplex_v1.ContentServiceClient() + + # Initialize request argument(s) + request = dataplex_v1.GetContentRequest( + name="name_value", + ) + + # Make the request + response = client.get_content(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.dataplex_v1.types.GetContentRequest, dict]): + The request object. Get content request. + name (str): + Required. The resource name of the content: + projects/{project_id}/locations/{location_id}/lakes/{lake_id}/content/{content_id} + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.dataplex_v1.types.Content: + Content represents a user-visible + notebook or a sql script + + """ + # Create or coerce a protobuf request object. 
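+        # NOTE: callers pass either the flattened `name` argument or a full
+        # request object, not both, e.g. (illustrative):
+        #     client.get_content(name="projects/p/locations/l/lakes/my-lake/content/my-script")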
+ # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, content.GetContentRequest): + request = content.GetContentRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.get_content] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def get_iam_policy(self, + request: Optional[Union[iam_policy_pb2.GetIamPolicyRequest, dict]] = None, + *, + resource: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> policy_pb2.Policy: + r"""Gets the access control policy for a contentitem resource. A + ``NOT_FOUND`` error is returned if the resource does not exist. + An empty policy is returned if the resource exists but does not + have a policy set on it. + + Caller must have Google IAM ``dataplex.content.getIamPolicy`` + permission on the resource. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dataplex_v1 + from google.iam.v1 import iam_policy_pb2 # type: ignore + + def sample_get_iam_policy(): + # Create a client + client = dataplex_v1.ContentServiceClient() + + # Initialize request argument(s) + request = iam_policy_pb2.GetIamPolicyRequest( + resource="resource_value", + ) + + # Make the request + response = client.get_iam_policy(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.iam.v1.iam_policy_pb2.GetIamPolicyRequest, dict]): + The request object. Request message for ``GetIamPolicy`` method. + resource (str): + REQUIRED: The resource for which the + policy is being requested. See the + operation documentation for the + appropriate value for this field. + + This corresponds to the ``resource`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ + Returns: + google.iam.v1.policy_pb2.Policy: + An Identity and Access Management (IAM) policy, which specifies access + controls for Google Cloud resources. + + A Policy is a collection of bindings. A binding binds + one or more members, or principals, to a single role. + Principals can be user accounts, service accounts, + Google groups, and domains (such as G Suite). A role + is a named list of permissions; each role can be an + IAM predefined role or a user-created custom role. + + For some types of Google Cloud resources, a binding + can also specify a condition, which is a logical + expression that allows access to a resource only if + the expression evaluates to true. A condition can add + constraints based on attributes of the request, the + resource, or both. To learn which resources support + conditions in their IAM policies, see the [IAM + documentation](\ https://cloud.google.com/iam/help/conditions/resource-policies). + + **JSON example:** + + :literal:`\` { "bindings": [ { "role": "roles/resourcemanager.organizationAdmin", "members": [ "user:mike@example.com", "group:admins@example.com", "domain:google.com", "serviceAccount:my-project-id@appspot.gserviceaccount.com" ] }, { "role": "roles/resourcemanager.organizationViewer", "members": [ "user:eve@example.com" ], "condition": { "title": "expirable access", "description": "Does not grant access after Sep 2020", "expression": "request.time < timestamp('2020-10-01T00:00:00.000Z')", } } ], "etag": "BwWWja0YfJA=", "version": 3 }`\ \` + + **YAML example:** + + :literal:`\` bindings: - members: - user:mike@example.com - group:admins@example.com - domain:google.com - serviceAccount:my-project-id@appspot.gserviceaccount.com role: roles/resourcemanager.organizationAdmin - members: - user:eve@example.com role: roles/resourcemanager.organizationViewer condition: title: expirable access description: Does not grant access after Sep 2020 expression: request.time < timestamp('2020-10-01T00:00:00.000Z') etag: BwWWja0YfJA= version: 3`\ \` + + For a description of IAM and its features, see the + [IAM + documentation](\ https://cloud.google.com/iam/docs/). + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([resource]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + if isinstance(request, dict): + # - The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + request = iam_policy_pb2.GetIamPolicyRequest(**request) + elif not request: + # Null request, just make one. + request = iam_policy_pb2.GetIamPolicyRequest() + if resource is not None: + request.resource = resource + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.get_iam_policy] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("resource", request.resource), + )), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
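+        # NOTE: the result is a raw protobuf Policy (google.iam.v1.policy_pb2),
+        # not a proto-plus wrapper, so iterate e.g. `response.bindings` directly.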
+        return response
+
+    def set_iam_policy(self,
+            request: Optional[Union[iam_policy_pb2.SetIamPolicyRequest, dict]] = None,
+            *,
+            retry: OptionalRetry = gapic_v1.method.DEFAULT,
+            timeout: Union[float, object] = gapic_v1.method.DEFAULT,
+            metadata: Sequence[Tuple[str, str]] = (),
+            ) -> policy_pb2.Policy:
+        r"""Sets the access control policy on the specified contentitem
+        resource. Replaces any existing policy.
+
+        Caller must have Google IAM ``dataplex.content.setIamPolicy``
+        permission on the resource.
+
+        .. code-block:: python
+
+            # This snippet has been automatically generated and should be regarded as a
+            # code template only.
+            # It will require modifications to work:
+            # - It may require correct/in-range values for request initialization.
+            # - It may require specifying regional endpoints when creating the service
+            #   client as shown in:
+            #   https://googleapis.dev/python/google-api-core/latest/client_options.html
+            from google.cloud import dataplex_v1
+            from google.iam.v1 import iam_policy_pb2  # type: ignore
+
+            def sample_set_iam_policy():
+                # Create a client
+                client = dataplex_v1.ContentServiceClient()
+
+                # Initialize request argument(s)
+                request = iam_policy_pb2.SetIamPolicyRequest(
+                    resource="resource_value",
+                )
+
+                # Make the request
+                response = client.set_iam_policy(request=request)
+
+                # Handle the response
+                print(response)
+
+        Args:
+            request (Union[google.iam.v1.iam_policy_pb2.SetIamPolicyRequest, dict]):
+                The request object. Request message for ``SetIamPolicy`` method.
+            retry (google.api_core.retry.Retry): Designation of what errors, if any,
+                should be retried.
+            timeout (float): The timeout for this request.
+            metadata (Sequence[Tuple[str, str]]): Strings which should be
+                sent along with the request as metadata.
+
+        Returns:
+            google.iam.v1.policy_pb2.Policy:
+                An Identity and Access Management (IAM) policy, which specifies access
+                controls for Google Cloud resources.
+
+                A Policy is a collection of bindings. A binding binds
+                one or more members, or principals, to a single role.
+                Principals can be user accounts, service accounts,
+                Google groups, and domains (such as G Suite). A role
+                is a named list of permissions; each role can be an
+                IAM predefined role or a user-created custom role.
+
+                For some types of Google Cloud resources, a binding
+                can also specify a condition, which is a logical
+                expression that allows access to a resource only if
+                the expression evaluates to true. A condition can add
+                constraints based on attributes of the request, the
+                resource, or both. To learn which resources support
+                conditions in their IAM policies, see the [IAM
+                documentation](https://cloud.google.com/iam/help/conditions/resource-policies).
+
+                **JSON example:**
+
+                ::
+
+                    {
+                      "bindings": [
+                        {
+                          "role": "roles/resourcemanager.organizationAdmin",
+                          "members": [
+                            "user:mike@example.com",
+                            "group:admins@example.com",
+                            "domain:google.com",
+                            "serviceAccount:my-project-id@appspot.gserviceaccount.com"
+                          ]
+                        },
+                        {
+                          "role": "roles/resourcemanager.organizationViewer",
+                          "members": [
+                            "user:eve@example.com"
+                          ],
+                          "condition": {
+                            "title": "expirable access",
+                            "description": "Does not grant access after Sep 2020",
+                            "expression": "request.time < timestamp('2020-10-01T00:00:00.000Z')"
+                          }
+                        }
+                      ],
+                      "etag": "BwWWja0YfJA=",
+                      "version": 3
+                    }
+
+                **YAML example:**
+
+                ::
+
+                    bindings:
+                    - members:
+                      - user:mike@example.com
+                      - group:admins@example.com
+                      - domain:google.com
+                      - serviceAccount:my-project-id@appspot.gserviceaccount.com
+                      role: roles/resourcemanager.organizationAdmin
+                    - members:
+                      - user:eve@example.com
+                      role: roles/resourcemanager.organizationViewer
+                      condition:
+                        title: expirable access
+                        description: Does not grant access after Sep 2020
+                        expression: request.time < timestamp('2020-10-01T00:00:00.000Z')
+                    etag: BwWWja0YfJA=
+                    version: 3
+
+                For a description of IAM and its features, see the
+                [IAM documentation](https://cloud.google.com/iam/docs/).
+
+        """
+        # Create or coerce a protobuf request object.
+        if isinstance(request, dict):
+            # - The request isn't a proto-plus wrapped type,
+            #   so it must be constructed via keyword expansion.
+            request = iam_policy_pb2.SetIamPolicyRequest(**request)
+        elif not request:
+            # Null request, just make one.
+            request = iam_policy_pb2.SetIamPolicyRequest()
+
+        # Wrap the RPC method; this adds retry and timeout information,
+        # and friendly error handling.
+        rpc = self._transport._wrapped_methods[self._transport.set_iam_policy]
+
+        # Certain fields should be provided within the metadata header;
+        # add these here.
+        metadata = tuple(metadata) + (
+            gapic_v1.routing_header.to_grpc_metadata((
+                ("resource", request.resource),
+            )),
+        )
+
+        # Validate the universe domain.
+        self._validate_universe_domain()
+
+        # Send the request.
+        response = rpc(
+            request,
+            retry=retry,
+            timeout=timeout,
+            metadata=metadata,
+        )
+
+        # Done; return the response.
+        return response
+
+    def test_iam_permissions(self,
+            request: Optional[Union[iam_policy_pb2.TestIamPermissionsRequest, dict]] = None,
+            *,
+            retry: OptionalRetry = gapic_v1.method.DEFAULT,
+            timeout: Union[float, object] = gapic_v1.method.DEFAULT,
+            metadata: Sequence[Tuple[str, str]] = (),
+            ) -> iam_policy_pb2.TestIamPermissionsResponse:
+        r"""Returns the caller's permissions on a resource. If the resource
+        does not exist, an empty set of permissions is returned (a
+        ``NOT_FOUND`` error is not returned).
+
+        A caller is not required to have Google IAM permission to make
+        this request.
+
+        Note: This operation is designed to be used for building
+        permission-aware UIs and command-line tools, not for
+        authorization checking. This operation may "fail open" without
+        warning.
+
+        .. code-block:: python
+
+            # This snippet has been automatically generated and should be regarded as a
+            # code template only.
+            # It will require modifications to work:
+            # - It may require correct/in-range values for request initialization.
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dataplex_v1 + from google.iam.v1 import iam_policy_pb2 # type: ignore + + def sample_test_iam_permissions(): + # Create a client + client = dataplex_v1.ContentServiceClient() + + # Initialize request argument(s) + request = iam_policy_pb2.TestIamPermissionsRequest( + resource="resource_value", + permissions=['permissions_value1', 'permissions_value2'], + ) + + # Make the request + response = client.test_iam_permissions(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.iam.v1.iam_policy_pb2.TestIamPermissionsRequest, dict]): + The request object. Request message for ``TestIamPermissions`` method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.iam.v1.iam_policy_pb2.TestIamPermissionsResponse: + Response message for TestIamPermissions method. + """ + # Create or coerce a protobuf request object. + if isinstance(request, dict): + # - The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + request = iam_policy_pb2.TestIamPermissionsRequest(**request) + elif not request: + # Null request, just make one. + request = iam_policy_pb2.TestIamPermissionsRequest() + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.test_iam_permissions] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("resource", request.resource), + )), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def list_content(self, + request: Optional[Union[content.ListContentRequest, dict]] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListContentPager: + r"""List content. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dataplex_v1 + + def sample_list_content(): + # Create a client + client = dataplex_v1.ContentServiceClient() + + # Initialize request argument(s) + request = dataplex_v1.ListContentRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_content(request=request) + + # Handle the response + for response in page_result: + print(response) + + Args: + request (Union[google.cloud.dataplex_v1.types.ListContentRequest, dict]): + The request object. List content request. Returns the + BASIC Content view. 
+ parent (str): + Required. The resource name of the parent lake: + projects/{project_id}/locations/{location_id}/lakes/{lake_id} + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.dataplex_v1.services.content_service.pagers.ListContentPager: + List content response. + + Iterating over this object will yield + results and resolve additional pages + automatically. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, content.ListContentRequest): + request = content.ListContentRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.list_content] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("parent", request.parent), + )), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__iter__` convenience method. + response = pagers.ListContentPager( + method=rpc, + request=request, + response=response, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def __enter__(self) -> "ContentServiceClient": + return self + + def __exit__(self, type, value, traceback): + """Releases underlying transport's resources. + + .. warning:: + ONLY use as a context manager if the transport is NOT shared + with other clients! Exiting the with block will CLOSE the transport + and may cause errors in other clients! + """ + self.transport.close() + + def list_operations( + self, + request: Optional[operations_pb2.ListOperationsRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.ListOperationsResponse: + r"""Lists operations that match the specified filter in the request. + + Args: + request (:class:`~.operations_pb2.ListOperationsRequest`): + The request object. Request message for + `ListOperations` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
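+
+        Example (an editor's illustrative sketch; the ``name`` value is a
+        placeholder location path)::
+
+            from google.longrunning import operations_pb2
+
+            request = operations_pb2.ListOperationsRequest(
+                name="projects/my-project/locations/us-central1",
+            )
+            for operation in client.list_operations(request).operations:
+                print(operation.name)
+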
+ Returns: + ~.operations_pb2.ListOperationsResponse: + Response message for ``ListOperations`` method. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.ListOperationsRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.list_operations] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("name", request.name),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, retry=retry, timeout=timeout, metadata=metadata,) + + # Done; return the response. + return response + + def get_operation( + self, + request: Optional[operations_pb2.GetOperationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.Operation: + r"""Gets the latest state of a long-running operation. + + Args: + request (:class:`~.operations_pb2.GetOperationRequest`): + The request object. Request message for + `GetOperation` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + ~.operations_pb2.Operation: + An ``Operation`` object. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.GetOperationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.get_operation] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("name", request.name),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, retry=retry, timeout=timeout, metadata=metadata,) + + # Done; return the response. + return response + + def delete_operation( + self, + request: Optional[operations_pb2.DeleteOperationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Deletes a long-running operation. + + This method indicates that the client is no longer interested + in the operation result. It does not cancel the operation. + If the server doesn't support this method, it returns + `google.rpc.Code.UNIMPLEMENTED`. + + Args: + request (:class:`~.operations_pb2.DeleteOperationRequest`): + The request object. Request message for + `DeleteOperation` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+        Returns:
+            None
+        """
+        # Create or coerce a protobuf request object.
+        # The request isn't a proto-plus wrapped type,
+        # so it must be constructed via keyword expansion.
+        if isinstance(request, dict):
+            request = operations_pb2.DeleteOperationRequest(**request)
+
+        # Wrap the RPC method; this adds retry and timeout information,
+        # and friendly error handling.
+        rpc = self._transport._wrapped_methods[self._transport.delete_operation]
+
+        # Certain fields should be provided within the metadata header;
+        # add these here.
+        metadata = tuple(metadata) + (
+            gapic_v1.routing_header.to_grpc_metadata(
+                (("name", request.name),)),
+        )
+
+        # Validate the universe domain.
+        self._validate_universe_domain()
+
+        # Send the request.
+        rpc(request, retry=retry, timeout=timeout, metadata=metadata,)
+
+    def cancel_operation(
+        self,
+        request: Optional[operations_pb2.CancelOperationRequest] = None,
+        *,
+        retry: OptionalRetry = gapic_v1.method.DEFAULT,
+        timeout: Union[float, object] = gapic_v1.method.DEFAULT,
+        metadata: Sequence[Tuple[str, str]] = (),
+    ) -> None:
+        r"""Starts asynchronous cancellation on a long-running operation.
+
+        The server makes a best effort to cancel the operation, but success
+        is not guaranteed. If the server doesn't support this method, it returns
+        `google.rpc.Code.UNIMPLEMENTED`.
+
+        Args:
+            request (:class:`~.operations_pb2.CancelOperationRequest`):
+                The request object. Request message for
+                `CancelOperation` method.
+            retry (google.api_core.retry.Retry): Designation of what errors,
+                if any, should be retried.
+            timeout (float): The timeout for this request.
+            metadata (Sequence[Tuple[str, str]]): Strings which should be
+                sent along with the request as metadata.
+        Returns:
+            None
+        """
+        # Create or coerce a protobuf request object.
+        # The request isn't a proto-plus wrapped type,
+        # so it must be constructed via keyword expansion.
+        if isinstance(request, dict):
+            request = operations_pb2.CancelOperationRequest(**request)
+
+        # Wrap the RPC method; this adds retry and timeout information,
+        # and friendly error handling.
+        rpc = self._transport._wrapped_methods[self._transport.cancel_operation]
+
+        # Certain fields should be provided within the metadata header;
+        # add these here.
+        metadata = tuple(metadata) + (
+            gapic_v1.routing_header.to_grpc_metadata(
+                (("name", request.name),)),
+        )
+
+        # Validate the universe domain.
+        self._validate_universe_domain()
+
+        # Send the request.
+        rpc(request, retry=retry, timeout=timeout, metadata=metadata,)
+
+    def get_location(
+        self,
+        request: Optional[locations_pb2.GetLocationRequest] = None,
+        *,
+        retry: OptionalRetry = gapic_v1.method.DEFAULT,
+        timeout: Union[float, object] = gapic_v1.method.DEFAULT,
+        metadata: Sequence[Tuple[str, str]] = (),
+    ) -> locations_pb2.Location:
+        r"""Gets information about a location.
+
+        Args:
+            request (:class:`~.locations_pb2.GetLocationRequest`):
+                The request object. Request message for
+                `GetLocation` method.
+            retry (google.api_core.retry.Retry): Designation of what errors,
+                if any, should be retried.
+            timeout (float): The timeout for this request.
+            metadata (Sequence[Tuple[str, str]]): Strings which should be
+                sent along with the request as metadata.
+        Returns:
+            ~.locations_pb2.Location:
+                Location object.
+        """
+        # Create or coerce a protobuf request object.
+        # The request isn't a proto-plus wrapped type,
+        # so it must be constructed via keyword expansion.
+        if isinstance(request, dict):
+            request = locations_pb2.GetLocationRequest(**request)
+
+        # Wrap the RPC method; this adds retry and timeout information,
+        # and friendly error handling.
+        rpc = self._transport._wrapped_methods[self._transport.get_location]
+
+        # Certain fields should be provided within the metadata header;
+        # add these here.
+        metadata = tuple(metadata) + (
+            gapic_v1.routing_header.to_grpc_metadata(
+                (("name", request.name),)),
+        )
+
+        # Validate the universe domain.
+        self._validate_universe_domain()
+
+        # Send the request.
+        response = rpc(
+            request, retry=retry, timeout=timeout, metadata=metadata,)
+
+        # Done; return the response.
+        return response
+
+    def list_locations(
+        self,
+        request: Optional[locations_pb2.ListLocationsRequest] = None,
+        *,
+        retry: OptionalRetry = gapic_v1.method.DEFAULT,
+        timeout: Union[float, object] = gapic_v1.method.DEFAULT,
+        metadata: Sequence[Tuple[str, str]] = (),
+    ) -> locations_pb2.ListLocationsResponse:
+        r"""Lists information about the supported locations for this service.
+
+        Args:
+            request (:class:`~.locations_pb2.ListLocationsRequest`):
+                The request object. Request message for
+                `ListLocations` method.
+            retry (google.api_core.retry.Retry): Designation of what errors,
+                if any, should be retried.
+            timeout (float): The timeout for this request.
+            metadata (Sequence[Tuple[str, str]]): Strings which should be
+                sent along with the request as metadata.
+        Returns:
+            ~.locations_pb2.ListLocationsResponse:
+                Response message for ``ListLocations`` method.
+        """
+        # Create or coerce a protobuf request object.
+        # The request isn't a proto-plus wrapped type,
+        # so it must be constructed via keyword expansion.
+        if isinstance(request, dict):
+            request = locations_pb2.ListLocationsRequest(**request)
+
+        # Wrap the RPC method; this adds retry and timeout information,
+        # and friendly error handling.
+        rpc = self._transport._wrapped_methods[self._transport.list_locations]
+
+        # Certain fields should be provided within the metadata header;
+        # add these here.
+        metadata = tuple(metadata) + (
+            gapic_v1.routing_header.to_grpc_metadata(
+                (("name", request.name),)),
+        )
+
+        # Validate the universe domain.
+        self._validate_universe_domain()
+
+        # Send the request.
+        response = rpc(
+            request, retry=retry, timeout=timeout, metadata=metadata,)
+
+        # Done; return the response.
+        return response
+
+
+DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(gapic_version=package_version.__version__)
+
+
+__all__ = (
+    "ContentServiceClient",
+)
diff --git a/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/content_service/pagers.py b/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/content_service/pagers.py
new file mode 100644
index 000000000000..3659ec82f6fd
--- /dev/null
+++ b/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/content_service/pagers.py
@@ -0,0 +1,163 @@
+# -*- coding: utf-8 -*-
+# Copyright 2024 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+# +from google.api_core import gapic_v1 +from google.api_core import retry as retries +from google.api_core import retry_async as retries_async +from typing import Any, AsyncIterator, Awaitable, Callable, Sequence, Tuple, Optional, Iterator, Union +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault, None] + OptionalAsyncRetry = Union[retries_async.AsyncRetry, gapic_v1.method._MethodDefault, None] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object, None] # type: ignore + OptionalAsyncRetry = Union[retries_async.AsyncRetry, object, None] # type: ignore + +from google.cloud.dataplex_v1.types import analyze +from google.cloud.dataplex_v1.types import content + + +class ListContentPager: + """A pager for iterating through ``list_content`` requests. + + This class thinly wraps an initial + :class:`google.cloud.dataplex_v1.types.ListContentResponse` object, and + provides an ``__iter__`` method to iterate through its + ``content`` field. + + If there are more pages, the ``__iter__`` method will make additional + ``ListContent`` requests and continue to iterate + through the ``content`` field on the + corresponding responses. + + All the usual :class:`google.cloud.dataplex_v1.types.ListContentResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + def __init__(self, + method: Callable[..., content.ListContentResponse], + request: content.ListContentRequest, + response: content.ListContentResponse, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = ()): + """Instantiate the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.dataplex_v1.types.ListContentRequest): + The initial request object. + response (google.cloud.dataplex_v1.types.ListContentResponse): + The initial response object. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + self._method = method + self._request = content.ListContentRequest(request) + self._response = response + self._retry = retry + self._timeout = timeout + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + def pages(self) -> Iterator[content.ListContentResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = self._method(self._request, retry=self._retry, timeout=self._timeout, metadata=self._metadata) + yield self._response + + def __iter__(self) -> Iterator[analyze.Content]: + for page in self.pages: + yield from page.content + + def __repr__(self) -> str: + return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) + + +class ListContentAsyncPager: + """A pager for iterating through ``list_content`` requests. + + This class thinly wraps an initial + :class:`google.cloud.dataplex_v1.types.ListContentResponse` object, and + provides an ``__aiter__`` method to iterate through its + ``content`` field. 
+
+    If there are more pages, the ``__aiter__`` method will make additional
+    ``ListContent`` requests and continue to iterate
+    through the ``content`` field on the
+    corresponding responses.
+
+    All the usual :class:`google.cloud.dataplex_v1.types.ListContentResponse`
+    attributes are available on the pager. If multiple requests are made, only
+    the most recent response is retained, and thus used for attribute lookup.
+    """
+    def __init__(self,
+            method: Callable[..., Awaitable[content.ListContentResponse]],
+            request: content.ListContentRequest,
+            response: content.ListContentResponse,
+            *,
+            retry: OptionalAsyncRetry = gapic_v1.method.DEFAULT,
+            timeout: Union[float, object] = gapic_v1.method.DEFAULT,
+            metadata: Sequence[Tuple[str, str]] = ()):
+        """Instantiate the pager.
+
+        Args:
+            method (Callable): The method that was originally called, and
+                which instantiated this pager.
+            request (google.cloud.dataplex_v1.types.ListContentRequest):
+                The initial request object.
+            response (google.cloud.dataplex_v1.types.ListContentResponse):
+                The initial response object.
+            retry (google.api_core.retry.AsyncRetry): Designation of what errors,
+                if any, should be retried.
+            timeout (float): The timeout for this request.
+            metadata (Sequence[Tuple[str, str]]): Strings which should be
+                sent along with the request as metadata.
+        """
+        self._method = method
+        self._request = content.ListContentRequest(request)
+        self._response = response
+        self._retry = retry
+        self._timeout = timeout
+        self._metadata = metadata
+
+    def __getattr__(self, name: str) -> Any:
+        return getattr(self._response, name)
+
+    @property
+    async def pages(self) -> AsyncIterator[content.ListContentResponse]:
+        yield self._response
+        while self._response.next_page_token:
+            self._request.page_token = self._response.next_page_token
+            self._response = await self._method(self._request, retry=self._retry, timeout=self._timeout, metadata=self._metadata)
+            yield self._response
+
+    def __aiter__(self) -> AsyncIterator[analyze.Content]:
+        async def async_generator():
+            async for page in self.pages:
+                for response in page.content:
+                    yield response
+
+        return async_generator()
+
+    def __repr__(self) -> str:
+        return '{0}<{1!r}>'.format(self.__class__.__name__, self._response)
diff --git a/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/content_service/transports/README.rst b/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/content_service/transports/README.rst
new file mode 100644
index 000000000000..f737919bf8e5
--- /dev/null
+++ b/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/content_service/transports/README.rst
@@ -0,0 +1,9 @@
+
+transport inheritance structure
+_______________________________
+
+`ContentServiceTransport` is the ABC for all transports.
+- public child `ContentServiceGrpcTransport` for sync gRPC transport (defined in `grpc.py`).
+- public child `ContentServiceGrpcAsyncIOTransport` for async gRPC transport (defined in `grpc_asyncio.py`).
+- private child `_BaseContentServiceRestTransport` for base REST transport with inner classes `_BaseMETHOD` (defined in `rest_base.py`).
+- public child `ContentServiceRestTransport` for sync REST transport with inner classes `METHOD` derived from the parent's corresponding `_BaseMETHOD` classes (defined in `rest.py`).
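+
+A minimal selection sketch (an illustration, not generated output; it assumes
+only the client and transports shown in this package, and the host value is a
+placeholder):
+
+.. code-block:: python
+
+    from google.cloud import dataplex_v1
+    from google.cloud.dataplex_v1.services.content_service import transports
+
+    # The sync gRPC transport is the default; it can also be named explicitly.
+    client = dataplex_v1.ContentServiceClient(transport="grpc")
+
+    # Alternatively, construct a transport instance and inject it.
+    grpc_transport = transports.ContentServiceGrpcTransport(
+        host="dataplex.googleapis.com",
+    )
+    client = dataplex_v1.ContentServiceClient(transport=grpc_transport)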
diff --git a/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/content_service/transports/__init__.py b/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/content_service/transports/__init__.py new file mode 100644 index 000000000000..700cca6c0dfc --- /dev/null +++ b/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/content_service/transports/__init__.py @@ -0,0 +1,33 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from collections import OrderedDict +from typing import Dict, Type + +from .base import ContentServiceTransport +from .grpc import ContentServiceGrpcTransport +from .grpc_asyncio import ContentServiceGrpcAsyncIOTransport + + +# Compile a registry of transports. +_transport_registry = OrderedDict() # type: Dict[str, Type[ContentServiceTransport]] +_transport_registry['grpc'] = ContentServiceGrpcTransport +_transport_registry['grpc_asyncio'] = ContentServiceGrpcAsyncIOTransport + +__all__ = ( + 'ContentServiceTransport', + 'ContentServiceGrpcTransport', + 'ContentServiceGrpcAsyncIOTransport', +) diff --git a/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/content_service/transports/base.py b/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/content_service/transports/base.py new file mode 100644 index 000000000000..7d77d96adb47 --- /dev/null +++ b/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/content_service/transports/base.py @@ -0,0 +1,377 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +import abc +from typing import Awaitable, Callable, Dict, Optional, Sequence, Union + +from google.cloud.dataplex_v1 import gapic_version as package_version + +import google.auth # type: ignore +import google.api_core +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry as retries +from google.auth import credentials as ga_credentials # type: ignore +from google.oauth2 import service_account # type: ignore + +from google.cloud.dataplex_v1.types import analyze +from google.cloud.dataplex_v1.types import content +from google.cloud.dataplex_v1.types import content as gcd_content +from google.cloud.location import locations_pb2 # type: ignore +from google.iam.v1 import iam_policy_pb2 # type: ignore +from google.iam.v1 import policy_pb2 # type: ignore +from google.longrunning import operations_pb2 # type: ignore +from google.protobuf import empty_pb2 # type: ignore + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(gapic_version=package_version.__version__) + + +class ContentServiceTransport(abc.ABC): + """Abstract transport class for ContentService.""" + + AUTH_SCOPES = ( + 'https://www.googleapis.com/auth/cloud-platform', + ) + + DEFAULT_HOST: str = 'dataplex.googleapis.com' + def __init__( + self, *, + host: str = DEFAULT_HOST, + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + api_audience: Optional[str] = None, + **kwargs, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to (default: 'dataplex.googleapis.com'). + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is mutually exclusive with credentials. + scopes (Optional[Sequence[str]]): A list of scopes. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + """ + + scopes_kwargs = {"scopes": scopes, "default_scopes": self.AUTH_SCOPES} + + # Save the scopes. + self._scopes = scopes + if not hasattr(self, "_ignore_credentials"): + self._ignore_credentials: bool = False + + # If no credentials are provided, then determine the appropriate + # defaults. 
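+        # Resolution order, as implemented below: an explicit ``credentials``
+        # object is used as-is; otherwise ``credentials_file`` is loaded from
+        # disk; otherwise Application Default Credentials are discovered via
+        # ``google.auth.default()``. Passing both ``credentials`` and
+        # ``credentials_file`` raises ``DuplicateCredentialArgs``.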
+ if credentials and credentials_file: + raise core_exceptions.DuplicateCredentialArgs("'credentials_file' and 'credentials' are mutually exclusive") + + if credentials_file is not None: + credentials, _ = google.auth.load_credentials_from_file( + credentials_file, + **scopes_kwargs, + quota_project_id=quota_project_id + ) + elif credentials is None and not self._ignore_credentials: + credentials, _ = google.auth.default(**scopes_kwargs, quota_project_id=quota_project_id) + # Don't apply audience if the credentials file passed from user. + if hasattr(credentials, "with_gdch_audience"): + credentials = credentials.with_gdch_audience(api_audience if api_audience else host) + + # If the credentials are service account credentials, then always try to use self signed JWT. + if always_use_jwt_access and isinstance(credentials, service_account.Credentials) and hasattr(service_account.Credentials, "with_always_use_jwt_access"): + credentials = credentials.with_always_use_jwt_access(True) + + # Save the credentials. + self._credentials = credentials + + # Save the hostname. Default to port 443 (HTTPS) if none is specified. + if ':' not in host: + host += ':443' + self._host = host + + @property + def host(self): + return self._host + + def _prep_wrapped_messages(self, client_info): + # Precompute the wrapped methods. + self._wrapped_methods = { + self.create_content: gapic_v1.method.wrap_method( + self.create_content, + default_timeout=60.0, + client_info=client_info, + ), + self.update_content: gapic_v1.method.wrap_method( + self.update_content, + default_timeout=60.0, + client_info=client_info, + ), + self.delete_content: gapic_v1.method.wrap_method( + self.delete_content, + default_timeout=60.0, + client_info=client_info, + ), + self.get_content: gapic_v1.method.wrap_method( + self.get_content, + default_retry=retries.Retry( + initial=1.0, + maximum=10.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=client_info, + ), + self.get_iam_policy: gapic_v1.method.wrap_method( + self.get_iam_policy, + default_retry=retries.Retry( + initial=1.0, + maximum=10.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=client_info, + ), + self.set_iam_policy: gapic_v1.method.wrap_method( + self.set_iam_policy, + default_timeout=60.0, + client_info=client_info, + ), + self.test_iam_permissions: gapic_v1.method.wrap_method( + self.test_iam_permissions, + default_retry=retries.Retry( + initial=1.0, + maximum=10.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=client_info, + ), + self.list_content: gapic_v1.method.wrap_method( + self.list_content, + default_retry=retries.Retry( + initial=1.0, + maximum=10.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=client_info, + ), + self.get_location: gapic_v1.method.wrap_method( + self.get_location, + default_timeout=None, + client_info=client_info, + ), + self.list_locations: gapic_v1.method.wrap_method( + self.list_locations, + default_timeout=None, + client_info=client_info, + ), + self.cancel_operation: gapic_v1.method.wrap_method( + self.cancel_operation, + default_timeout=None, + client_info=client_info, + ), + 
self.delete_operation: gapic_v1.method.wrap_method( + self.delete_operation, + default_timeout=None, + client_info=client_info, + ), + self.get_operation: gapic_v1.method.wrap_method( + self.get_operation, + default_timeout=None, + client_info=client_info, + ), + self.list_operations: gapic_v1.method.wrap_method( + self.list_operations, + default_timeout=None, + client_info=client_info, + ), + } + + def close(self): + """Closes resources associated with the transport. + + .. warning:: + Only call this method if the transport is NOT shared + with other clients - this may cause errors in other clients! + """ + raise NotImplementedError() + + @property + def create_content(self) -> Callable[ + [gcd_content.CreateContentRequest], + Union[ + analyze.Content, + Awaitable[analyze.Content] + ]]: + raise NotImplementedError() + + @property + def update_content(self) -> Callable[ + [gcd_content.UpdateContentRequest], + Union[ + analyze.Content, + Awaitable[analyze.Content] + ]]: + raise NotImplementedError() + + @property + def delete_content(self) -> Callable[ + [content.DeleteContentRequest], + Union[ + empty_pb2.Empty, + Awaitable[empty_pb2.Empty] + ]]: + raise NotImplementedError() + + @property + def get_content(self) -> Callable[ + [content.GetContentRequest], + Union[ + analyze.Content, + Awaitable[analyze.Content] + ]]: + raise NotImplementedError() + + @property + def get_iam_policy(self) -> Callable[ + [iam_policy_pb2.GetIamPolicyRequest], + Union[ + policy_pb2.Policy, + Awaitable[policy_pb2.Policy] + ]]: + raise NotImplementedError() + + @property + def set_iam_policy(self) -> Callable[ + [iam_policy_pb2.SetIamPolicyRequest], + Union[ + policy_pb2.Policy, + Awaitable[policy_pb2.Policy] + ]]: + raise NotImplementedError() + + @property + def test_iam_permissions(self) -> Callable[ + [iam_policy_pb2.TestIamPermissionsRequest], + Union[ + iam_policy_pb2.TestIamPermissionsResponse, + Awaitable[iam_policy_pb2.TestIamPermissionsResponse] + ]]: + raise NotImplementedError() + + @property + def list_content(self) -> Callable[ + [content.ListContentRequest], + Union[ + content.ListContentResponse, + Awaitable[content.ListContentResponse] + ]]: + raise NotImplementedError() + + @property + def list_operations( + self, + ) -> Callable[ + [operations_pb2.ListOperationsRequest], + Union[operations_pb2.ListOperationsResponse, Awaitable[operations_pb2.ListOperationsResponse]], + ]: + raise NotImplementedError() + + @property + def get_operation( + self, + ) -> Callable[ + [operations_pb2.GetOperationRequest], + Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], + ]: + raise NotImplementedError() + + @property + def cancel_operation( + self, + ) -> Callable[ + [operations_pb2.CancelOperationRequest], + None, + ]: + raise NotImplementedError() + + @property + def delete_operation( + self, + ) -> Callable[ + [operations_pb2.DeleteOperationRequest], + None, + ]: + raise NotImplementedError() + + @property + def get_location(self, + ) -> Callable[ + [locations_pb2.GetLocationRequest], + Union[locations_pb2.Location, Awaitable[locations_pb2.Location]], + ]: + raise NotImplementedError() + + @property + def list_locations(self, + ) -> Callable[ + [locations_pb2.ListLocationsRequest], + Union[locations_pb2.ListLocationsResponse, Awaitable[locations_pb2.ListLocationsResponse]], + ]: + raise NotImplementedError() + + @property + def kind(self) -> str: + raise NotImplementedError() + + +__all__ = ( + 'ContentServiceTransport', +) diff --git 
a/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/content_service/transports/grpc.py b/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/content_service/transports/grpc.py new file mode 100644 index 000000000000..54d08d185201 --- /dev/null +++ b/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/content_service/transports/grpc.py @@ -0,0 +1,587 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +import warnings +from typing import Callable, Dict, Optional, Sequence, Tuple, Union + +from google.api_core import grpc_helpers +from google.api_core import gapic_v1 +import google.auth # type: ignore +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore + +import grpc # type: ignore + +from google.cloud.dataplex_v1.types import analyze +from google.cloud.dataplex_v1.types import content +from google.cloud.dataplex_v1.types import content as gcd_content +from google.cloud.location import locations_pb2 # type: ignore +from google.iam.v1 import iam_policy_pb2 # type: ignore +from google.iam.v1 import policy_pb2 # type: ignore +from google.longrunning import operations_pb2 # type: ignore +from google.protobuf import empty_pb2 # type: ignore +from .base import ContentServiceTransport, DEFAULT_CLIENT_INFO + + +class ContentServiceGrpcTransport(ContentServiceTransport): + """gRPC backend transport for ContentService. + + ContentService manages Notebook and SQL Scripts for Dataplex. + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. + + It sends protocol buffers over the wire using gRPC (which is built on + top of HTTP/2); the ``grpcio`` package must be installed. + """ + _stubs: Dict[str, Callable] + + def __init__(self, *, + host: str = 'dataplex.googleapis.com', + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + channel: Optional[Union[grpc.Channel, Callable[..., grpc.Channel]]] = None, + api_mtls_endpoint: Optional[str] = None, + client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None, + client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + api_audience: Optional[str] = None, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to (default: 'dataplex.googleapis.com'). + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. 
These
+                credentials identify the application to the service; if none
+                are specified, the client will attempt to ascertain the
+                credentials from the environment.
+                This argument is ignored if a ``channel`` instance is provided.
+            credentials_file (Optional[str]): A file with credentials that can
+                be loaded with :func:`google.auth.load_credentials_from_file`.
+                This argument is ignored if a ``channel`` instance is provided.
+            scopes (Optional[Sequence[str]]): A list of scopes. This argument is
+                ignored if a ``channel`` instance is provided.
+            channel (Optional[Union[grpc.Channel, Callable[..., grpc.Channel]]]):
+                A ``Channel`` instance through which to make calls, or a Callable
+                that constructs and returns one. If set to None, ``self.create_channel``
+                is used to create the channel. If a Callable is given, it will be called
+                with the same arguments as used in ``self.create_channel``.
+            api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint.
+                If provided, it overrides the ``host`` argument and tries to create
+                a mutual TLS channel with client SSL credentials from
+                ``client_cert_source`` or application default SSL credentials.
+            client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]):
+                Deprecated. A callback to provide client SSL certificate bytes and
+                private key bytes, both in PEM format. It is ignored if
+                ``api_mtls_endpoint`` is None.
+            ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials
+                for the gRPC channel. It is ignored if a ``channel`` instance is provided.
+            client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]):
+                A callback to provide client certificate bytes and private key bytes,
+                both in PEM format. It is used to configure a mutual TLS channel. It is
+                ignored if a ``channel`` instance or ``ssl_channel_credentials`` is provided.
+            quota_project_id (Optional[str]): An optional project to use for billing
+                and quota.
+            client_info (google.api_core.gapic_v1.client_info.ClientInfo):
+                The client info used to send a user-agent string along with
+                API requests. If ``None``, then default info will be used.
+                Generally, you only need to set this if you're developing
+                your own client library.
+            always_use_jwt_access (Optional[bool]): Whether self-signed JWT should
+                be used for service account credentials.
+
+        Raises:
+          google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport
+              creation failed for any reason.
+          google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials``
+              and ``credentials_file`` are passed.
+        """
+        self._grpc_channel = None
+        self._ssl_channel_credentials = ssl_channel_credentials
+        self._stubs: Dict[str, Callable] = {}
+
+        if api_mtls_endpoint:
+            warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning)
+        if client_cert_source:
+            warnings.warn("client_cert_source is deprecated", DeprecationWarning)
+
+        if isinstance(channel, grpc.Channel):
+            # Ignore credentials if a channel was passed.
+            credentials = None
+            self._ignore_credentials = True
+            # If a channel was explicitly provided, set it.
+            self._grpc_channel = channel
+            self._ssl_channel_credentials = None
+
+        else:
+            if api_mtls_endpoint:
+                host = api_mtls_endpoint
+
+                # Create SSL credentials with client_cert_source or application
+                # default SSL credentials.
+                if client_cert_source:
+                    cert, key = client_cert_source()
+                    self._ssl_channel_credentials = grpc.ssl_channel_credentials(
+                        certificate_chain=cert, private_key=key
+                    )
+                else:
+                    self._ssl_channel_credentials = SslCredentials().ssl_credentials
+
+            else:
+                if client_cert_source_for_mtls and not ssl_channel_credentials:
+                    cert, key = client_cert_source_for_mtls()
+                    self._ssl_channel_credentials = grpc.ssl_channel_credentials(
+                        certificate_chain=cert, private_key=key
+                    )
+
+        # The base transport sets the host, credentials and scopes
+        super().__init__(
+            host=host,
+            credentials=credentials,
+            credentials_file=credentials_file,
+            scopes=scopes,
+            quota_project_id=quota_project_id,
+            client_info=client_info,
+            always_use_jwt_access=always_use_jwt_access,
+            api_audience=api_audience,
+        )
+
+        if not self._grpc_channel:
+            # initialize with the provided callable or the default channel
+            channel_init = channel or type(self).create_channel
+            self._grpc_channel = channel_init(
+                self._host,
+                # use the credentials which are saved
+                credentials=self._credentials,
+                # Set ``credentials_file`` to ``None`` here as
+                # the credentials that we saved earlier should be used.
+                credentials_file=None,
+                scopes=self._scopes,
+                ssl_credentials=self._ssl_channel_credentials,
+                quota_project_id=quota_project_id,
+                options=[
+                    ("grpc.max_send_message_length", -1),
+                    ("grpc.max_receive_message_length", -1),
+                ],
+            )
+
+        # Wrap messages. This must be done after self._grpc_channel exists
+        self._prep_wrapped_messages(client_info)
+
+    @classmethod
+    def create_channel(cls,
+                       host: str = 'dataplex.googleapis.com',
+                       credentials: Optional[ga_credentials.Credentials] = None,
+                       credentials_file: Optional[str] = None,
+                       scopes: Optional[Sequence[str]] = None,
+                       quota_project_id: Optional[str] = None,
+                       **kwargs) -> grpc.Channel:
+        """Create and return a gRPC channel object.
+        Args:
+            host (Optional[str]): The host for the channel to use.
+            credentials (Optional[~.Credentials]): The
+                authorization credentials to attach to requests. These
+                credentials identify this application to the service. If
+                none are specified, the client will attempt to ascertain
+                the credentials from the environment.
+            credentials_file (Optional[str]): A file with credentials that can
+                be loaded with :func:`google.auth.load_credentials_from_file`.
+                This argument is mutually exclusive with credentials.
+            scopes (Optional[Sequence[str]]): An optional list of scopes needed for this
+                service. These are only used when credentials are not specified and
+                are passed to :func:`google.auth.default`.
+            quota_project_id (Optional[str]): An optional project to use for billing
+                and quota.
+            kwargs (Optional[dict]): Keyword arguments, which are passed to the
+                channel creation.
+        Returns:
+            grpc.Channel: A gRPC channel object.
+
+        Raises:
+            google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials``
+                and ``credentials_file`` are passed.
+        """
+
+        return grpc_helpers.create_channel(
+            host,
+            credentials=credentials,
+            credentials_file=credentials_file,
+            quota_project_id=quota_project_id,
+            default_scopes=cls.AUTH_SCOPES,
+            scopes=scopes,
+            default_host=cls.DEFAULT_HOST,
+            **kwargs
+        )
+
+    @property
+    def grpc_channel(self) -> grpc.Channel:
+        """Return the channel designed to connect to this service.
+        """
+        return self._grpc_channel
+
+    @property
+    def create_content(self) -> Callable[
+            [gcd_content.CreateContentRequest],
+            analyze.Content]:
+        r"""Return a callable for the create content method over gRPC.
+
+        Create a content.
+ + Returns: + Callable[[~.CreateContentRequest], + ~.Content]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'create_content' not in self._stubs: + self._stubs['create_content'] = self.grpc_channel.unary_unary( + '/google.cloud.dataplex.v1.ContentService/CreateContent', + request_serializer=gcd_content.CreateContentRequest.serialize, + response_deserializer=analyze.Content.deserialize, + ) + return self._stubs['create_content'] + + @property + def update_content(self) -> Callable[ + [gcd_content.UpdateContentRequest], + analyze.Content]: + r"""Return a callable for the update content method over gRPC. + + Update a content. Only supports full resource update. + + Returns: + Callable[[~.UpdateContentRequest], + ~.Content]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'update_content' not in self._stubs: + self._stubs['update_content'] = self.grpc_channel.unary_unary( + '/google.cloud.dataplex.v1.ContentService/UpdateContent', + request_serializer=gcd_content.UpdateContentRequest.serialize, + response_deserializer=analyze.Content.deserialize, + ) + return self._stubs['update_content'] + + @property + def delete_content(self) -> Callable[ + [content.DeleteContentRequest], + empty_pb2.Empty]: + r"""Return a callable for the delete content method over gRPC. + + Delete a content. + + Returns: + Callable[[~.DeleteContentRequest], + ~.Empty]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'delete_content' not in self._stubs: + self._stubs['delete_content'] = self.grpc_channel.unary_unary( + '/google.cloud.dataplex.v1.ContentService/DeleteContent', + request_serializer=content.DeleteContentRequest.serialize, + response_deserializer=empty_pb2.Empty.FromString, + ) + return self._stubs['delete_content'] + + @property + def get_content(self) -> Callable[ + [content.GetContentRequest], + analyze.Content]: + r"""Return a callable for the get content method over gRPC. + + Get a content resource. + + Returns: + Callable[[~.GetContentRequest], + ~.Content]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'get_content' not in self._stubs: + self._stubs['get_content'] = self.grpc_channel.unary_unary( + '/google.cloud.dataplex.v1.ContentService/GetContent', + request_serializer=content.GetContentRequest.serialize, + response_deserializer=analyze.Content.deserialize, + ) + return self._stubs['get_content'] + + @property + def get_iam_policy(self) -> Callable[ + [iam_policy_pb2.GetIamPolicyRequest], + policy_pb2.Policy]: + r"""Return a callable for the get iam policy method over gRPC. + + Gets the access control policy for a contentitem resource. 
A + ``NOT_FOUND`` error is returned if the resource does not exist. + An empty policy is returned if the resource exists but does not + have a policy set on it. + + Caller must have Google IAM ``dataplex.content.getIamPolicy`` + permission on the resource. + + Returns: + Callable[[~.GetIamPolicyRequest], + ~.Policy]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'get_iam_policy' not in self._stubs: + self._stubs['get_iam_policy'] = self.grpc_channel.unary_unary( + '/google.cloud.dataplex.v1.ContentService/GetIamPolicy', + request_serializer=iam_policy_pb2.GetIamPolicyRequest.SerializeToString, + response_deserializer=policy_pb2.Policy.FromString, + ) + return self._stubs['get_iam_policy'] + + @property + def set_iam_policy(self) -> Callable[ + [iam_policy_pb2.SetIamPolicyRequest], + policy_pb2.Policy]: + r"""Return a callable for the set iam policy method over gRPC. + + Sets the access control policy on the specified contentitem + resource. Replaces any existing policy. + + Caller must have Google IAM ``dataplex.content.setIamPolicy`` + permission on the resource. + + Returns: + Callable[[~.SetIamPolicyRequest], + ~.Policy]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'set_iam_policy' not in self._stubs: + self._stubs['set_iam_policy'] = self.grpc_channel.unary_unary( + '/google.cloud.dataplex.v1.ContentService/SetIamPolicy', + request_serializer=iam_policy_pb2.SetIamPolicyRequest.SerializeToString, + response_deserializer=policy_pb2.Policy.FromString, + ) + return self._stubs['set_iam_policy'] + + @property + def test_iam_permissions(self) -> Callable[ + [iam_policy_pb2.TestIamPermissionsRequest], + iam_policy_pb2.TestIamPermissionsResponse]: + r"""Return a callable for the test iam permissions method over gRPC. + + Returns the caller's permissions on a resource. If the resource + does not exist, an empty set of permissions is returned (a + ``NOT_FOUND`` error is not returned). + + A caller is not required to have Google IAM permission to make + this request. + + Note: This operation is designed to be used for building + permission-aware UIs and command-line tools, not for + authorization checking. This operation may "fail open" without + warning. + + Returns: + Callable[[~.TestIamPermissionsRequest], + ~.TestIamPermissionsResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
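+        # Illustrative use of the returned callable (caller code; the values
+        # shown are hypothetical placeholders):
+        #
+        #     rpc = transport.test_iam_permissions
+        #     response = rpc(iam_policy_pb2.TestIamPermissionsRequest(resource=...))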
+ if 'test_iam_permissions' not in self._stubs: + self._stubs['test_iam_permissions'] = self.grpc_channel.unary_unary( + '/google.cloud.dataplex.v1.ContentService/TestIamPermissions', + request_serializer=iam_policy_pb2.TestIamPermissionsRequest.SerializeToString, + response_deserializer=iam_policy_pb2.TestIamPermissionsResponse.FromString, + ) + return self._stubs['test_iam_permissions'] + + @property + def list_content(self) -> Callable[ + [content.ListContentRequest], + content.ListContentResponse]: + r"""Return a callable for the list content method over gRPC. + + List content. + + Returns: + Callable[[~.ListContentRequest], + ~.ListContentResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'list_content' not in self._stubs: + self._stubs['list_content'] = self.grpc_channel.unary_unary( + '/google.cloud.dataplex.v1.ContentService/ListContent', + request_serializer=content.ListContentRequest.serialize, + response_deserializer=content.ListContentResponse.deserialize, + ) + return self._stubs['list_content'] + + def close(self): + self.grpc_channel.close() + + @property + def delete_operation( + self, + ) -> Callable[[operations_pb2.DeleteOperationRequest], None]: + r"""Return a callable for the delete_operation method over gRPC. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "delete_operation" not in self._stubs: + self._stubs["delete_operation"] = self.grpc_channel.unary_unary( + "/google.longrunning.Operations/DeleteOperation", + request_serializer=operations_pb2.DeleteOperationRequest.SerializeToString, + response_deserializer=None, + ) + return self._stubs["delete_operation"] + + @property + def cancel_operation( + self, + ) -> Callable[[operations_pb2.CancelOperationRequest], None]: + r"""Return a callable for the cancel_operation method over gRPC. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "cancel_operation" not in self._stubs: + self._stubs["cancel_operation"] = self.grpc_channel.unary_unary( + "/google.longrunning.Operations/CancelOperation", + request_serializer=operations_pb2.CancelOperationRequest.SerializeToString, + response_deserializer=None, + ) + return self._stubs["cancel_operation"] + + @property + def get_operation( + self, + ) -> Callable[[operations_pb2.GetOperationRequest], operations_pb2.Operation]: + r"""Return a callable for the get_operation method over gRPC. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+        if "get_operation" not in self._stubs:
+            self._stubs["get_operation"] = self.grpc_channel.unary_unary(
+                "/google.longrunning.Operations/GetOperation",
+                request_serializer=operations_pb2.GetOperationRequest.SerializeToString,
+                response_deserializer=operations_pb2.Operation.FromString,
+            )
+        return self._stubs["get_operation"]
+
+    @property
+    def list_operations(
+        self,
+    ) -> Callable[[operations_pb2.ListOperationsRequest], operations_pb2.ListOperationsResponse]:
+        r"""Return a callable for the list_operations method over gRPC.
+        """
+        # Generate a "stub function" on-the-fly which will actually make
+        # the request.
+        # gRPC handles serialization and deserialization, so we just need
+        # to pass in the functions for each.
+        if "list_operations" not in self._stubs:
+            self._stubs["list_operations"] = self.grpc_channel.unary_unary(
+                "/google.longrunning.Operations/ListOperations",
+                request_serializer=operations_pb2.ListOperationsRequest.SerializeToString,
+                response_deserializer=operations_pb2.ListOperationsResponse.FromString,
+            )
+        return self._stubs["list_operations"]
+
+    @property
+    def list_locations(
+        self,
+    ) -> Callable[[locations_pb2.ListLocationsRequest], locations_pb2.ListLocationsResponse]:
+        r"""Return a callable for the list locations method over gRPC.
+        """
+        # Generate a "stub function" on-the-fly which will actually make
+        # the request.
+        # gRPC handles serialization and deserialization, so we just need
+        # to pass in the functions for each.
+        if "list_locations" not in self._stubs:
+            self._stubs["list_locations"] = self.grpc_channel.unary_unary(
+                "/google.cloud.location.Locations/ListLocations",
+                request_serializer=locations_pb2.ListLocationsRequest.SerializeToString,
+                response_deserializer=locations_pb2.ListLocationsResponse.FromString,
+            )
+        return self._stubs["list_locations"]
+
+    @property
+    def get_location(
+        self,
+    ) -> Callable[[locations_pb2.GetLocationRequest], locations_pb2.Location]:
+        r"""Return a callable for the get location method over gRPC.
+        """
+        # Generate a "stub function" on-the-fly which will actually make
+        # the request.
+        # gRPC handles serialization and deserialization, so we just need
+        # to pass in the functions for each.
+        if "get_location" not in self._stubs:
+            self._stubs["get_location"] = self.grpc_channel.unary_unary(
+                "/google.cloud.location.Locations/GetLocation",
+                request_serializer=locations_pb2.GetLocationRequest.SerializeToString,
+                response_deserializer=locations_pb2.Location.FromString,
+            )
+        return self._stubs["get_location"]
+
+    @property
+    def kind(self) -> str:
+        return "grpc"
+
+
+__all__ = (
+    'ContentServiceGrpcTransport',
+)
diff --git a/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/content_service/transports/grpc_asyncio.py b/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/content_service/transports/grpc_asyncio.py
new file mode 100644
index 000000000000..ba7088c07b69
--- /dev/null
+++ b/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/content_service/transports/grpc_asyncio.py
@@ -0,0 +1,709 @@
+# -*- coding: utf-8 -*-
+# Copyright 2024 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+import inspect
+import warnings
+from typing import Awaitable, Callable, Dict, Optional, Sequence, Tuple, Union
+
+from google.api_core import gapic_v1
+from google.api_core import grpc_helpers_async
+from google.api_core import exceptions as core_exceptions
+from google.api_core import retry_async as retries
+from google.auth import credentials as ga_credentials  # type: ignore
+from google.auth.transport.grpc import SslCredentials  # type: ignore
+
+import grpc  # type: ignore
+from grpc.experimental import aio  # type: ignore
+
+from google.cloud.dataplex_v1.types import analyze
+from google.cloud.dataplex_v1.types import content
+from google.cloud.dataplex_v1.types import content as gcd_content
+from google.cloud.location import locations_pb2  # type: ignore
+from google.iam.v1 import iam_policy_pb2  # type: ignore
+from google.iam.v1 import policy_pb2  # type: ignore
+from google.longrunning import operations_pb2  # type: ignore
+from google.protobuf import empty_pb2  # type: ignore
+from .base import ContentServiceTransport, DEFAULT_CLIENT_INFO
+from .grpc import ContentServiceGrpcTransport
+
+
+class ContentServiceGrpcAsyncIOTransport(ContentServiceTransport):
+    """gRPC AsyncIO backend transport for ContentService.
+
+    ContentService manages Notebook and SQL Scripts for Dataplex.
+
+    This class defines the same methods as the primary client, so the
+    primary client can load the underlying transport implementation
+    and call it.
+
+    It sends protocol buffers over the wire using gRPC (which is built on
+    top of HTTP/2); the ``grpcio`` package must be installed.
+    """
+
+    _grpc_channel: aio.Channel
+    _stubs: Dict[str, Callable] = {}
+
+    @classmethod
+    def create_channel(cls,
+                       host: str = 'dataplex.googleapis.com',
+                       credentials: Optional[ga_credentials.Credentials] = None,
+                       credentials_file: Optional[str] = None,
+                       scopes: Optional[Sequence[str]] = None,
+                       quota_project_id: Optional[str] = None,
+                       **kwargs) -> aio.Channel:
+        """Create and return a gRPC AsyncIO channel object.
+        Args:
+            host (Optional[str]): The host for the channel to use.
+            credentials (Optional[~.Credentials]): The
+                authorization credentials to attach to requests. These
+                credentials identify this application to the service. If
+                none are specified, the client will attempt to ascertain
+                the credentials from the environment.
+            credentials_file (Optional[str]): A file with credentials that can
+                be loaded with :func:`google.auth.load_credentials_from_file`.
+            scopes (Optional[Sequence[str]]): An optional list of scopes needed for this
+                service. These are only used when credentials are not specified and
+                are passed to :func:`google.auth.default`.
+            quota_project_id (Optional[str]): An optional project to use for billing
+                and quota.
+            kwargs (Optional[dict]): Keyword arguments, which are passed to the
+                channel creation.
+        Returns:
+            aio.Channel: A gRPC AsyncIO channel object.
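+
+        Raises:
+            google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials``
+              and ``credentials_file`` are passed.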
+        """
+
+        return grpc_helpers_async.create_channel(
+            host,
+            credentials=credentials,
+            credentials_file=credentials_file,
+            quota_project_id=quota_project_id,
+            default_scopes=cls.AUTH_SCOPES,
+            scopes=scopes,
+            default_host=cls.DEFAULT_HOST,
+            **kwargs
+        )
+
+    def __init__(self, *,
+            host: str = 'dataplex.googleapis.com',
+            credentials: Optional[ga_credentials.Credentials] = None,
+            credentials_file: Optional[str] = None,
+            scopes: Optional[Sequence[str]] = None,
+            channel: Optional[Union[aio.Channel, Callable[..., aio.Channel]]] = None,
+            api_mtls_endpoint: Optional[str] = None,
+            client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None,
+            ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None,
+            client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None,
+            quota_project_id: Optional[str] = None,
+            client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO,
+            always_use_jwt_access: Optional[bool] = False,
+            api_audience: Optional[str] = None,
+            ) -> None:
+        """Instantiate the transport.
+
+        Args:
+            host (Optional[str]):
+                 The hostname to connect to (default: 'dataplex.googleapis.com').
+            credentials (Optional[google.auth.credentials.Credentials]): The
+                authorization credentials to attach to requests. These
+                credentials identify the application to the service; if none
+                are specified, the client will attempt to ascertain the
+                credentials from the environment.
+                This argument is ignored if a ``channel`` instance is provided.
+            credentials_file (Optional[str]): A file with credentials that can
+                be loaded with :func:`google.auth.load_credentials_from_file`.
+                This argument is ignored if a ``channel`` instance is provided.
+            scopes (Optional[Sequence[str]]): An optional list of scopes needed for this
+                service. These are only used when credentials are not specified and
+                are passed to :func:`google.auth.default`.
+            channel (Optional[Union[aio.Channel, Callable[..., aio.Channel]]]):
+                A ``Channel`` instance through which to make calls, or a Callable
+                that constructs and returns one. If set to None, ``self.create_channel``
+                is used to create the channel. If a Callable is given, it will be called
+                with the same arguments as used in ``self.create_channel``.
+            api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint.
+                If provided, it overrides the ``host`` argument and tries to create
+                a mutual TLS channel with client SSL credentials from
+                ``client_cert_source`` or application default SSL credentials.
+            client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]):
+                Deprecated. A callback to provide client SSL certificate bytes and
+                private key bytes, both in PEM format. It is ignored if
+                ``api_mtls_endpoint`` is None.
+            ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials
+                for the grpc channel. It is ignored if a ``channel`` instance is provided.
+            client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]):
+                A callback to provide client certificate bytes and private key bytes,
+                both in PEM format. It is used to configure a mutual TLS channel. It is
+                ignored if a ``channel`` instance or ``ssl_channel_credentials`` is provided.
+            quota_project_id (Optional[str]): An optional project to use for billing
+                and quota.
+            client_info (google.api_core.gapic_v1.client_info.ClientInfo):
+                The client info used to send a user-agent string along with
+                API requests. If ``None``, then default info will be used.
+ Generally, you only need to set this if you're developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + + Raises: + google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport + creation failed for any reason. + google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` + and ``credentials_file`` are passed. + """ + self._grpc_channel = None + self._ssl_channel_credentials = ssl_channel_credentials + self._stubs: Dict[str, Callable] = {} + + if api_mtls_endpoint: + warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning) + if client_cert_source: + warnings.warn("client_cert_source is deprecated", DeprecationWarning) + + if isinstance(channel, aio.Channel): + # Ignore credentials if a channel was passed. + credentials = None + self._ignore_credentials = True + # If a channel was explicitly provided, set it. + self._grpc_channel = channel + self._ssl_channel_credentials = None + else: + if api_mtls_endpoint: + host = api_mtls_endpoint + + # Create SSL credentials with client_cert_source or application + # default SSL credentials. + if client_cert_source: + cert, key = client_cert_source() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + else: + self._ssl_channel_credentials = SslCredentials().ssl_credentials + + else: + if client_cert_source_for_mtls and not ssl_channel_credentials: + cert, key = client_cert_source_for_mtls() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + + # The base transport sets the host, credentials and scopes + super().__init__( + host=host, + credentials=credentials, + credentials_file=credentials_file, + scopes=scopes, + quota_project_id=quota_project_id, + client_info=client_info, + always_use_jwt_access=always_use_jwt_access, + api_audience=api_audience, + ) + + if not self._grpc_channel: + # initialize with the provided callable or the default channel + channel_init = channel or type(self).create_channel + self._grpc_channel = channel_init( + self._host, + # use the credentials which are saved + credentials=self._credentials, + # Set ``credentials_file`` to ``None`` here as + # the credentials that we saved earlier should be used. + credentials_file=None, + scopes=self._scopes, + ssl_credentials=self._ssl_channel_credentials, + quota_project_id=quota_project_id, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + # Wrap messages. This must be done after self._grpc_channel exists + self._wrap_with_kind = "kind" in inspect.signature(gapic_v1.method_async.wrap_method).parameters + self._prep_wrapped_messages(client_info) + + @property + def grpc_channel(self) -> aio.Channel: + """Create the channel designed to connect to this service. + + This property caches on the instance; repeated calls return + the same channel. + """ + # Return the channel from cache. + return self._grpc_channel + + @property + def create_content(self) -> Callable[ + [gcd_content.CreateContentRequest], + Awaitable[analyze.Content]]: + r"""Return a callable for the create content method over gRPC. + + Create a content. + + Returns: + Callable[[~.CreateContentRequest], + Awaitable[~.Content]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. 
+ # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'create_content' not in self._stubs: + self._stubs['create_content'] = self.grpc_channel.unary_unary( + '/google.cloud.dataplex.v1.ContentService/CreateContent', + request_serializer=gcd_content.CreateContentRequest.serialize, + response_deserializer=analyze.Content.deserialize, + ) + return self._stubs['create_content'] + + @property + def update_content(self) -> Callable[ + [gcd_content.UpdateContentRequest], + Awaitable[analyze.Content]]: + r"""Return a callable for the update content method over gRPC. + + Update a content. Only supports full resource update. + + Returns: + Callable[[~.UpdateContentRequest], + Awaitable[~.Content]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'update_content' not in self._stubs: + self._stubs['update_content'] = self.grpc_channel.unary_unary( + '/google.cloud.dataplex.v1.ContentService/UpdateContent', + request_serializer=gcd_content.UpdateContentRequest.serialize, + response_deserializer=analyze.Content.deserialize, + ) + return self._stubs['update_content'] + + @property + def delete_content(self) -> Callable[ + [content.DeleteContentRequest], + Awaitable[empty_pb2.Empty]]: + r"""Return a callable for the delete content method over gRPC. + + Delete a content. + + Returns: + Callable[[~.DeleteContentRequest], + Awaitable[~.Empty]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'delete_content' not in self._stubs: + self._stubs['delete_content'] = self.grpc_channel.unary_unary( + '/google.cloud.dataplex.v1.ContentService/DeleteContent', + request_serializer=content.DeleteContentRequest.serialize, + response_deserializer=empty_pb2.Empty.FromString, + ) + return self._stubs['delete_content'] + + @property + def get_content(self) -> Callable[ + [content.GetContentRequest], + Awaitable[analyze.Content]]: + r"""Return a callable for the get content method over gRPC. + + Get a content resource. + + Returns: + Callable[[~.GetContentRequest], + Awaitable[~.Content]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'get_content' not in self._stubs: + self._stubs['get_content'] = self.grpc_channel.unary_unary( + '/google.cloud.dataplex.v1.ContentService/GetContent', + request_serializer=content.GetContentRequest.serialize, + response_deserializer=analyze.Content.deserialize, + ) + return self._stubs['get_content'] + + @property + def get_iam_policy(self) -> Callable[ + [iam_policy_pb2.GetIamPolicyRequest], + Awaitable[policy_pb2.Policy]]: + r"""Return a callable for the get iam policy method over gRPC. + + Gets the access control policy for a contentitem resource. A + ``NOT_FOUND`` error is returned if the resource does not exist. + An empty policy is returned if the resource exists but does not + have a policy set on it. 
+ + Caller must have Google IAM ``dataplex.content.getIamPolicy`` + permission on the resource. + + Returns: + Callable[[~.GetIamPolicyRequest], + Awaitable[~.Policy]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'get_iam_policy' not in self._stubs: + self._stubs['get_iam_policy'] = self.grpc_channel.unary_unary( + '/google.cloud.dataplex.v1.ContentService/GetIamPolicy', + request_serializer=iam_policy_pb2.GetIamPolicyRequest.SerializeToString, + response_deserializer=policy_pb2.Policy.FromString, + ) + return self._stubs['get_iam_policy'] + + @property + def set_iam_policy(self) -> Callable[ + [iam_policy_pb2.SetIamPolicyRequest], + Awaitable[policy_pb2.Policy]]: + r"""Return a callable for the set iam policy method over gRPC. + + Sets the access control policy on the specified contentitem + resource. Replaces any existing policy. + + Caller must have Google IAM ``dataplex.content.setIamPolicy`` + permission on the resource. + + Returns: + Callable[[~.SetIamPolicyRequest], + Awaitable[~.Policy]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'set_iam_policy' not in self._stubs: + self._stubs['set_iam_policy'] = self.grpc_channel.unary_unary( + '/google.cloud.dataplex.v1.ContentService/SetIamPolicy', + request_serializer=iam_policy_pb2.SetIamPolicyRequest.SerializeToString, + response_deserializer=policy_pb2.Policy.FromString, + ) + return self._stubs['set_iam_policy'] + + @property + def test_iam_permissions(self) -> Callable[ + [iam_policy_pb2.TestIamPermissionsRequest], + Awaitable[iam_policy_pb2.TestIamPermissionsResponse]]: + r"""Return a callable for the test iam permissions method over gRPC. + + Returns the caller's permissions on a resource. If the resource + does not exist, an empty set of permissions is returned (a + ``NOT_FOUND`` error is not returned). + + A caller is not required to have Google IAM permission to make + this request. + + Note: This operation is designed to be used for building + permission-aware UIs and command-line tools, not for + authorization checking. This operation may "fail open" without + warning. + + Returns: + Callable[[~.TestIamPermissionsRequest], + Awaitable[~.TestIamPermissionsResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'test_iam_permissions' not in self._stubs: + self._stubs['test_iam_permissions'] = self.grpc_channel.unary_unary( + '/google.cloud.dataplex.v1.ContentService/TestIamPermissions', + request_serializer=iam_policy_pb2.TestIamPermissionsRequest.SerializeToString, + response_deserializer=iam_policy_pb2.TestIamPermissionsResponse.FromString, + ) + return self._stubs['test_iam_permissions'] + + @property + def list_content(self) -> Callable[ + [content.ListContentRequest], + Awaitable[content.ListContentResponse]]: + r"""Return a callable for the list content method over gRPC. + + List content. 
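+
+        A minimal usage sketch (illustrative only; it must run inside an
+        ``async def`` and assumes default credentials; the ``parent`` value
+        is a placeholder):
+
+        .. code-block:: python
+
+            transport = ContentServiceGrpcAsyncIOTransport()
+            rpc = transport.list_content
+            response = await rpc(content.ListContentRequest(parent=...))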
+ + Returns: + Callable[[~.ListContentRequest], + Awaitable[~.ListContentResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'list_content' not in self._stubs: + self._stubs['list_content'] = self.grpc_channel.unary_unary( + '/google.cloud.dataplex.v1.ContentService/ListContent', + request_serializer=content.ListContentRequest.serialize, + response_deserializer=content.ListContentResponse.deserialize, + ) + return self._stubs['list_content'] + + def _prep_wrapped_messages(self, client_info): + """ Precompute the wrapped methods, overriding the base class method to use async wrappers.""" + self._wrapped_methods = { + self.create_content: self._wrap_method( + self.create_content, + default_timeout=60.0, + client_info=client_info, + ), + self.update_content: self._wrap_method( + self.update_content, + default_timeout=60.0, + client_info=client_info, + ), + self.delete_content: self._wrap_method( + self.delete_content, + default_timeout=60.0, + client_info=client_info, + ), + self.get_content: self._wrap_method( + self.get_content, + default_retry=retries.AsyncRetry( + initial=1.0, + maximum=10.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=client_info, + ), + self.get_iam_policy: self._wrap_method( + self.get_iam_policy, + default_retry=retries.AsyncRetry( + initial=1.0, + maximum=10.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=client_info, + ), + self.set_iam_policy: self._wrap_method( + self.set_iam_policy, + default_timeout=60.0, + client_info=client_info, + ), + self.test_iam_permissions: self._wrap_method( + self.test_iam_permissions, + default_retry=retries.AsyncRetry( + initial=1.0, + maximum=10.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=client_info, + ), + self.list_content: self._wrap_method( + self.list_content, + default_retry=retries.AsyncRetry( + initial=1.0, + maximum=10.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=client_info, + ), + self.get_location: self._wrap_method( + self.get_location, + default_timeout=None, + client_info=client_info, + ), + self.list_locations: self._wrap_method( + self.list_locations, + default_timeout=None, + client_info=client_info, + ), + self.cancel_operation: self._wrap_method( + self.cancel_operation, + default_timeout=None, + client_info=client_info, + ), + self.delete_operation: self._wrap_method( + self.delete_operation, + default_timeout=None, + client_info=client_info, + ), + self.get_operation: self._wrap_method( + self.get_operation, + default_timeout=None, + client_info=client_info, + ), + self.list_operations: self._wrap_method( + self.list_operations, + default_timeout=None, + client_info=client_info, + ), + } + + def _wrap_method(self, func, *args, **kwargs): + if self._wrap_with_kind: # pragma: NO COVER + kwargs["kind"] = self.kind + return gapic_v1.method_async.wrap_method(func, *args, **kwargs) + + def close(self): + return 
self.grpc_channel.close() + + @property + def kind(self) -> str: + return "grpc_asyncio" + + @property + def delete_operation( + self, + ) -> Callable[[operations_pb2.DeleteOperationRequest], None]: + r"""Return a callable for the delete_operation method over gRPC. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "delete_operation" not in self._stubs: + self._stubs["delete_operation"] = self.grpc_channel.unary_unary( + "/google.longrunning.Operations/DeleteOperation", + request_serializer=operations_pb2.DeleteOperationRequest.SerializeToString, + response_deserializer=None, + ) + return self._stubs["delete_operation"] + + @property + def cancel_operation( + self, + ) -> Callable[[operations_pb2.CancelOperationRequest], None]: + r"""Return a callable for the cancel_operation method over gRPC. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "cancel_operation" not in self._stubs: + self._stubs["cancel_operation"] = self.grpc_channel.unary_unary( + "/google.longrunning.Operations/CancelOperation", + request_serializer=operations_pb2.CancelOperationRequest.SerializeToString, + response_deserializer=None, + ) + return self._stubs["cancel_operation"] + + @property + def get_operation( + self, + ) -> Callable[[operations_pb2.GetOperationRequest], operations_pb2.Operation]: + r"""Return a callable for the get_operation method over gRPC. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_operation" not in self._stubs: + self._stubs["get_operation"] = self.grpc_channel.unary_unary( + "/google.longrunning.Operations/GetOperation", + request_serializer=operations_pb2.GetOperationRequest.SerializeToString, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["get_operation"] + + @property + def list_operations( + self, + ) -> Callable[[operations_pb2.ListOperationsRequest], operations_pb2.ListOperationsResponse]: + r"""Return a callable for the list_operations method over gRPC. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "list_operations" not in self._stubs: + self._stubs["list_operations"] = self.grpc_channel.unary_unary( + "/google.longrunning.Operations/ListOperations", + request_serializer=operations_pb2.ListOperationsRequest.SerializeToString, + response_deserializer=operations_pb2.ListOperationsResponse.FromString, + ) + return self._stubs["list_operations"] + + @property + def list_locations( + self, + ) -> Callable[[locations_pb2.ListLocationsRequest], locations_pb2.ListLocationsResponse]: + r"""Return a callable for the list locations method over gRPC. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
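+        # Note: this stub (like ``get_location`` below) targets the
+        # ``google.cloud.location.Locations`` mixin API rather than
+        # ContentService itself, but it is served over the same channel.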
+        if "list_locations" not in self._stubs:
+            self._stubs["list_locations"] = self.grpc_channel.unary_unary(
+                "/google.cloud.location.Locations/ListLocations",
+                request_serializer=locations_pb2.ListLocationsRequest.SerializeToString,
+                response_deserializer=locations_pb2.ListLocationsResponse.FromString,
+            )
+        return self._stubs["list_locations"]
+
+    @property
+    def get_location(
+        self,
+    ) -> Callable[[locations_pb2.GetLocationRequest], locations_pb2.Location]:
+        r"""Return a callable for the get location method over gRPC.
+        """
+        # Generate a "stub function" on-the-fly which will actually make
+        # the request.
+        # gRPC handles serialization and deserialization, so we just need
+        # to pass in the functions for each.
+        if "get_location" not in self._stubs:
+            self._stubs["get_location"] = self.grpc_channel.unary_unary(
+                "/google.cloud.location.Locations/GetLocation",
+                request_serializer=locations_pb2.GetLocationRequest.SerializeToString,
+                response_deserializer=locations_pb2.Location.FromString,
+            )
+        return self._stubs["get_location"]
+
+
+__all__ = (
+    'ContentServiceGrpcAsyncIOTransport',
+)
diff --git a/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/data_scan_service/__init__.py b/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/data_scan_service/__init__.py
new file mode 100644
index 000000000000..1500c4168c10
--- /dev/null
+++ b/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/data_scan_service/__init__.py
@@ -0,0 +1,22 @@
+# -*- coding: utf-8 -*-
+# Copyright 2024 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+from .client import DataScanServiceClient
+from .async_client import DataScanServiceAsyncClient
+
+__all__ = (
+    'DataScanServiceClient',
+    'DataScanServiceAsyncClient',
+)
diff --git a/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/data_scan_service/async_client.py b/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/data_scan_service/async_client.py
new file mode 100644
index 000000000000..d370f4f64327
--- /dev/null
+++ b/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/data_scan_service/async_client.py
@@ -0,0 +1,1665 @@
+# -*- coding: utf-8 -*-
+# Copyright 2024 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+# +from collections import OrderedDict +import re +from typing import Dict, Callable, Mapping, MutableMapping, MutableSequence, Optional, Sequence, Tuple, Type, Union + +from google.cloud.dataplex_v1 import gapic_version as package_version + +from google.api_core.client_options import ClientOptions +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry_async as retries +from google.auth import credentials as ga_credentials # type: ignore +from google.oauth2 import service_account # type: ignore + + +try: + OptionalRetry = Union[retries.AsyncRetry, gapic_v1.method._MethodDefault, None] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.AsyncRetry, object, None] # type: ignore + +from google.api_core import operation # type: ignore +from google.api_core import operation_async # type: ignore +from google.cloud.dataplex_v1.services.data_scan_service import pagers +from google.cloud.dataplex_v1.types import data_discovery +from google.cloud.dataplex_v1.types import data_profile +from google.cloud.dataplex_v1.types import data_quality +from google.cloud.dataplex_v1.types import datascans +from google.cloud.dataplex_v1.types import processing +from google.cloud.dataplex_v1.types import resources +from google.cloud.dataplex_v1.types import service +from google.cloud.location import locations_pb2 # type: ignore +from google.iam.v1 import iam_policy_pb2 # type: ignore +from google.iam.v1 import policy_pb2 # type: ignore +from google.longrunning import operations_pb2 # type: ignore +from google.protobuf import empty_pb2 # type: ignore +from google.protobuf import field_mask_pb2 # type: ignore +from google.protobuf import timestamp_pb2 # type: ignore +from .transports.base import DataScanServiceTransport, DEFAULT_CLIENT_INFO +from .transports.grpc_asyncio import DataScanServiceGrpcAsyncIOTransport +from .client import DataScanServiceClient + + +class DataScanServiceAsyncClient: + """DataScanService manages DataScan resources which can be + configured to run various types of data scanning workload and + generate enriched metadata (e.g. Data Profile, Data Quality) for + the data source. + """ + + _client: DataScanServiceClient + + # Copy defaults from the synchronous client for use here. + # Note: DEFAULT_ENDPOINT is deprecated. Use _DEFAULT_ENDPOINT_TEMPLATE instead. 
+ DEFAULT_ENDPOINT = DataScanServiceClient.DEFAULT_ENDPOINT + DEFAULT_MTLS_ENDPOINT = DataScanServiceClient.DEFAULT_MTLS_ENDPOINT + _DEFAULT_ENDPOINT_TEMPLATE = DataScanServiceClient._DEFAULT_ENDPOINT_TEMPLATE + _DEFAULT_UNIVERSE = DataScanServiceClient._DEFAULT_UNIVERSE + + connection_path = staticmethod(DataScanServiceClient.connection_path) + parse_connection_path = staticmethod(DataScanServiceClient.parse_connection_path) + data_scan_path = staticmethod(DataScanServiceClient.data_scan_path) + parse_data_scan_path = staticmethod(DataScanServiceClient.parse_data_scan_path) + data_scan_job_path = staticmethod(DataScanServiceClient.data_scan_job_path) + parse_data_scan_job_path = staticmethod(DataScanServiceClient.parse_data_scan_job_path) + dataset_path = staticmethod(DataScanServiceClient.dataset_path) + parse_dataset_path = staticmethod(DataScanServiceClient.parse_dataset_path) + entity_path = staticmethod(DataScanServiceClient.entity_path) + parse_entity_path = staticmethod(DataScanServiceClient.parse_entity_path) + common_billing_account_path = staticmethod(DataScanServiceClient.common_billing_account_path) + parse_common_billing_account_path = staticmethod(DataScanServiceClient.parse_common_billing_account_path) + common_folder_path = staticmethod(DataScanServiceClient.common_folder_path) + parse_common_folder_path = staticmethod(DataScanServiceClient.parse_common_folder_path) + common_organization_path = staticmethod(DataScanServiceClient.common_organization_path) + parse_common_organization_path = staticmethod(DataScanServiceClient.parse_common_organization_path) + common_project_path = staticmethod(DataScanServiceClient.common_project_path) + parse_common_project_path = staticmethod(DataScanServiceClient.parse_common_project_path) + common_location_path = staticmethod(DataScanServiceClient.common_location_path) + parse_common_location_path = staticmethod(DataScanServiceClient.parse_common_location_path) + + @classmethod + def from_service_account_info(cls, info: dict, *args, **kwargs): + """Creates an instance of this client using the provided credentials + info. + + Args: + info (dict): The service account private key info. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + DataScanServiceAsyncClient: The constructed client. + """ + return DataScanServiceClient.from_service_account_info.__func__(DataScanServiceAsyncClient, info, *args, **kwargs) # type: ignore + + @classmethod + def from_service_account_file(cls, filename: str, *args, **kwargs): + """Creates an instance of this client using the provided credentials + file. + + Args: + filename (str): The path to the service account private key json + file. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + DataScanServiceAsyncClient: The constructed client. + """ + return DataScanServiceClient.from_service_account_file.__func__(DataScanServiceAsyncClient, filename, *args, **kwargs) # type: ignore + + from_service_account_json = from_service_account_file + + @classmethod + def get_mtls_endpoint_and_cert_source(cls, client_options: Optional[ClientOptions] = None): + """Return the API endpoint and client cert source for mutual TLS. + + The client cert source is determined in the following order: + (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the + client cert source is None. 
+        (2) if `client_options.client_cert_source` is provided, use the provided one; if the
+        default client cert source exists, use the default one; otherwise the client cert
+        source is None.
+
+        The API endpoint is determined in the following order:
+        (1) if `client_options.api_endpoint` is provided, use the provided one.
+        (2) if `GOOGLE_API_USE_MTLS_ENDPOINT` environment variable is "always", use the
+        default mTLS endpoint; if the environment variable is "never", use the default API
+        endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise
+        use the default API endpoint.
+
+        More details can be found at https://google.aip.dev/auth/4114.
+
+        Args:
+            client_options (google.api_core.client_options.ClientOptions): Custom options for the
+                client. Only the `api_endpoint` and `client_cert_source` properties may be used
+                in this method.
+
+        Returns:
+            Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the
+                client cert source to use.
+
+        Raises:
+            google.auth.exceptions.MutualTLSChannelError: If any errors happen.
+        """
+        return DataScanServiceClient.get_mtls_endpoint_and_cert_source(client_options)  # type: ignore
+
+    @property
+    def transport(self) -> DataScanServiceTransport:
+        """Returns the transport used by the client instance.
+
+        Returns:
+            DataScanServiceTransport: The transport used by the client instance.
+        """
+        return self._client.transport
+
+    @property
+    def api_endpoint(self):
+        """Return the API endpoint used by the client instance.
+
+        Returns:
+            str: The API endpoint used by the client instance.
+        """
+        return self._client._api_endpoint
+
+    @property
+    def universe_domain(self) -> str:
+        """Return the universe domain used by the client instance.
+
+        Returns:
+            str: The universe domain used
+            by the client instance.
+        """
+        return self._client._universe_domain
+
+    get_transport_class = DataScanServiceClient.get_transport_class
+
+    def __init__(self, *,
+            credentials: Optional[ga_credentials.Credentials] = None,
+            transport: Optional[Union[str, DataScanServiceTransport, Callable[..., DataScanServiceTransport]]] = "grpc_asyncio",
+            client_options: Optional[ClientOptions] = None,
+            client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO,
+            ) -> None:
+        """Instantiates the data scan service async client.
+
+        Args:
+            credentials (Optional[google.auth.credentials.Credentials]): The
+                authorization credentials to attach to requests. These
+                credentials identify the application to the service; if none
+                are specified, the client will attempt to ascertain the
+                credentials from the environment.
+            transport (Optional[Union[str,DataScanServiceTransport,Callable[..., DataScanServiceTransport]]]):
+                The transport to use, or a Callable that constructs and returns a new transport to use.
+                If a Callable is given, it will be called with the same set of initialization
+                arguments as used in the DataScanServiceTransport constructor.
+                If set to None, a transport is chosen automatically.
+            client_options (Optional[Union[google.api_core.client_options.ClientOptions, dict]]):
+                Custom options for the client.
+
+                1. The ``api_endpoint`` property can be used to override the
+                default endpoint provided by the client when ``transport`` is
+                not explicitly provided. Only if this property is not set and
+                ``transport`` was not explicitly provided, the endpoint is
+                determined by the GOOGLE_API_USE_MTLS_ENDPOINT environment
+                variable, which can have one of the following values:
+                "always" (always use the default mTLS endpoint), "never" (always
+                use the default regular endpoint) and "auto" (auto-switch to the
+                default mTLS endpoint if client certificate is present; this is
+                the default value).
+
+                2. If the GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable
+                is "true", then the ``client_cert_source`` property can be used
+                to provide a client certificate for mTLS transport. If
+                not provided, the default SSL client certificate will be used if
+                present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not
+                set, no client certificate will be used.
+
+                3. The ``universe_domain`` property can be used to override the
+                default "googleapis.com" universe. Note that ``api_endpoint``
+                property still takes precedence; and ``universe_domain`` is
+                currently not supported for mTLS.
+
+            client_info (google.api_core.gapic_v1.client_info.ClientInfo):
+                The client info used to send a user-agent string along with
+                API requests. If ``None``, then default info will be used.
+                Generally, you only need to set this if you're developing
+                your own client library.
+
+        Raises:
+            google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport
+                creation failed for any reason.
+        """
+        self._client = DataScanServiceClient(
+            credentials=credentials,
+            transport=transport,
+            client_options=client_options,
+            client_info=client_info,
+
+        )
+
+    async def create_data_scan(self,
+            request: Optional[Union[datascans.CreateDataScanRequest, dict]] = None,
+            *,
+            parent: Optional[str] = None,
+            data_scan: Optional[datascans.DataScan] = None,
+            data_scan_id: Optional[str] = None,
+            retry: OptionalRetry = gapic_v1.method.DEFAULT,
+            timeout: Union[float, object] = gapic_v1.method.DEFAULT,
+            metadata: Sequence[Tuple[str, str]] = (),
+            ) -> operation_async.AsyncOperation:
+        r"""Creates a DataScan resource.
+
+        .. code-block:: python
+
+            # This snippet has been automatically generated and should be regarded as a
+            # code template only.
+            # It will require modifications to work:
+            # - It may require correct/in-range values for request initialization.
+            # - It may require specifying regional endpoints when creating the service
+            #   client as shown in:
+            #   https://googleapis.dev/python/google-api-core/latest/client_options.html
+            from google.cloud import dataplex_v1
+
+            async def sample_create_data_scan():
+                # Create a client
+                client = dataplex_v1.DataScanServiceAsyncClient()
+
+                # Initialize request argument(s)
+                data_scan = dataplex_v1.DataScan()
+                data_scan.data_quality_spec.rules.dimension = "dimension_value"
+                data_scan.data.entity = "entity_value"
+
+                request = dataplex_v1.CreateDataScanRequest(
+                    parent="parent_value",
+                    data_scan=data_scan,
+                    data_scan_id="data_scan_id_value",
+                )
+
+                # Make the request
+                operation = client.create_data_scan(request=request)
+
+                print("Waiting for operation to complete...")
+
+                response = (await operation).result()
+
+                # Handle the response
+                print(response)
+
+        Args:
+            request (Optional[Union[google.cloud.dataplex_v1.types.CreateDataScanRequest, dict]]):
+                The request object. Create dataScan request.
+            parent (:class:`str`):
+                Required. The resource name of the parent location:
+                ``projects/{project}/locations/{location_id}`` where
+                ``project`` refers to a *project_id* or *project_number*
+                and ``location_id`` refers to a GCP region.
+ + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + data_scan (:class:`google.cloud.dataplex_v1.types.DataScan`): + Required. DataScan resource. + This corresponds to the ``data_scan`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + data_scan_id (:class:`str`): + Required. DataScan identifier. + + - Must contain only lowercase letters, numbers and + hyphens. + - Must start with a letter. + - Must end with a number or a letter. + - Must be between 1-63 characters. + - Must be unique within the customer project / + location. + + This corresponds to the ``data_scan_id`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation_async.AsyncOperation: + An object representing a long-running operation. + + The result type for the operation will be :class:`google.cloud.dataplex_v1.types.DataScan` Represents a user-visible job which provides the insights for the related + data source. + + For example: + + - Data Quality: generates queries based on the rules + and runs against the data to get data quality + check results. + - Data Profile: analyzes the data in table(s) and + generates insights about the structure, content + and relationships (such as null percent, + cardinality, min/max/mean, etc). + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent, data_scan, data_scan_id]) + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, datascans.CreateDataScanRequest): + request = datascans.CreateDataScanRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + if data_scan is not None: + request.data_scan = data_scan + if data_scan_id is not None: + request.data_scan_id = data_scan_id + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[self._client._transport.create_data_scan] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("parent", request.parent), + )), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation_async.from_gapic( + response, + self._client._transport.operations_client, + datascans.DataScan, + metadata_type=service.OperationMetadata, + ) + + # Done; return the response. 
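+        # ``response`` is an ``operation_async.AsyncOperation``; a caller awaits
+        # its ``result()`` to obtain the created ``DataScan`` (see the code
+        # sample in the docstring above).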
+ return response + + async def update_data_scan(self, + request: Optional[Union[datascans.UpdateDataScanRequest, dict]] = None, + *, + data_scan: Optional[datascans.DataScan] = None, + update_mask: Optional[field_mask_pb2.FieldMask] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation_async.AsyncOperation: + r"""Updates a DataScan resource. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dataplex_v1 + + async def sample_update_data_scan(): + # Create a client + client = dataplex_v1.DataScanServiceAsyncClient() + + # Initialize request argument(s) + data_scan = dataplex_v1.DataScan() + data_scan.data_quality_spec.rules.dimension = "dimension_value" + data_scan.data.entity = "entity_value" + + request = dataplex_v1.UpdateDataScanRequest( + data_scan=data_scan, + ) + + # Make the request + operation = client.update_data_scan(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.dataplex_v1.types.UpdateDataScanRequest, dict]]): + The request object. Update dataScan request. + data_scan (:class:`google.cloud.dataplex_v1.types.DataScan`): + Required. DataScan resource to be updated. + + Only fields specified in ``update_mask`` are updated. + + This corresponds to the ``data_scan`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + update_mask (:class:`google.protobuf.field_mask_pb2.FieldMask`): + Required. Mask of fields to update. + This corresponds to the ``update_mask`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation_async.AsyncOperation: + An object representing a long-running operation. + + The result type for the operation will be :class:`google.cloud.dataplex_v1.types.DataScan` Represents a user-visible job which provides the insights for the related + data source. + + For example: + + - Data Quality: generates queries based on the rules + and runs against the data to get data quality + check results. + - Data Profile: analyzes the data in table(s) and + generates insights about the structure, content + and relationships (such as null percent, + cardinality, min/max/mean, etc). + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
+ has_flattened_params = any([data_scan, update_mask]) + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, datascans.UpdateDataScanRequest): + request = datascans.UpdateDataScanRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if data_scan is not None: + request.data_scan = data_scan + if update_mask is not None: + request.update_mask = update_mask + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[self._client._transport.update_data_scan] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("data_scan.name", request.data_scan.name), + )), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation_async.from_gapic( + response, + self._client._transport.operations_client, + datascans.DataScan, + metadata_type=service.OperationMetadata, + ) + + # Done; return the response. + return response + + async def delete_data_scan(self, + request: Optional[Union[datascans.DeleteDataScanRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation_async.AsyncOperation: + r"""Deletes a DataScan resource. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dataplex_v1 + + async def sample_delete_data_scan(): + # Create a client + client = dataplex_v1.DataScanServiceAsyncClient() + + # Initialize request argument(s) + request = dataplex_v1.DeleteDataScanRequest( + name="name_value", + ) + + # Make the request + operation = client.delete_data_scan(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.dataplex_v1.types.DeleteDataScanRequest, dict]]): + The request object. Delete dataScan request. + name (:class:`str`): + Required. The resource name of the dataScan: + ``projects/{project}/locations/{location_id}/dataScans/{data_scan_id}`` + where ``project`` refers to a *project_id* or + *project_number* and ``location_id`` refers to a GCP + region. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. 
+ metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation_async.AsyncOperation: + An object representing a long-running operation. + + The result type for the operation will be :class:`google.protobuf.empty_pb2.Empty` A generic empty message that you can re-use to avoid defining duplicated + empty messages in your APIs. A typical example is to + use it as the request or the response type of an API + method. For instance: + + service Foo { + rpc Bar(google.protobuf.Empty) returns + (google.protobuf.Empty); + + } + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, datascans.DeleteDataScanRequest): + request = datascans.DeleteDataScanRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[self._client._transport.delete_data_scan] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation_async.from_gapic( + response, + self._client._transport.operations_client, + empty_pb2.Empty, + metadata_type=service.OperationMetadata, + ) + + # Done; return the response. + return response + + async def get_data_scan(self, + request: Optional[Union[datascans.GetDataScanRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> datascans.DataScan: + r"""Gets a DataScan resource. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dataplex_v1 + + async def sample_get_data_scan(): + # Create a client + client = dataplex_v1.DataScanServiceAsyncClient() + + # Initialize request argument(s) + request = dataplex_v1.GetDataScanRequest( + name="name_value", + ) + + # Make the request + response = await client.get_data_scan(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.dataplex_v1.types.GetDataScanRequest, dict]]): + The request object. Get dataScan request. 
+ name (:class:`str`): + Required. The resource name of the dataScan: + ``projects/{project}/locations/{location_id}/dataScans/{data_scan_id}`` + where ``project`` refers to a *project_id* or + *project_number* and ``location_id`` refers to a GCP + region. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.dataplex_v1.types.DataScan: + Represents a user-visible job which provides the insights for the related + data source. + + For example: + + - Data Quality: generates queries based on the rules + and runs against the data to get data quality + check results. + - Data Profile: analyzes the data in table(s) and + generates insights about the structure, content + and relationships (such as null percent, + cardinality, min/max/mean, etc). + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, datascans.GetDataScanRequest): + request = datascans.GetDataScanRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[self._client._transport.get_data_scan] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def list_data_scans(self, + request: Optional[Union[datascans.ListDataScansRequest, dict]] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListDataScansAsyncPager: + r"""Lists DataScans. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dataplex_v1 + + async def sample_list_data_scans(): + # Create a client + client = dataplex_v1.DataScanServiceAsyncClient() + + # Initialize request argument(s) + request = dataplex_v1.ListDataScansRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_data_scans(request=request) + + # Handle the response + async for response in page_result: + print(response) + + Args: + request (Optional[Union[google.cloud.dataplex_v1.types.ListDataScansRequest, dict]]): + The request object. List dataScans request. + parent (:class:`str`): + Required. The resource name of the parent location: + ``projects/{project}/locations/{location_id}`` where + ``project`` refers to a *project_id* or *project_number* + and ``location_id`` refers to a GCP region. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.dataplex_v1.services.data_scan_service.pagers.ListDataScansAsyncPager: + List dataScans response. + + Iterating over this object will yield + results and resolve additional pages + automatically. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent]) + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, datascans.ListDataScansRequest): + request = datascans.ListDataScansRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[self._client._transport.list_data_scans] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("parent", request.parent), + )), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__aiter__` convenience method. + response = pagers.ListDataScansAsyncPager( + method=rpc, + request=request, + response=response, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
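+ # Illustrative sketch, not generated code: the pager resolves pages lazily,
+ # so a caller can flatten all results (``parent`` is a hypothetical
+ # "projects/{project}/locations/{location}" string):
+ #
+ #     pager = await client.list_data_scans(parent=parent)
+ #     scans = [scan async for scan in pager]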
+ return response
+
+ async def run_data_scan(self,
+ request: Optional[Union[datascans.RunDataScanRequest, dict]] = None,
+ *,
+ name: Optional[str] = None,
+ retry: OptionalRetry = gapic_v1.method.DEFAULT,
+ timeout: Union[float, object] = gapic_v1.method.DEFAULT,
+ metadata: Sequence[Tuple[str, str]] = (),
+ ) -> datascans.RunDataScanResponse:
+ r"""Runs an on-demand execution of a DataScan.
+
+ .. code-block:: python
+
+ # This snippet has been automatically generated and should be regarded as a
+ # code template only.
+ # It will require modifications to work:
+ # - It may require correct/in-range values for request initialization.
+ # - It may require specifying regional endpoints when creating the service
+ # client as shown in:
+ # https://googleapis.dev/python/google-api-core/latest/client_options.html
+ from google.cloud import dataplex_v1
+
+ async def sample_run_data_scan():
+ # Create a client
+ client = dataplex_v1.DataScanServiceAsyncClient()
+
+ # Initialize request argument(s)
+ request = dataplex_v1.RunDataScanRequest(
+ name="name_value",
+ )
+
+ # Make the request
+ response = await client.run_data_scan(request=request)
+
+ # Handle the response
+ print(response)
+
+ Args:
+ request (Optional[Union[google.cloud.dataplex_v1.types.RunDataScanRequest, dict]]):
+ The request object. Run DataScan Request.
+ name (:class:`str`):
+ Required. The resource name of the DataScan:
+ ``projects/{project}/locations/{location_id}/dataScans/{data_scan_id}``
+ where ``project`` refers to a *project_id* or
+ *project_number* and ``location_id`` refers to a GCP
+ region.
+
+ Only **OnDemand** data scans are allowed.
+
+ This corresponds to the ``name`` field
+ on the ``request`` instance; if ``request`` is provided, this
+ should not be set.
+ retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any,
+ should be retried.
+ timeout (float): The timeout for this request.
+ metadata (Sequence[Tuple[str, str]]): Strings which should be
+ sent along with the request as metadata.
+
+ Returns:
+ google.cloud.dataplex_v1.types.RunDataScanResponse:
+ Run DataScan Response.
+ """
+ # Create or coerce a protobuf request object.
+ # - Quick check: If we got a request object, we should *not* have
+ # gotten any keyword arguments that map to the request.
+ has_flattened_params = any([name])
+ if request is not None and has_flattened_params:
+ raise ValueError("If the `request` argument is set, then none of "
+ "the individual field arguments should be set.")
+
+ # - Use the request object if provided (there's no risk of modifying the input as
+ # there are no flattened fields), or create one.
+ if not isinstance(request, datascans.RunDataScanRequest):
+ request = datascans.RunDataScanRequest(request)
+
+ # If we have keyword arguments corresponding to fields on the
+ # request, apply these.
+ if name is not None:
+ request.name = name
+
+ # Wrap the RPC method; this adds retry and timeout information,
+ # and friendly error handling.
+ rpc = self._client._transport._wrapped_methods[self._client._transport.run_data_scan]
+
+ # Certain fields should be provided within the metadata header;
+ # add these here.
+ metadata = tuple(metadata) + (
+ gapic_v1.routing_header.to_grpc_metadata((
+ ("name", request.name),
+ )),
+ )
+
+ # Validate the universe domain.
+ self._client._validate_universe_domain()
+
+ # Send the request.
+ response = await rpc(
+ request,
+ retry=retry,
+ timeout=timeout,
+ metadata=metadata,
+ )
+
+ # Done; return the response.
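+ # Illustrative sketch, not generated code: unlike create/update/delete,
+ # this RPC returns the spawned job directly instead of a long-running
+ # operation; a caller might inspect it like so (``scan_name`` is a
+ # hypothetical variable):
+ #
+ #     response = await client.run_data_scan(name=scan_name)
+ #     print(response.job.name, response.job.state)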
+ return response + + async def get_data_scan_job(self, + request: Optional[Union[datascans.GetDataScanJobRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> datascans.DataScanJob: + r"""Gets a DataScanJob resource. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dataplex_v1 + + async def sample_get_data_scan_job(): + # Create a client + client = dataplex_v1.DataScanServiceAsyncClient() + + # Initialize request argument(s) + request = dataplex_v1.GetDataScanJobRequest( + name="name_value", + ) + + # Make the request + response = await client.get_data_scan_job(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.dataplex_v1.types.GetDataScanJobRequest, dict]]): + The request object. Get DataScanJob request. + name (:class:`str`): + Required. The resource name of the DataScanJob: + ``projects/{project}/locations/{location_id}/dataScans/{data_scan_id}/jobs/{data_scan_job_id}`` + where ``project`` refers to a *project_id* or + *project_number* and ``location_id`` refers to a GCP + region. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.dataplex_v1.types.DataScanJob: + A DataScanJob represents an instance + of DataScan execution. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, datascans.GetDataScanJobRequest): + request = datascans.GetDataScanJobRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[self._client._transport.get_data_scan_job] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
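+ # Illustrative sketch, not generated code: the returned job carries its
+ # lifecycle state, so completion can be checked directly
+ # (``job_name`` is a hypothetical variable):
+ #
+ #     job = await client.get_data_scan_job(name=job_name)
+ #     if job.state == dataplex_v1.DataScanJob.State.SUCCEEDED:
+ #         print("finished at", job.end_time)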
+ return response + + async def list_data_scan_jobs(self, + request: Optional[Union[datascans.ListDataScanJobsRequest, dict]] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListDataScanJobsAsyncPager: + r"""Lists DataScanJobs under the given DataScan. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dataplex_v1 + + async def sample_list_data_scan_jobs(): + # Create a client + client = dataplex_v1.DataScanServiceAsyncClient() + + # Initialize request argument(s) + request = dataplex_v1.ListDataScanJobsRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_data_scan_jobs(request=request) + + # Handle the response + async for response in page_result: + print(response) + + Args: + request (Optional[Union[google.cloud.dataplex_v1.types.ListDataScanJobsRequest, dict]]): + The request object. List DataScanJobs request. + parent (:class:`str`): + Required. The resource name of the parent environment: + ``projects/{project}/locations/{location_id}/dataScans/{data_scan_id}`` + where ``project`` refers to a *project_id* or + *project_number* and ``location_id`` refers to a GCP + region. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.dataplex_v1.services.data_scan_service.pagers.ListDataScanJobsAsyncPager: + List DataScanJobs response. + + Iterating over this object will yield + results and resolve additional pages + automatically. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent]) + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, datascans.ListDataScanJobsRequest): + request = datascans.ListDataScanJobsRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[self._client._transport.list_data_scan_jobs] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("parent", request.parent), + )), + ) + + # Validate the universe domain. 
+ self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__aiter__` convenience method. + response = pagers.ListDataScanJobsAsyncPager( + method=rpc, + request=request, + response=response, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def generate_data_quality_rules(self, + request: Optional[Union[datascans.GenerateDataQualityRulesRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> datascans.GenerateDataQualityRulesResponse: + r"""Generates recommended data quality rules based on the + results of a data profiling scan. + + Use the recommendations to build rules for a data + quality scan. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dataplex_v1 + + async def sample_generate_data_quality_rules(): + # Create a client + client = dataplex_v1.DataScanServiceAsyncClient() + + # Initialize request argument(s) + request = dataplex_v1.GenerateDataQualityRulesRequest( + name="name_value", + ) + + # Make the request + response = await client.generate_data_quality_rules(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.dataplex_v1.types.GenerateDataQualityRulesRequest, dict]]): + The request object. Request details for generating data + quality rule recommendations. + name (:class:`str`): + Required. The name must be one of the following: + + - The name of a data scan with at least one successful, + completed data profiling job + - The name of a successful, completed data profiling + job (a data scan job where the job type is data + profiling) + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.dataplex_v1.types.GenerateDataQualityRulesResponse: + Response details for data quality + rule recommendations. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. 
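+ # Illustrative sketch, not generated code: per the docstring above, ``name``
+ # may point at a profiling scan or at one specific profiling job; iterating
+ # the response below assumes the repeated ``rule`` field on
+ # GenerateDataQualityRulesResponse (``profile_scan_name`` is hypothetical):
+ #
+ #     resp = await client.generate_data_quality_rules(name=profile_scan_name)
+ #     for rule in resp.rule:
+ #         print(rule)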
+ if not isinstance(request, datascans.GenerateDataQualityRulesRequest): + request = datascans.GenerateDataQualityRulesRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[self._client._transport.generate_data_quality_rules] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def list_operations( + self, + request: Optional[operations_pb2.ListOperationsRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.ListOperationsResponse: + r"""Lists operations that match the specified filter in the request. + + Args: + request (:class:`~.operations_pb2.ListOperationsRequest`): + The request object. Request message for + `ListOperations` method. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + ~.operations_pb2.ListOperationsResponse: + Response message for ``ListOperations`` method. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.ListOperationsRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self.transport._wrapped_methods[self._client._transport.list_operations] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("name", request.name),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, retry=retry, timeout=timeout, metadata=metadata,) + + # Done; return the response. + return response + + async def get_operation( + self, + request: Optional[operations_pb2.GetOperationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.Operation: + r"""Gets the latest state of a long-running operation. + + Args: + request (:class:`~.operations_pb2.GetOperationRequest`): + The request object. Request message for + `GetOperation` method. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + ~.operations_pb2.Operation: + An ``Operation`` object. + """ + # Create or coerce a protobuf request object. 
+ # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.GetOperationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self.transport._wrapped_methods[self._client._transport.get_operation] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("name", request.name),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, retry=retry, timeout=timeout, metadata=metadata,) + + # Done; return the response. + return response + + async def delete_operation( + self, + request: Optional[operations_pb2.DeleteOperationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Deletes a long-running operation. + + This method indicates that the client is no longer interested + in the operation result. It does not cancel the operation. + If the server doesn't support this method, it returns + `google.rpc.Code.UNIMPLEMENTED`. + + Args: + request (:class:`~.operations_pb2.DeleteOperationRequest`): + The request object. Request message for + `DeleteOperation` method. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + None + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.DeleteOperationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self.transport._wrapped_methods[self._client._transport.delete_operation] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("name", request.name),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + await rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + + async def cancel_operation( + self, + request: Optional[operations_pb2.CancelOperationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Starts asynchronous cancellation on a long-running operation. + + The server makes a best effort to cancel the operation, but success + is not guaranteed. If the server doesn't support this method, it returns + `google.rpc.Code.UNIMPLEMENTED`. + + Args: + request (:class:`~.operations_pb2.CancelOperationRequest`): + The request object. Request message for + `CancelOperation` method. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ Returns: + None + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.CancelOperationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self.transport._wrapped_methods[self._client._transport.cancel_operation] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("name", request.name),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + await rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + + async def get_location( + self, + request: Optional[locations_pb2.GetLocationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> locations_pb2.Location: + r"""Gets information about a location. + + Args: + request (:class:`~.location_pb2.GetLocationRequest`): + The request object. Request message for + `GetLocation` method. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + ~.location_pb2.Location: + Location object. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = locations_pb2.GetLocationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self.transport._wrapped_methods[self._client._transport.get_location] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("name", request.name),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, retry=retry, timeout=timeout, metadata=metadata,) + + # Done; return the response. + return response + + async def list_locations( + self, + request: Optional[locations_pb2.ListLocationsRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> locations_pb2.ListLocationsResponse: + r"""Lists information about the supported locations for this service. + + Args: + request (:class:`~.location_pb2.ListLocationsRequest`): + The request object. Request message for + `ListLocations` method. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + ~.location_pb2.ListLocationsResponse: + Response message for ``ListLocations`` method. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. 
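+ # Illustrative sketch, not generated code: because the request is a raw
+ # protobuf type, a plain dict is accepted here and expanded into keyword
+ # arguments ("my-project" is a hypothetical project ID):
+ #
+ #     locations = await client.list_locations({"name": "projects/my-project"})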
+ if isinstance(request, dict): + request = locations_pb2.ListLocationsRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self.transport._wrapped_methods[self._client._transport.list_locations] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("name", request.name),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, retry=retry, timeout=timeout, metadata=metadata,) + + # Done; return the response. + return response + + async def __aenter__(self) -> "DataScanServiceAsyncClient": + return self + + async def __aexit__(self, exc_type, exc, tb): + await self.transport.close() + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(gapic_version=package_version.__version__) + + +__all__ = ( + "DataScanServiceAsyncClient", +) diff --git a/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/data_scan_service/client.py b/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/data_scan_service/client.py new file mode 100644 index 000000000000..750cb340c2d1 --- /dev/null +++ b/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/data_scan_service/client.py @@ -0,0 +1,2015 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from collections import OrderedDict +import os +import re +from typing import Dict, Callable, Mapping, MutableMapping, MutableSequence, Optional, Sequence, Tuple, Type, Union, cast +import warnings + +from google.cloud.dataplex_v1 import gapic_version as package_version + +from google.api_core import client_options as client_options_lib +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry as retries +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport import mtls # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.auth.exceptions import MutualTLSChannelError # type: ignore +from google.oauth2 import service_account # type: ignore + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault, None] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object, None] # type: ignore + +from google.api_core import operation # type: ignore +from google.api_core import operation_async # type: ignore +from google.cloud.dataplex_v1.services.data_scan_service import pagers +from google.cloud.dataplex_v1.types import data_discovery +from google.cloud.dataplex_v1.types import data_profile +from google.cloud.dataplex_v1.types import data_quality +from google.cloud.dataplex_v1.types import datascans +from google.cloud.dataplex_v1.types import processing +from google.cloud.dataplex_v1.types import resources +from google.cloud.dataplex_v1.types import service +from google.cloud.location import locations_pb2 # type: ignore +from google.iam.v1 import iam_policy_pb2 # type: ignore +from google.iam.v1 import policy_pb2 # type: ignore +from google.longrunning import operations_pb2 # type: ignore +from google.protobuf import empty_pb2 # type: ignore +from google.protobuf import field_mask_pb2 # type: ignore +from google.protobuf import timestamp_pb2 # type: ignore +from .transports.base import DataScanServiceTransport, DEFAULT_CLIENT_INFO +from .transports.grpc import DataScanServiceGrpcTransport +from .transports.grpc_asyncio import DataScanServiceGrpcAsyncIOTransport + + +class DataScanServiceClientMeta(type): + """Metaclass for the DataScanService client. + + This provides class-level methods for building and retrieving + support objects (e.g. transport) without polluting the client instance + objects. + """ + _transport_registry = OrderedDict() # type: Dict[str, Type[DataScanServiceTransport]] + _transport_registry["grpc"] = DataScanServiceGrpcTransport + _transport_registry["grpc_asyncio"] = DataScanServiceGrpcAsyncIOTransport + + def get_transport_class(cls, + label: Optional[str] = None, + ) -> Type[DataScanServiceTransport]: + """Returns an appropriate transport class. + + Args: + label: The name of the desired transport. If none is + provided, then the first transport in the registry is used. + + Returns: + The transport class to use. + """ + # If a specific transport is requested, return that one. + if label: + return cls._transport_registry[label] + + # No transport is requested; return the default (that is, the first one + # in the dictionary). + return next(iter(cls._transport_registry.values())) + + +class DataScanServiceClient(metaclass=DataScanServiceClientMeta): + """DataScanService manages DataScan resources which can be + configured to run various types of data scanning workload and + generate enriched metadata (e.g. Data Profile, Data Quality) for + the data source. 
+ """ + + @staticmethod + def _get_default_mtls_endpoint(api_endpoint): + """Converts api endpoint to mTLS endpoint. + + Convert "*.sandbox.googleapis.com" and "*.googleapis.com" to + "*.mtls.sandbox.googleapis.com" and "*.mtls.googleapis.com" respectively. + Args: + api_endpoint (Optional[str]): the api endpoint to convert. + Returns: + str: converted mTLS api endpoint. + """ + if not api_endpoint: + return api_endpoint + + mtls_endpoint_re = re.compile( + r"(?P[^.]+)(?P\.mtls)?(?P\.sandbox)?(?P\.googleapis\.com)?" + ) + + m = mtls_endpoint_re.match(api_endpoint) + name, mtls, sandbox, googledomain = m.groups() + if mtls or not googledomain: + return api_endpoint + + if sandbox: + return api_endpoint.replace( + "sandbox.googleapis.com", "mtls.sandbox.googleapis.com" + ) + + return api_endpoint.replace(".googleapis.com", ".mtls.googleapis.com") + + # Note: DEFAULT_ENDPOINT is deprecated. Use _DEFAULT_ENDPOINT_TEMPLATE instead. + DEFAULT_ENDPOINT = "dataplex.googleapis.com" + DEFAULT_MTLS_ENDPOINT = _get_default_mtls_endpoint.__func__( # type: ignore + DEFAULT_ENDPOINT + ) + + _DEFAULT_ENDPOINT_TEMPLATE = "dataplex.{UNIVERSE_DOMAIN}" + _DEFAULT_UNIVERSE = "googleapis.com" + + @classmethod + def from_service_account_info(cls, info: dict, *args, **kwargs): + """Creates an instance of this client using the provided credentials + info. + + Args: + info (dict): The service account private key info. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + DataScanServiceClient: The constructed client. + """ + credentials = service_account.Credentials.from_service_account_info(info) + kwargs["credentials"] = credentials + return cls(*args, **kwargs) + + @classmethod + def from_service_account_file(cls, filename: str, *args, **kwargs): + """Creates an instance of this client using the provided credentials + file. + + Args: + filename (str): The path to the service account private key json + file. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + DataScanServiceClient: The constructed client. + """ + credentials = service_account.Credentials.from_service_account_file( + filename) + kwargs["credentials"] = credentials + return cls(*args, **kwargs) + + from_service_account_json = from_service_account_file + + @property + def transport(self) -> DataScanServiceTransport: + """Returns the transport used by the client instance. + + Returns: + DataScanServiceTransport: The transport used by the client + instance. 
+ """ + return self._transport + + @staticmethod + def connection_path(project: str,location: str,connection: str,) -> str: + """Returns a fully-qualified connection string.""" + return "projects/{project}/locations/{location}/connections/{connection}".format(project=project, location=location, connection=connection, ) + + @staticmethod + def parse_connection_path(path: str) -> Dict[str,str]: + """Parses a connection path into its component segments.""" + m = re.match(r"^projects/(?P.+?)/locations/(?P.+?)/connections/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def data_scan_path(project: str,location: str,dataScan: str,) -> str: + """Returns a fully-qualified data_scan string.""" + return "projects/{project}/locations/{location}/dataScans/{dataScan}".format(project=project, location=location, dataScan=dataScan, ) + + @staticmethod + def parse_data_scan_path(path: str) -> Dict[str,str]: + """Parses a data_scan path into its component segments.""" + m = re.match(r"^projects/(?P.+?)/locations/(?P.+?)/dataScans/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def data_scan_job_path(project: str,location: str,dataScan: str,job: str,) -> str: + """Returns a fully-qualified data_scan_job string.""" + return "projects/{project}/locations/{location}/dataScans/{dataScan}/jobs/{job}".format(project=project, location=location, dataScan=dataScan, job=job, ) + + @staticmethod + def parse_data_scan_job_path(path: str) -> Dict[str,str]: + """Parses a data_scan_job path into its component segments.""" + m = re.match(r"^projects/(?P.+?)/locations/(?P.+?)/dataScans/(?P.+?)/jobs/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def dataset_path(project: str,dataset: str,) -> str: + """Returns a fully-qualified dataset string.""" + return "projects/{project}/datasets/{dataset}".format(project=project, dataset=dataset, ) + + @staticmethod + def parse_dataset_path(path: str) -> Dict[str,str]: + """Parses a dataset path into its component segments.""" + m = re.match(r"^projects/(?P.+?)/datasets/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def entity_path(project: str,location: str,lake: str,zone: str,entity: str,) -> str: + """Returns a fully-qualified entity string.""" + return "projects/{project}/locations/{location}/lakes/{lake}/zones/{zone}/entities/{entity}".format(project=project, location=location, lake=lake, zone=zone, entity=entity, ) + + @staticmethod + def parse_entity_path(path: str) -> Dict[str,str]: + """Parses a entity path into its component segments.""" + m = re.match(r"^projects/(?P.+?)/locations/(?P.+?)/lakes/(?P.+?)/zones/(?P.+?)/entities/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_billing_account_path(billing_account: str, ) -> str: + """Returns a fully-qualified billing_account string.""" + return "billingAccounts/{billing_account}".format(billing_account=billing_account, ) + + @staticmethod + def parse_common_billing_account_path(path: str) -> Dict[str,str]: + """Parse a billing_account path into its component segments.""" + m = re.match(r"^billingAccounts/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_folder_path(folder: str, ) -> str: + """Returns a fully-qualified folder string.""" + return "folders/{folder}".format(folder=folder, ) + + @staticmethod + def parse_common_folder_path(path: str) -> Dict[str,str]: + """Parse a folder path into its component segments.""" + m = re.match(r"^folders/(?P.+?)$", path) + 
+ return m.groupdict() if m else {}
+
+ @staticmethod
+ def common_organization_path(organization: str, ) -> str:
+ """Returns a fully-qualified organization string."""
+ return "organizations/{organization}".format(organization=organization, )
+
+ @staticmethod
+ def parse_common_organization_path(path: str) -> Dict[str,str]:
+ """Parse an organization path into its component segments."""
+ m = re.match(r"^organizations/(?P<organization>.+?)$", path)
+ return m.groupdict() if m else {}
+
+ @staticmethod
+ def common_project_path(project: str, ) -> str:
+ """Returns a fully-qualified project string."""
+ return "projects/{project}".format(project=project, )
+
+ @staticmethod
+ def parse_common_project_path(path: str) -> Dict[str,str]:
+ """Parse a project path into its component segments."""
+ m = re.match(r"^projects/(?P<project>.+?)$", path)
+ return m.groupdict() if m else {}
+
+ @staticmethod
+ def common_location_path(project: str, location: str, ) -> str:
+ """Returns a fully-qualified location string."""
+ return "projects/{project}/locations/{location}".format(project=project, location=location, )
+
+ @staticmethod
+ def parse_common_location_path(path: str) -> Dict[str,str]:
+ """Parse a location path into its component segments."""
+ m = re.match(r"^projects/(?P<project>.+?)/locations/(?P<location>.+?)$", path)
+ return m.groupdict() if m else {}
+
+ @classmethod
+ def get_mtls_endpoint_and_cert_source(cls, client_options: Optional[client_options_lib.ClientOptions] = None):
+ """Deprecated. Return the API endpoint and client cert source for mutual TLS.
+
+ The client cert source is determined in the following order:
+ (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the
+ client cert source is None.
+ (2) if `client_options.client_cert_source` is provided, use the provided one; if the
+ default client cert source exists, use the default one; otherwise the client cert
+ source is None.
+
+ The API endpoint is determined in the following order:
+ (1) if `client_options.api_endpoint` is provided, use the provided one.
+ (2) if `GOOGLE_API_USE_MTLS_ENDPOINT` environment variable is "always", use the
+ default mTLS endpoint; if the environment variable is "never", use the default API
+ endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise
+ use the default API endpoint.
+
+ More details can be found at https://google.aip.dev/auth/4114.
+
+ Args:
+ client_options (google.api_core.client_options.ClientOptions): Custom options for the
+ client. Only the `api_endpoint` and `client_cert_source` properties may be used
+ in this method.
+
+ Returns:
+ Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the
+ client cert source to use.
+
+ Raises:
+ google.auth.exceptions.MutualTLSChannelError: If any errors happen.
+ """
+
+ warnings.warn("get_mtls_endpoint_and_cert_source is deprecated. 
Use the api_endpoint property instead.", + DeprecationWarning) + if client_options is None: + client_options = client_options_lib.ClientOptions() + use_client_cert = os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false") + use_mtls_endpoint = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto") + if use_client_cert not in ("true", "false"): + raise ValueError("Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`") + if use_mtls_endpoint not in ("auto", "never", "always"): + raise MutualTLSChannelError("Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`") + + # Figure out the client cert source to use. + client_cert_source = None + if use_client_cert == "true": + if client_options.client_cert_source: + client_cert_source = client_options.client_cert_source + elif mtls.has_default_client_cert_source(): + client_cert_source = mtls.default_client_cert_source() + + # Figure out which api endpoint to use. + if client_options.api_endpoint is not None: + api_endpoint = client_options.api_endpoint + elif use_mtls_endpoint == "always" or (use_mtls_endpoint == "auto" and client_cert_source): + api_endpoint = cls.DEFAULT_MTLS_ENDPOINT + else: + api_endpoint = cls.DEFAULT_ENDPOINT + + return api_endpoint, client_cert_source + + @staticmethod + def _read_environment_variables(): + """Returns the environment variables used by the client. + + Returns: + Tuple[bool, str, str]: returns the GOOGLE_API_USE_CLIENT_CERTIFICATE, + GOOGLE_API_USE_MTLS_ENDPOINT, and GOOGLE_CLOUD_UNIVERSE_DOMAIN environment variables. + + Raises: + ValueError: If GOOGLE_API_USE_CLIENT_CERTIFICATE is not + any of ["true", "false"]. + google.auth.exceptions.MutualTLSChannelError: If GOOGLE_API_USE_MTLS_ENDPOINT + is not any of ["auto", "never", "always"]. + """ + use_client_cert = os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false").lower() + use_mtls_endpoint = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto").lower() + universe_domain_env = os.getenv("GOOGLE_CLOUD_UNIVERSE_DOMAIN") + if use_client_cert not in ("true", "false"): + raise ValueError("Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`") + if use_mtls_endpoint not in ("auto", "never", "always"): + raise MutualTLSChannelError("Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`") + return use_client_cert == "true", use_mtls_endpoint, universe_domain_env + + @staticmethod + def _get_client_cert_source(provided_cert_source, use_cert_flag): + """Return the client cert source to be used by the client. + + Args: + provided_cert_source (bytes): The client certificate source provided. + use_cert_flag (bool): A flag indicating whether to use the client certificate. + + Returns: + bytes or None: The client cert source to be used by the client. + """ + client_cert_source = None + if use_cert_flag: + if provided_cert_source: + client_cert_source = provided_cert_source + elif mtls.has_default_client_cert_source(): + client_cert_source = mtls.default_client_cert_source() + return client_cert_source + + @staticmethod + def _get_api_endpoint(api_override, client_cert_source, universe_domain, use_mtls_endpoint): + """Return the API endpoint used by the client. + + Args: + api_override (str): The API endpoint override. If specified, this is always + the return value of this function and the other arguments are not used. + client_cert_source (bytes): The client certificate source used by the client. 
+ universe_domain (str): The universe domain used by the client. + use_mtls_endpoint (str): How to use the mTLS endpoint, which depends also on the other parameters. + Possible values are "always", "auto", or "never". + + Returns: + str: The API endpoint to be used by the client. + """ + if api_override is not None: + api_endpoint = api_override + elif use_mtls_endpoint == "always" or (use_mtls_endpoint == "auto" and client_cert_source): + _default_universe = DataScanServiceClient._DEFAULT_UNIVERSE + if universe_domain != _default_universe: + raise MutualTLSChannelError(f"mTLS is not supported in any universe other than {_default_universe}.") + api_endpoint = DataScanServiceClient.DEFAULT_MTLS_ENDPOINT + else: + api_endpoint = DataScanServiceClient._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=universe_domain) + return api_endpoint + + @staticmethod + def _get_universe_domain(client_universe_domain: Optional[str], universe_domain_env: Optional[str]) -> str: + """Return the universe domain used by the client. + + Args: + client_universe_domain (Optional[str]): The universe domain configured via the client options. + universe_domain_env (Optional[str]): The universe domain configured via the "GOOGLE_CLOUD_UNIVERSE_DOMAIN" environment variable. + + Returns: + str: The universe domain to be used by the client. + + Raises: + ValueError: If the universe domain is an empty string. + """ + universe_domain = DataScanServiceClient._DEFAULT_UNIVERSE + if client_universe_domain is not None: + universe_domain = client_universe_domain + elif universe_domain_env is not None: + universe_domain = universe_domain_env + if len(universe_domain.strip()) == 0: + raise ValueError("Universe Domain cannot be an empty string.") + return universe_domain + + def _validate_universe_domain(self): + """Validates client's and credentials' universe domains are consistent. + + Returns: + bool: True iff the configured universe domain is valid. + + Raises: + ValueError: If the configured universe domain is not valid. + """ + + # NOTE (b/349488459): universe validation is disabled until further notice. + return True + + @property + def api_endpoint(self): + """Return the API endpoint used by the client instance. + + Returns: + str: The API endpoint used by the client instance. + """ + return self._api_endpoint + + @property + def universe_domain(self) -> str: + """Return the universe domain used by the client instance. + + Returns: + str: The universe domain used by the client instance. + """ + return self._universe_domain + + def __init__(self, *, + credentials: Optional[ga_credentials.Credentials] = None, + transport: Optional[Union[str, DataScanServiceTransport, Callable[..., DataScanServiceTransport]]] = None, + client_options: Optional[Union[client_options_lib.ClientOptions, dict]] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + ) -> None: + """Instantiates the data scan service client. + + Args: + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + transport (Optional[Union[str,DataScanServiceTransport,Callable[..., DataScanServiceTransport]]]): + The transport to use, or a Callable that constructs and returns a new transport. 
+ If a Callable is given, it will be called with the same set of initialization
+ arguments as used in the DataScanServiceTransport constructor.
+ If set to None, a transport is chosen automatically.
+ client_options (Optional[Union[google.api_core.client_options.ClientOptions, dict]]):
+ Custom options for the client.
+
+ 1. The ``api_endpoint`` property can be used to override the
+ default endpoint provided by the client when ``transport`` is
+ not explicitly provided. Only if this property is not set and
+ ``transport`` was not explicitly provided, the endpoint is
+ determined by the GOOGLE_API_USE_MTLS_ENDPOINT environment
+ variable, which can have one of the following values:
+ "always" (always use the default mTLS endpoint), "never" (always
+ use the default regular endpoint) and "auto" (auto-switch to the
+ default mTLS endpoint if client certificate is present; this is
+ the default value).
+
+ 2. If the GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable
+ is "true", then the ``client_cert_source`` property can be used
+ to provide a client certificate for mTLS transport. If
+ not provided, the default SSL client certificate will be used if
+ present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not
+ set, no client certificate will be used.
+
+ 3. The ``universe_domain`` property can be used to override the
+ default "googleapis.com" universe. Note that the ``api_endpoint``
+ property still takes precedence; and ``universe_domain`` is
+ currently not supported for mTLS.
+
+ client_info (google.api_core.gapic_v1.client_info.ClientInfo):
+ The client info used to send a user-agent string along with
+ API requests. If ``None``, then default info will be used.
+ Generally, you only need to set this if you're developing
+ your own client library.
+
+ Raises:
+ google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport
+ creation failed for any reason.
+ """
+ self._client_options = client_options
+ if isinstance(self._client_options, dict):
+ self._client_options = client_options_lib.from_dict(self._client_options)
+ if self._client_options is None:
+ self._client_options = client_options_lib.ClientOptions()
+ self._client_options = cast(client_options_lib.ClientOptions, self._client_options)
+
+ universe_domain_opt = getattr(self._client_options, 'universe_domain', None)
+
+ self._use_client_cert, self._use_mtls_endpoint, self._universe_domain_env = DataScanServiceClient._read_environment_variables()
+ self._client_cert_source = DataScanServiceClient._get_client_cert_source(self._client_options.client_cert_source, self._use_client_cert)
+ self._universe_domain = DataScanServiceClient._get_universe_domain(universe_domain_opt, self._universe_domain_env)
+ self._api_endpoint = None # updated below, depending on `transport`
+
+ # Initialize the universe domain validation.
+ self._is_universe_domain_valid = False
+
+ api_key_value = getattr(self._client_options, "api_key", None)
+ if api_key_value and credentials:
+ raise ValueError("client_options.api_key and credentials are mutually exclusive")
+
+ # Save or instantiate the transport.
+ # Ordinarily, we provide the transport, but allowing a custom transport
+ # instance provides an extensibility point for unusual situations.
+ transport_provided = isinstance(transport, DataScanServiceTransport)
+ if transport_provided:
+ # transport is a DataScanServiceTransport instance.
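+ # Illustrative sketch, not generated code: a caller supplying a ready-made
+ # transport must configure its credentials/channel directly, e.g. against
+ # a hypothetical local emulator:
+ #
+ #     channel = grpc.insecure_channel("localhost:8080")
+ #     transport = DataScanServiceGrpcTransport(channel=channel)
+ #     client = DataScanServiceClient(transport=transport)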
+ if credentials or self._client_options.credentials_file or api_key_value: + raise ValueError("When providing a transport instance, " + "provide its credentials directly.") + if self._client_options.scopes: + raise ValueError( + "When providing a transport instance, provide its scopes " + "directly." + ) + self._transport = cast(DataScanServiceTransport, transport) + self._api_endpoint = self._transport.host + + self._api_endpoint = (self._api_endpoint or + DataScanServiceClient._get_api_endpoint( + self._client_options.api_endpoint, + self._client_cert_source, + self._universe_domain, + self._use_mtls_endpoint)) + + if not transport_provided: + import google.auth._default # type: ignore + + if api_key_value and hasattr(google.auth._default, "get_api_key_credentials"): + credentials = google.auth._default.get_api_key_credentials(api_key_value) + + transport_init: Union[Type[DataScanServiceTransport], Callable[..., DataScanServiceTransport]] = ( + DataScanServiceClient.get_transport_class(transport) + if isinstance(transport, str) or transport is None + else cast(Callable[..., DataScanServiceTransport], transport) + ) + # initialize with the provided callable or the passed in class + self._transport = transport_init( + credentials=credentials, + credentials_file=self._client_options.credentials_file, + host=self._api_endpoint, + scopes=self._client_options.scopes, + client_cert_source_for_mtls=self._client_cert_source, + quota_project_id=self._client_options.quota_project_id, + client_info=client_info, + always_use_jwt_access=True, + api_audience=self._client_options.api_audience, + ) + + def create_data_scan(self, + request: Optional[Union[datascans.CreateDataScanRequest, dict]] = None, + *, + parent: Optional[str] = None, + data_scan: Optional[datascans.DataScan] = None, + data_scan_id: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation.Operation: + r"""Creates a DataScan resource. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dataplex_v1 + + def sample_create_data_scan(): + # Create a client + client = dataplex_v1.DataScanServiceClient() + + # Initialize request argument(s) + data_scan = dataplex_v1.DataScan() + data_scan.data_quality_spec.rules.dimension = "dimension_value" + data_scan.data.entity = "entity_value" + + request = dataplex_v1.CreateDataScanRequest( + parent="parent_value", + data_scan=data_scan, + data_scan_id="data_scan_id_value", + ) + + # Make the request + operation = client.create_data_scan(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.dataplex_v1.types.CreateDataScanRequest, dict]): + The request object. Create dataScan request. + parent (str): + Required. The resource name of the parent location: + ``projects/{project}/locations/{location_id}`` where + ``project`` refers to a *project_id* or *project_number* + and ``location_id`` refers to a GCP region. 
+
+                This corresponds to the ``parent`` field
+                on the ``request`` instance; if ``request`` is provided, this
+                should not be set.
+            data_scan (google.cloud.dataplex_v1.types.DataScan):
+                Required. DataScan resource.
+                This corresponds to the ``data_scan`` field
+                on the ``request`` instance; if ``request`` is provided, this
+                should not be set.
+            data_scan_id (str):
+                Required. DataScan identifier.
+
+                -  Must contain only lowercase letters, numbers and
+                   hyphens.
+                -  Must start with a letter.
+                -  Must end with a number or a letter.
+                -  Must be between 1 and 63 characters.
+                -  Must be unique within the customer project /
+                   location.
+
+                This corresponds to the ``data_scan_id`` field
+                on the ``request`` instance; if ``request`` is provided, this
+                should not be set.
+            retry (google.api_core.retry.Retry): Designation of what errors, if any,
+                should be retried.
+            timeout (float): The timeout for this request.
+            metadata (Sequence[Tuple[str, str]]): Strings which should be
+                sent along with the request as metadata.
+
+        Returns:
+            google.api_core.operation.Operation:
+                An object representing a long-running operation.
+
+                The result type for the operation will be :class:`google.cloud.dataplex_v1.types.DataScan` Represents a user-visible job which provides the insights for the related
+                data source.
+
+                For example:
+
+                -  Data Quality: generates queries based on the rules
+                   and runs against the data to get data quality
+                   check results.
+                -  Data Profile: analyzes the data in table(s) and
+                   generates insights about the structure, content
+                   and relationships (such as null percent,
+                   cardinality, min/max/mean, etc).
+
+        """
+        # Create or coerce a protobuf request object.
+        # - Quick check: If we got a request object, we should *not* have
+        #   gotten any keyword arguments that map to the request.
+        has_flattened_params = any([parent, data_scan, data_scan_id])
+        if request is not None and has_flattened_params:
+            raise ValueError('If the `request` argument is set, then none of '
+                             'the individual field arguments should be set.')
+
+        # - Use the request object if provided (there's no risk of modifying the input as
+        #   there are no flattened fields), or create one.
+        if not isinstance(request, datascans.CreateDataScanRequest):
+            request = datascans.CreateDataScanRequest(request)
+            # If we have keyword arguments corresponding to fields on the
+            # request, apply these.
+            if parent is not None:
+                request.parent = parent
+            if data_scan is not None:
+                request.data_scan = data_scan
+            if data_scan_id is not None:
+                request.data_scan_id = data_scan_id
+
+        # Wrap the RPC method; this adds retry and timeout information,
+        # and friendly error handling.
+        rpc = self._transport._wrapped_methods[self._transport.create_data_scan]
+
+        # Certain fields should be provided within the metadata header;
+        # add these here.
+        metadata = tuple(metadata) + (
+            gapic_v1.routing_header.to_grpc_metadata((
+                ("parent", request.parent),
+            )),
+        )
+
+        # Validate the universe domain.
+        self._validate_universe_domain()
+
+        # Send the request.
+        response = rpc(
+            request,
+            retry=retry,
+            timeout=timeout,
+            metadata=metadata,
+        )
+
+        # Wrap the response in an operation future.
+        response = operation.from_gapic(
+            response,
+            self._transport.operations_client,
+            datascans.DataScan,
+            metadata_type=service.OperationMetadata,
+        )
+
+        # Done; return the response.
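+        # ``response.result()`` blocks until the long-running operation
+        # completes and then returns the created ``DataScan``.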
+ return response + + def update_data_scan(self, + request: Optional[Union[datascans.UpdateDataScanRequest, dict]] = None, + *, + data_scan: Optional[datascans.DataScan] = None, + update_mask: Optional[field_mask_pb2.FieldMask] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation.Operation: + r"""Updates a DataScan resource. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dataplex_v1 + + def sample_update_data_scan(): + # Create a client + client = dataplex_v1.DataScanServiceClient() + + # Initialize request argument(s) + data_scan = dataplex_v1.DataScan() + data_scan.data_quality_spec.rules.dimension = "dimension_value" + data_scan.data.entity = "entity_value" + + request = dataplex_v1.UpdateDataScanRequest( + data_scan=data_scan, + ) + + # Make the request + operation = client.update_data_scan(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.dataplex_v1.types.UpdateDataScanRequest, dict]): + The request object. Update dataScan request. + data_scan (google.cloud.dataplex_v1.types.DataScan): + Required. DataScan resource to be updated. + + Only fields specified in ``update_mask`` are updated. + + This corresponds to the ``data_scan`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + update_mask (google.protobuf.field_mask_pb2.FieldMask): + Required. Mask of fields to update. + This corresponds to the ``update_mask`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation.Operation: + An object representing a long-running operation. + + The result type for the operation will be :class:`google.cloud.dataplex_v1.types.DataScan` Represents a user-visible job which provides the insights for the related + data source. + + For example: + + - Data Quality: generates queries based on the rules + and runs against the data to get data quality + check results. + - Data Profile: analyzes the data in table(s) and + generates insights about the structure, content + and relationships (such as null percent, + cardinality, min/max/mean, etc). + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([data_scan, update_mask]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. 
+ if not isinstance(request, datascans.UpdateDataScanRequest): + request = datascans.UpdateDataScanRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if data_scan is not None: + request.data_scan = data_scan + if update_mask is not None: + request.update_mask = update_mask + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.update_data_scan] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("data_scan.name", request.data_scan.name), + )), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation.from_gapic( + response, + self._transport.operations_client, + datascans.DataScan, + metadata_type=service.OperationMetadata, + ) + + # Done; return the response. + return response + + def delete_data_scan(self, + request: Optional[Union[datascans.DeleteDataScanRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation.Operation: + r"""Deletes a DataScan resource. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dataplex_v1 + + def sample_delete_data_scan(): + # Create a client + client = dataplex_v1.DataScanServiceClient() + + # Initialize request argument(s) + request = dataplex_v1.DeleteDataScanRequest( + name="name_value", + ) + + # Make the request + operation = client.delete_data_scan(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.dataplex_v1.types.DeleteDataScanRequest, dict]): + The request object. Delete dataScan request. + name (str): + Required. The resource name of the dataScan: + ``projects/{project}/locations/{location_id}/dataScans/{data_scan_id}`` + where ``project`` refers to a *project_id* or + *project_number* and ``location_id`` refers to a GCP + region. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation.Operation: + An object representing a long-running operation. + + The result type for the operation will be :class:`google.protobuf.empty_pb2.Empty` A generic empty message that you can re-use to avoid defining duplicated + empty messages in your APIs. A typical example is to + use it as the request or the response type of an API + method. 
For instance: + + service Foo { + rpc Bar(google.protobuf.Empty) returns + (google.protobuf.Empty); + + } + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, datascans.DeleteDataScanRequest): + request = datascans.DeleteDataScanRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.delete_data_scan] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation.from_gapic( + response, + self._transport.operations_client, + empty_pb2.Empty, + metadata_type=service.OperationMetadata, + ) + + # Done; return the response. + return response + + def get_data_scan(self, + request: Optional[Union[datascans.GetDataScanRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> datascans.DataScan: + r"""Gets a DataScan resource. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dataplex_v1 + + def sample_get_data_scan(): + # Create a client + client = dataplex_v1.DataScanServiceClient() + + # Initialize request argument(s) + request = dataplex_v1.GetDataScanRequest( + name="name_value", + ) + + # Make the request + response = client.get_data_scan(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.dataplex_v1.types.GetDataScanRequest, dict]): + The request object. Get dataScan request. + name (str): + Required. The resource name of the dataScan: + ``projects/{project}/locations/{location_id}/dataScans/{data_scan_id}`` + where ``project`` refers to a *project_id* or + *project_number* and ``location_id`` refers to a GCP + region. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. 
+ metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.dataplex_v1.types.DataScan: + Represents a user-visible job which provides the insights for the related + data source. + + For example: + + - Data Quality: generates queries based on the rules + and runs against the data to get data quality + check results. + - Data Profile: analyzes the data in table(s) and + generates insights about the structure, content + and relationships (such as null percent, + cardinality, min/max/mean, etc). + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, datascans.GetDataScanRequest): + request = datascans.GetDataScanRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.get_data_scan] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def list_data_scans(self, + request: Optional[Union[datascans.ListDataScansRequest, dict]] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListDataScansPager: + r"""Lists DataScans. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dataplex_v1 + + def sample_list_data_scans(): + # Create a client + client = dataplex_v1.DataScanServiceClient() + + # Initialize request argument(s) + request = dataplex_v1.ListDataScansRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_data_scans(request=request) + + # Handle the response + for response in page_result: + print(response) + + Args: + request (Union[google.cloud.dataplex_v1.types.ListDataScansRequest, dict]): + The request object. List dataScans request. + parent (str): + Required. The resource name of the parent location: + ``projects/{project}/locations/{location_id}`` where + ``project`` refers to a *project_id* or *project_number* + and ``location_id`` refers to a GCP region. 
+
+                This corresponds to the ``parent`` field
+                on the ``request`` instance; if ``request`` is provided, this
+                should not be set.
+            retry (google.api_core.retry.Retry): Designation of what errors, if any,
+                should be retried.
+            timeout (float): The timeout for this request.
+            metadata (Sequence[Tuple[str, str]]): Strings which should be
+                sent along with the request as metadata.
+
+        Returns:
+            google.cloud.dataplex_v1.services.data_scan_service.pagers.ListDataScansPager:
+                List dataScans response.
+
+                Iterating over this object will yield
+                results and resolve additional pages
+                automatically.
+
+        """
+        # Create or coerce a protobuf request object.
+        # - Quick check: If we got a request object, we should *not* have
+        #   gotten any keyword arguments that map to the request.
+        has_flattened_params = any([parent])
+        if request is not None and has_flattened_params:
+            raise ValueError('If the `request` argument is set, then none of '
+                             'the individual field arguments should be set.')
+
+        # - Use the request object if provided (there's no risk of modifying the input as
+        #   there are no flattened fields), or create one.
+        if not isinstance(request, datascans.ListDataScansRequest):
+            request = datascans.ListDataScansRequest(request)
+            # If we have keyword arguments corresponding to fields on the
+            # request, apply these.
+            if parent is not None:
+                request.parent = parent
+
+        # Wrap the RPC method; this adds retry and timeout information,
+        # and friendly error handling.
+        rpc = self._transport._wrapped_methods[self._transport.list_data_scans]
+
+        # Certain fields should be provided within the metadata header;
+        # add these here.
+        metadata = tuple(metadata) + (
+            gapic_v1.routing_header.to_grpc_metadata((
+                ("parent", request.parent),
+            )),
+        )
+
+        # Validate the universe domain.
+        self._validate_universe_domain()
+
+        # Send the request.
+        response = rpc(
+            request,
+            retry=retry,
+            timeout=timeout,
+            metadata=metadata,
+        )
+
+        # This method is paged; wrap the response in a pager, which provides
+        # an `__iter__` convenience method.
+        response = pagers.ListDataScansPager(
+            method=rpc,
+            request=request,
+            response=response,
+            retry=retry,
+            timeout=timeout,
+            metadata=metadata,
+        )
+
+        # Done; return the response.
+        return response
+
+    def run_data_scan(self,
+            request: Optional[Union[datascans.RunDataScanRequest, dict]] = None,
+            *,
+            name: Optional[str] = None,
+            retry: OptionalRetry = gapic_v1.method.DEFAULT,
+            timeout: Union[float, object] = gapic_v1.method.DEFAULT,
+            metadata: Sequence[Tuple[str, str]] = (),
+            ) -> datascans.RunDataScanResponse:
+        r"""Runs an on-demand execution of a DataScan.
+
+        .. code-block:: python
+
+            # This snippet has been automatically generated and should be regarded as a
+            # code template only.
+            # It will require modifications to work:
+            # - It may require correct/in-range values for request initialization.
+            # - It may require specifying regional endpoints when creating the service
+            #   client as shown in:
+            #   https://googleapis.dev/python/google-api-core/latest/client_options.html
+            from google.cloud import dataplex_v1
+
+            def sample_run_data_scan():
+                # Create a client
+                client = dataplex_v1.DataScanServiceClient()
+
+                # Initialize request argument(s)
+                request = dataplex_v1.RunDataScanRequest(
+                    name="name_value",
+                )
+
+                # Make the request
+                response = client.run_data_scan(request=request)
+
+                # Handle the response
+                print(response)
+
+        Args:
+            request (Union[google.cloud.dataplex_v1.types.RunDataScanRequest, dict]):
+                The request object. Run DataScan request.
+            name (str):
+                Required. The resource name of the DataScan:
+                ``projects/{project}/locations/{location_id}/dataScans/{data_scan_id}``
+                where ``project`` refers to a *project_id* or
+                *project_number* and ``location_id`` refers to a GCP
+                region.
+
+                Only **OnDemand** data scans are allowed.
+
+                This corresponds to the ``name`` field
+                on the ``request`` instance; if ``request`` is provided, this
+                should not be set.
+            retry (google.api_core.retry.Retry): Designation of what errors, if any,
+                should be retried.
+            timeout (float): The timeout for this request.
+            metadata (Sequence[Tuple[str, str]]): Strings which should be
+                sent along with the request as metadata.
+
+        Returns:
+            google.cloud.dataplex_v1.types.RunDataScanResponse:
+                Run DataScan response.
+        """
+        # Create or coerce a protobuf request object.
+        # - Quick check: If we got a request object, we should *not* have
+        #   gotten any keyword arguments that map to the request.
+        has_flattened_params = any([name])
+        if request is not None and has_flattened_params:
+            raise ValueError('If the `request` argument is set, then none of '
+                             'the individual field arguments should be set.')
+
+        # - Use the request object if provided (there's no risk of modifying the input as
+        #   there are no flattened fields), or create one.
+        if not isinstance(request, datascans.RunDataScanRequest):
+            request = datascans.RunDataScanRequest(request)
+            # If we have keyword arguments corresponding to fields on the
+            # request, apply these.
+            if name is not None:
+                request.name = name
+
+        # Wrap the RPC method; this adds retry and timeout information,
+        # and friendly error handling.
+        rpc = self._transport._wrapped_methods[self._transport.run_data_scan]
+
+        # Certain fields should be provided within the metadata header;
+        # add these here.
+        metadata = tuple(metadata) + (
+            gapic_v1.routing_header.to_grpc_metadata((
+                ("name", request.name),
+            )),
+        )
+
+        # Validate the universe domain.
+        self._validate_universe_domain()
+
+        # Send the request.
+        response = rpc(
+            request,
+            retry=retry,
+            timeout=timeout,
+            metadata=metadata,
+        )
+
+        # Done; return the response.
+        return response
+
+    def get_data_scan_job(self,
+            request: Optional[Union[datascans.GetDataScanJobRequest, dict]] = None,
+            *,
+            name: Optional[str] = None,
+            retry: OptionalRetry = gapic_v1.method.DEFAULT,
+            timeout: Union[float, object] = gapic_v1.method.DEFAULT,
+            metadata: Sequence[Tuple[str, str]] = (),
+            ) -> datascans.DataScanJob:
+        r"""Gets a DataScanJob resource.
+
+        .. code-block:: python
+
+            # This snippet has been automatically generated and should be regarded as a
+            # code template only.
+            # It will require modifications to work:
+            # - It may require correct/in-range values for request initialization.
+            # - It may require specifying regional endpoints when creating the service
+            #   client as shown in:
+            #   https://googleapis.dev/python/google-api-core/latest/client_options.html
+            from google.cloud import dataplex_v1
+
+            def sample_get_data_scan_job():
+                # Create a client
+                client = dataplex_v1.DataScanServiceClient()
+
+                # Initialize request argument(s)
+                request = dataplex_v1.GetDataScanJobRequest(
+                    name="name_value",
+                )
+
+                # Make the request
+                response = client.get_data_scan_job(request=request)
+
+                # Handle the response
+                print(response)
+
+        Args:
+            request (Union[google.cloud.dataplex_v1.types.GetDataScanJobRequest, dict]):
+                The request object. Get DataScanJob request.
+            name (str):
+                Required.
The resource name of the DataScanJob: + ``projects/{project}/locations/{location_id}/dataScans/{data_scan_id}/jobs/{data_scan_job_id}`` + where ``project`` refers to a *project_id* or + *project_number* and ``location_id`` refers to a GCP + region. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.dataplex_v1.types.DataScanJob: + A DataScanJob represents an instance + of DataScan execution. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, datascans.GetDataScanJobRequest): + request = datascans.GetDataScanJobRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.get_data_scan_job] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def list_data_scan_jobs(self, + request: Optional[Union[datascans.ListDataScanJobsRequest, dict]] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListDataScanJobsPager: + r"""Lists DataScanJobs under the given DataScan. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dataplex_v1 + + def sample_list_data_scan_jobs(): + # Create a client + client = dataplex_v1.DataScanServiceClient() + + # Initialize request argument(s) + request = dataplex_v1.ListDataScanJobsRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_data_scan_jobs(request=request) + + # Handle the response + for response in page_result: + print(response) + + Args: + request (Union[google.cloud.dataplex_v1.types.ListDataScanJobsRequest, dict]): + The request object. List DataScanJobs request. + parent (str): + Required. 
The resource name of the parent environment: + ``projects/{project}/locations/{location_id}/dataScans/{data_scan_id}`` + where ``project`` refers to a *project_id* or + *project_number* and ``location_id`` refers to a GCP + region. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.dataplex_v1.services.data_scan_service.pagers.ListDataScanJobsPager: + List DataScanJobs response. + + Iterating over this object will yield + results and resolve additional pages + automatically. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, datascans.ListDataScanJobsRequest): + request = datascans.ListDataScanJobsRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.list_data_scan_jobs] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("parent", request.parent), + )), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__iter__` convenience method. + response = pagers.ListDataScanJobsPager( + method=rpc, + request=request, + response=response, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def generate_data_quality_rules(self, + request: Optional[Union[datascans.GenerateDataQualityRulesRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> datascans.GenerateDataQualityRulesResponse: + r"""Generates recommended data quality rules based on the + results of a data profiling scan. + + Use the recommendations to build rules for a data + quality scan. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dataplex_v1 + + def sample_generate_data_quality_rules(): + # Create a client + client = dataplex_v1.DataScanServiceClient() + + # Initialize request argument(s) + request = dataplex_v1.GenerateDataQualityRulesRequest( + name="name_value", + ) + + # Make the request + response = client.generate_data_quality_rules(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.dataplex_v1.types.GenerateDataQualityRulesRequest, dict]): + The request object. Request details for generating data + quality rule recommendations. + name (str): + Required. The name must be one of the following: + + - The name of a data scan with at least one successful, + completed data profiling job + - The name of a successful, completed data profiling + job (a data scan job where the job type is data + profiling) + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.dataplex_v1.types.GenerateDataQualityRulesResponse: + Response details for data quality + rule recommendations. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, datascans.GenerateDataQualityRulesRequest): + request = datascans.GenerateDataQualityRulesRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.generate_data_quality_rules] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def __enter__(self) -> "DataScanServiceClient": + return self + + def __exit__(self, type, value, traceback): + """Releases underlying transport's resources. + + .. warning:: + ONLY use as a context manager if the transport is NOT shared + with other clients! Exiting the with block will CLOSE the transport + and may cause errors in other clients! 
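+
+        A minimal sketch of exclusive usage (assuming application default
+        credentials are available in the environment):
+
+        .. code-block:: python
+
+            from google.cloud import dataplex_v1
+
+            # The client below owns its transport, so closing it on exit
+            # is safe.
+            with dataplex_v1.DataScanServiceClient() as client:
+                client.get_data_scan(name="name_value")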
+ """ + self.transport.close() + + def list_operations( + self, + request: Optional[operations_pb2.ListOperationsRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.ListOperationsResponse: + r"""Lists operations that match the specified filter in the request. + + Args: + request (:class:`~.operations_pb2.ListOperationsRequest`): + The request object. Request message for + `ListOperations` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + ~.operations_pb2.ListOperationsResponse: + Response message for ``ListOperations`` method. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.ListOperationsRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.list_operations] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("name", request.name),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, retry=retry, timeout=timeout, metadata=metadata,) + + # Done; return the response. + return response + + def get_operation( + self, + request: Optional[operations_pb2.GetOperationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.Operation: + r"""Gets the latest state of a long-running operation. + + Args: + request (:class:`~.operations_pb2.GetOperationRequest`): + The request object. Request message for + `GetOperation` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + ~.operations_pb2.Operation: + An ``Operation`` object. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.GetOperationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.get_operation] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("name", request.name),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, retry=retry, timeout=timeout, metadata=metadata,) + + # Done; return the response. 
+ return response + + def delete_operation( + self, + request: Optional[operations_pb2.DeleteOperationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Deletes a long-running operation. + + This method indicates that the client is no longer interested + in the operation result. It does not cancel the operation. + If the server doesn't support this method, it returns + `google.rpc.Code.UNIMPLEMENTED`. + + Args: + request (:class:`~.operations_pb2.DeleteOperationRequest`): + The request object. Request message for + `DeleteOperation` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + None + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.DeleteOperationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.delete_operation] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("name", request.name),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + + def cancel_operation( + self, + request: Optional[operations_pb2.CancelOperationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Starts asynchronous cancellation on a long-running operation. + + The server makes a best effort to cancel the operation, but success + is not guaranteed. If the server doesn't support this method, it returns + `google.rpc.Code.UNIMPLEMENTED`. + + Args: + request (:class:`~.operations_pb2.CancelOperationRequest`): + The request object. Request message for + `CancelOperation` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + None + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.CancelOperationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.cancel_operation] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("name", request.name),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. 
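+        # CancelOperation returns google.protobuf.Empty, so the RPC's
+        # return value is intentionally discarded.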
+ rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + + def get_location( + self, + request: Optional[locations_pb2.GetLocationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> locations_pb2.Location: + r"""Gets information about a location. + + Args: + request (:class:`~.location_pb2.GetLocationRequest`): + The request object. Request message for + `GetLocation` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + ~.location_pb2.Location: + Location object. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = locations_pb2.GetLocationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.get_location] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("name", request.name),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, retry=retry, timeout=timeout, metadata=metadata,) + + # Done; return the response. + return response + + def list_locations( + self, + request: Optional[locations_pb2.ListLocationsRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> locations_pb2.ListLocationsResponse: + r"""Lists information about the supported locations for this service. + + Args: + request (:class:`~.location_pb2.ListLocationsRequest`): + The request object. Request message for + `ListLocations` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + ~.location_pb2.ListLocationsResponse: + Response message for ``ListLocations`` method. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = locations_pb2.ListLocationsRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.list_locations] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("name", request.name),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, retry=retry, timeout=timeout, metadata=metadata,) + + # Done; return the response. 
+ return response + + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(gapic_version=package_version.__version__) + + +__all__ = ( + "DataScanServiceClient", +) diff --git a/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/data_scan_service/pagers.py b/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/data_scan_service/pagers.py new file mode 100644 index 000000000000..897a4108391e --- /dev/null +++ b/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/data_scan_service/pagers.py @@ -0,0 +1,297 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from google.api_core import gapic_v1 +from google.api_core import retry as retries +from google.api_core import retry_async as retries_async +from typing import Any, AsyncIterator, Awaitable, Callable, Sequence, Tuple, Optional, Iterator, Union +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault, None] + OptionalAsyncRetry = Union[retries_async.AsyncRetry, gapic_v1.method._MethodDefault, None] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object, None] # type: ignore + OptionalAsyncRetry = Union[retries_async.AsyncRetry, object, None] # type: ignore + +from google.cloud.dataplex_v1.types import datascans + + +class ListDataScansPager: + """A pager for iterating through ``list_data_scans`` requests. + + This class thinly wraps an initial + :class:`google.cloud.dataplex_v1.types.ListDataScansResponse` object, and + provides an ``__iter__`` method to iterate through its + ``data_scans`` field. + + If there are more pages, the ``__iter__`` method will make additional + ``ListDataScans`` requests and continue to iterate + through the ``data_scans`` field on the + corresponding responses. + + All the usual :class:`google.cloud.dataplex_v1.types.ListDataScansResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + def __init__(self, + method: Callable[..., datascans.ListDataScansResponse], + request: datascans.ListDataScansRequest, + response: datascans.ListDataScansResponse, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = ()): + """Instantiate the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.dataplex_v1.types.ListDataScansRequest): + The initial request object. + response (google.cloud.dataplex_v1.types.ListDataScansResponse): + The initial response object. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
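+
+        A minimal usage sketch; in practice the pager is returned by
+        ``DataScanServiceClient.list_data_scans`` (``client`` below is
+        assumed to be an existing client) rather than constructed directly:
+
+        .. code-block:: python
+
+            # Iterating the pager fetches additional pages on demand.
+            for data_scan in client.list_data_scans(parent="parent_value"):
+                print(data_scan.name)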
+ """ + self._method = method + self._request = datascans.ListDataScansRequest(request) + self._response = response + self._retry = retry + self._timeout = timeout + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + def pages(self) -> Iterator[datascans.ListDataScansResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = self._method(self._request, retry=self._retry, timeout=self._timeout, metadata=self._metadata) + yield self._response + + def __iter__(self) -> Iterator[datascans.DataScan]: + for page in self.pages: + yield from page.data_scans + + def __repr__(self) -> str: + return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) + + +class ListDataScansAsyncPager: + """A pager for iterating through ``list_data_scans`` requests. + + This class thinly wraps an initial + :class:`google.cloud.dataplex_v1.types.ListDataScansResponse` object, and + provides an ``__aiter__`` method to iterate through its + ``data_scans`` field. + + If there are more pages, the ``__aiter__`` method will make additional + ``ListDataScans`` requests and continue to iterate + through the ``data_scans`` field on the + corresponding responses. + + All the usual :class:`google.cloud.dataplex_v1.types.ListDataScansResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + def __init__(self, + method: Callable[..., Awaitable[datascans.ListDataScansResponse]], + request: datascans.ListDataScansRequest, + response: datascans.ListDataScansResponse, + *, + retry: OptionalAsyncRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = ()): + """Instantiates the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.dataplex_v1.types.ListDataScansRequest): + The initial request object. + response (google.cloud.dataplex_v1.types.ListDataScansResponse): + The initial response object. + retry (google.api_core.retry.AsyncRetry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + self._method = method + self._request = datascans.ListDataScansRequest(request) + self._response = response + self._retry = retry + self._timeout = timeout + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + async def pages(self) -> AsyncIterator[datascans.ListDataScansResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = await self._method(self._request, retry=self._retry, timeout=self._timeout, metadata=self._metadata) + yield self._response + def __aiter__(self) -> AsyncIterator[datascans.DataScan]: + async def async_generator(): + async for page in self.pages: + for response in page.data_scans: + yield response + + return async_generator() + + def __repr__(self) -> str: + return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) + + +class ListDataScanJobsPager: + """A pager for iterating through ``list_data_scan_jobs`` requests. 
+ + This class thinly wraps an initial + :class:`google.cloud.dataplex_v1.types.ListDataScanJobsResponse` object, and + provides an ``__iter__`` method to iterate through its + ``data_scan_jobs`` field. + + If there are more pages, the ``__iter__`` method will make additional + ``ListDataScanJobs`` requests and continue to iterate + through the ``data_scan_jobs`` field on the + corresponding responses. + + All the usual :class:`google.cloud.dataplex_v1.types.ListDataScanJobsResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + def __init__(self, + method: Callable[..., datascans.ListDataScanJobsResponse], + request: datascans.ListDataScanJobsRequest, + response: datascans.ListDataScanJobsResponse, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = ()): + """Instantiate the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.dataplex_v1.types.ListDataScanJobsRequest): + The initial request object. + response (google.cloud.dataplex_v1.types.ListDataScanJobsResponse): + The initial response object. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + self._method = method + self._request = datascans.ListDataScanJobsRequest(request) + self._response = response + self._retry = retry + self._timeout = timeout + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + def pages(self) -> Iterator[datascans.ListDataScanJobsResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = self._method(self._request, retry=self._retry, timeout=self._timeout, metadata=self._metadata) + yield self._response + + def __iter__(self) -> Iterator[datascans.DataScanJob]: + for page in self.pages: + yield from page.data_scan_jobs + + def __repr__(self) -> str: + return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) + + +class ListDataScanJobsAsyncPager: + """A pager for iterating through ``list_data_scan_jobs`` requests. + + This class thinly wraps an initial + :class:`google.cloud.dataplex_v1.types.ListDataScanJobsResponse` object, and + provides an ``__aiter__`` method to iterate through its + ``data_scan_jobs`` field. + + If there are more pages, the ``__aiter__`` method will make additional + ``ListDataScanJobs`` requests and continue to iterate + through the ``data_scan_jobs`` field on the + corresponding responses. + + All the usual :class:`google.cloud.dataplex_v1.types.ListDataScanJobsResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + def __init__(self, + method: Callable[..., Awaitable[datascans.ListDataScanJobsResponse]], + request: datascans.ListDataScanJobsRequest, + response: datascans.ListDataScanJobsResponse, + *, + retry: OptionalAsyncRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = ()): + """Instantiates the pager. 
+ + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.dataplex_v1.types.ListDataScanJobsRequest): + The initial request object. + response (google.cloud.dataplex_v1.types.ListDataScanJobsResponse): + The initial response object. + retry (google.api_core.retry.AsyncRetry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + self._method = method + self._request = datascans.ListDataScanJobsRequest(request) + self._response = response + self._retry = retry + self._timeout = timeout + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + async def pages(self) -> AsyncIterator[datascans.ListDataScanJobsResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = await self._method(self._request, retry=self._retry, timeout=self._timeout, metadata=self._metadata) + yield self._response + def __aiter__(self) -> AsyncIterator[datascans.DataScanJob]: + async def async_generator(): + async for page in self.pages: + for response in page.data_scan_jobs: + yield response + + return async_generator() + + def __repr__(self) -> str: + return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) diff --git a/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/data_scan_service/transports/README.rst b/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/data_scan_service/transports/README.rst new file mode 100644 index 000000000000..e27965be0a36 --- /dev/null +++ b/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/data_scan_service/transports/README.rst @@ -0,0 +1,9 @@ + +transport inheritance structure +_______________________________ + +`DataScanServiceTransport` is the ABC for all transports. +- public child `DataScanServiceGrpcTransport` for sync gRPC transport (defined in `grpc.py`). +- public child `DataScanServiceGrpcAsyncIOTransport` for async gRPC transport (defined in `grpc_asyncio.py`). +- private child `_BaseDataScanServiceRestTransport` for base REST transport with inner classes `_BaseMETHOD` (defined in `rest_base.py`). +- public child `DataScanServiceRestTransport` for sync REST transport with inner classes `METHOD` derived from the parent's corresponding `_BaseMETHOD` classes (defined in `rest.py`). diff --git a/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/data_scan_service/transports/__init__.py b/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/data_scan_service/transports/__init__.py new file mode 100644 index 000000000000..b6a6e3c5325a --- /dev/null +++ b/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/data_scan_service/transports/__init__.py @@ -0,0 +1,33 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
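
# The ListDataScans pagers defined above hide page_token bookkeeping: the
# ``pages`` property re-issues the request with the latest next_page_token,
# and ``__iter__``/``__aiter__`` flatten pages into individual DataScan
# messages. A minimal usage sketch (not part of this patch); the parent
# path below is a placeholder.
from google.cloud import dataplex_v1


def print_scan_names() -> None:
    client = dataplex_v1.DataScanServiceClient()
    # Iterating the returned pager yields DataScan messages across every
    # page, fetching follow-up pages only as the iteration reaches them.
    for scan in client.list_data_scans(
        parent="projects/my-project/locations/us-central1",  # placeholder
    ):
        print(scan.name)
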
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from collections import OrderedDict +from typing import Dict, Type + +from .base import DataScanServiceTransport +from .grpc import DataScanServiceGrpcTransport +from .grpc_asyncio import DataScanServiceGrpcAsyncIOTransport + + +# Compile a registry of transports. +_transport_registry = OrderedDict() # type: Dict[str, Type[DataScanServiceTransport]] +_transport_registry['grpc'] = DataScanServiceGrpcTransport +_transport_registry['grpc_asyncio'] = DataScanServiceGrpcAsyncIOTransport + +__all__ = ( + 'DataScanServiceTransport', + 'DataScanServiceGrpcTransport', + 'DataScanServiceGrpcAsyncIOTransport', +) diff --git a/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/data_scan_service/transports/base.py b/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/data_scan_service/transports/base.py new file mode 100644 index 000000000000..bbdd9e2dffed --- /dev/null +++ b/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/data_scan_service/transports/base.py @@ -0,0 +1,358 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
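
# transports/__init__.py above registers each concrete transport class in an
# OrderedDict keyed by name ('grpc', 'grpc_asyncio'). A standalone sketch of
# that lookup pattern; ``FakeTransport`` and ``get_transport_class`` are
# illustrative stand-ins, not names from the generated package.
from collections import OrderedDict
from typing import Dict, Optional, Type


class FakeTransport:
    """Stand-in for a transport base class in this sketch."""


_registry: Dict[str, Type[FakeTransport]] = OrderedDict()
_registry['grpc'] = FakeTransport


def get_transport_class(label: Optional[str] = None) -> Type[FakeTransport]:
    # With no label, fall back to the first registered transport, mirroring
    # how GAPIC clients choose a default.
    if label is None:
        return next(iter(_registry.values()))
    return _registry[label]


assert get_transport_class() is get_transport_class('grpc')
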
+# +import abc +from typing import Awaitable, Callable, Dict, Optional, Sequence, Union + +from google.cloud.dataplex_v1 import gapic_version as package_version + +import google.auth # type: ignore +import google.api_core +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry as retries +from google.api_core import operations_v1 +from google.auth import credentials as ga_credentials # type: ignore +from google.oauth2 import service_account # type: ignore + +from google.cloud.dataplex_v1.types import datascans +from google.cloud.location import locations_pb2 # type: ignore +from google.iam.v1 import iam_policy_pb2 # type: ignore +from google.iam.v1 import policy_pb2 # type: ignore +from google.longrunning import operations_pb2 # type: ignore + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(gapic_version=package_version.__version__) + + +class DataScanServiceTransport(abc.ABC): + """Abstract transport class for DataScanService.""" + + AUTH_SCOPES = ( + 'https://www.googleapis.com/auth/cloud-platform', + ) + + DEFAULT_HOST: str = 'dataplex.googleapis.com' + def __init__( + self, *, + host: str = DEFAULT_HOST, + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + api_audience: Optional[str] = None, + **kwargs, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to (default: 'dataplex.googleapis.com'). + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is mutually exclusive with credentials. + scopes (Optional[Sequence[str]]): A list of scopes. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + """ + + scopes_kwargs = {"scopes": scopes, "default_scopes": self.AUTH_SCOPES} + + # Save the scopes. + self._scopes = scopes + if not hasattr(self, "_ignore_credentials"): + self._ignore_credentials: bool = False + + # If no credentials are provided, then determine the appropriate + # defaults. 
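
# The credential-resolution block that follows enforces a fixed precedence:
# an explicit ``credentials`` object and a ``credentials_file`` are mutually
# exclusive; a file wins over application default credentials (ADC), which
# are the last resort. A standalone sketch of that rule with the google.auth
# calls replaced by labels so it runs anywhere; ``resolve`` is illustrative.
from typing import Any, Optional


def resolve(credentials: Optional[Any], credentials_file: Optional[str]) -> str:
    if credentials and credentials_file:
        raise ValueError(
            "'credentials_file' and 'credentials' are mutually exclusive")
    if credentials_file is not None:
        return 'loaded from file'      # google.auth.load_credentials_from_file
    if credentials is None:
        return 'application default'   # google.auth.default
    return 'explicit object'


assert resolve(object(), None) == 'explicit object'
assert resolve(None, 'key.json') == 'loaded from file'
assert resolve(None, None) == 'application default'
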
+ if credentials and credentials_file: + raise core_exceptions.DuplicateCredentialArgs("'credentials_file' and 'credentials' are mutually exclusive") + + if credentials_file is not None: + credentials, _ = google.auth.load_credentials_from_file( + credentials_file, + **scopes_kwargs, + quota_project_id=quota_project_id + ) + elif credentials is None and not self._ignore_credentials: + credentials, _ = google.auth.default(**scopes_kwargs, quota_project_id=quota_project_id) + # Don't apply audience if the credentials file passed from user. + if hasattr(credentials, "with_gdch_audience"): + credentials = credentials.with_gdch_audience(api_audience if api_audience else host) + + # If the credentials are service account credentials, then always try to use self signed JWT. + if always_use_jwt_access and isinstance(credentials, service_account.Credentials) and hasattr(service_account.Credentials, "with_always_use_jwt_access"): + credentials = credentials.with_always_use_jwt_access(True) + + # Save the credentials. + self._credentials = credentials + + # Save the hostname. Default to port 443 (HTTPS) if none is specified. + if ':' not in host: + host += ':443' + self._host = host + + @property + def host(self): + return self._host + + def _prep_wrapped_messages(self, client_info): + # Precompute the wrapped methods. + self._wrapped_methods = { + self.create_data_scan: gapic_v1.method.wrap_method( + self.create_data_scan, + default_timeout=None, + client_info=client_info, + ), + self.update_data_scan: gapic_v1.method.wrap_method( + self.update_data_scan, + default_timeout=None, + client_info=client_info, + ), + self.delete_data_scan: gapic_v1.method.wrap_method( + self.delete_data_scan, + default_timeout=None, + client_info=client_info, + ), + self.get_data_scan: gapic_v1.method.wrap_method( + self.get_data_scan, + default_timeout=None, + client_info=client_info, + ), + self.list_data_scans: gapic_v1.method.wrap_method( + self.list_data_scans, + default_timeout=None, + client_info=client_info, + ), + self.run_data_scan: gapic_v1.method.wrap_method( + self.run_data_scan, + default_timeout=None, + client_info=client_info, + ), + self.get_data_scan_job: gapic_v1.method.wrap_method( + self.get_data_scan_job, + default_timeout=None, + client_info=client_info, + ), + self.list_data_scan_jobs: gapic_v1.method.wrap_method( + self.list_data_scan_jobs, + default_timeout=None, + client_info=client_info, + ), + self.generate_data_quality_rules: gapic_v1.method.wrap_method( + self.generate_data_quality_rules, + default_timeout=None, + client_info=client_info, + ), + self.get_location: gapic_v1.method.wrap_method( + self.get_location, + default_timeout=None, + client_info=client_info, + ), + self.list_locations: gapic_v1.method.wrap_method( + self.list_locations, + default_timeout=None, + client_info=client_info, + ), + self.cancel_operation: gapic_v1.method.wrap_method( + self.cancel_operation, + default_timeout=None, + client_info=client_info, + ), + self.delete_operation: gapic_v1.method.wrap_method( + self.delete_operation, + default_timeout=None, + client_info=client_info, + ), + self.get_operation: gapic_v1.method.wrap_method( + self.get_operation, + default_timeout=None, + client_info=client_info, + ), + self.list_operations: gapic_v1.method.wrap_method( + self.list_operations, + default_timeout=None, + client_info=client_info, + ), + } + + def close(self): + """Closes resources associated with the transport. + + .. 
warning:: + Only call this method if the transport is NOT shared + with other clients - this may cause errors in other clients! + """ + raise NotImplementedError() + + @property + def operations_client(self): + """Return the client designed to process long-running operations.""" + raise NotImplementedError() + + @property + def create_data_scan(self) -> Callable[ + [datascans.CreateDataScanRequest], + Union[ + operations_pb2.Operation, + Awaitable[operations_pb2.Operation] + ]]: + raise NotImplementedError() + + @property + def update_data_scan(self) -> Callable[ + [datascans.UpdateDataScanRequest], + Union[ + operations_pb2.Operation, + Awaitable[operations_pb2.Operation] + ]]: + raise NotImplementedError() + + @property + def delete_data_scan(self) -> Callable[ + [datascans.DeleteDataScanRequest], + Union[ + operations_pb2.Operation, + Awaitable[operations_pb2.Operation] + ]]: + raise NotImplementedError() + + @property + def get_data_scan(self) -> Callable[ + [datascans.GetDataScanRequest], + Union[ + datascans.DataScan, + Awaitable[datascans.DataScan] + ]]: + raise NotImplementedError() + + @property + def list_data_scans(self) -> Callable[ + [datascans.ListDataScansRequest], + Union[ + datascans.ListDataScansResponse, + Awaitable[datascans.ListDataScansResponse] + ]]: + raise NotImplementedError() + + @property + def run_data_scan(self) -> Callable[ + [datascans.RunDataScanRequest], + Union[ + datascans.RunDataScanResponse, + Awaitable[datascans.RunDataScanResponse] + ]]: + raise NotImplementedError() + + @property + def get_data_scan_job(self) -> Callable[ + [datascans.GetDataScanJobRequest], + Union[ + datascans.DataScanJob, + Awaitable[datascans.DataScanJob] + ]]: + raise NotImplementedError() + + @property + def list_data_scan_jobs(self) -> Callable[ + [datascans.ListDataScanJobsRequest], + Union[ + datascans.ListDataScanJobsResponse, + Awaitable[datascans.ListDataScanJobsResponse] + ]]: + raise NotImplementedError() + + @property + def generate_data_quality_rules(self) -> Callable[ + [datascans.GenerateDataQualityRulesRequest], + Union[ + datascans.GenerateDataQualityRulesResponse, + Awaitable[datascans.GenerateDataQualityRulesResponse] + ]]: + raise NotImplementedError() + + @property + def list_operations( + self, + ) -> Callable[ + [operations_pb2.ListOperationsRequest], + Union[operations_pb2.ListOperationsResponse, Awaitable[operations_pb2.ListOperationsResponse]], + ]: + raise NotImplementedError() + + @property + def get_operation( + self, + ) -> Callable[ + [operations_pb2.GetOperationRequest], + Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], + ]: + raise NotImplementedError() + + @property + def cancel_operation( + self, + ) -> Callable[ + [operations_pb2.CancelOperationRequest], + None, + ]: + raise NotImplementedError() + + @property + def delete_operation( + self, + ) -> Callable[ + [operations_pb2.DeleteOperationRequest], + None, + ]: + raise NotImplementedError() + + @property + def get_location(self, + ) -> Callable[ + [locations_pb2.GetLocationRequest], + Union[locations_pb2.Location, Awaitable[locations_pb2.Location]], + ]: + raise NotImplementedError() + + @property + def list_locations(self, + ) -> Callable[ + [locations_pb2.ListLocationsRequest], + Union[locations_pb2.ListLocationsResponse, Awaitable[locations_pb2.ListLocationsResponse]], + ]: + raise NotImplementedError() + + @property + def kind(self) -> str: + raise NotImplementedError() + + +__all__ = ( + 'DataScanServiceTransport', +) diff --git 
a/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/data_scan_service/transports/grpc.py b/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/data_scan_service/transports/grpc.py new file mode 100644 index 000000000000..7fa57f382e72 --- /dev/null +++ b/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/data_scan_service/transports/grpc.py @@ -0,0 +1,615 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +import warnings +from typing import Callable, Dict, Optional, Sequence, Tuple, Union + +from google.api_core import grpc_helpers +from google.api_core import operations_v1 +from google.api_core import gapic_v1 +import google.auth # type: ignore +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore + +import grpc # type: ignore + +from google.cloud.dataplex_v1.types import datascans +from google.cloud.location import locations_pb2 # type: ignore +from google.iam.v1 import iam_policy_pb2 # type: ignore +from google.iam.v1 import policy_pb2 # type: ignore +from google.longrunning import operations_pb2 # type: ignore +from .base import DataScanServiceTransport, DEFAULT_CLIENT_INFO + + +class DataScanServiceGrpcTransport(DataScanServiceTransport): + """gRPC backend transport for DataScanService. + + DataScanService manages DataScan resources which can be + configured to run various types of data scanning workload and + generate enriched metadata (e.g. Data Profile, Data Quality) for + the data source. + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. + + It sends protocol buffers over the wire using gRPC (which is built on + top of HTTP/2); the ``grpcio`` package must be installed. + """ + _stubs: Dict[str, Callable] + + def __init__(self, *, + host: str = 'dataplex.googleapis.com', + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + channel: Optional[Union[grpc.Channel, Callable[..., grpc.Channel]]] = None, + api_mtls_endpoint: Optional[str] = None, + client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None, + client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + api_audience: Optional[str] = None, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to (default: 'dataplex.googleapis.com'). + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. 
These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + This argument is ignored if a ``channel`` instance is provided. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if a ``channel`` instance is provided. + scopes (Optional(Sequence[str])): A list of scopes. This argument is + ignored if a ``channel`` instance is provided. + channel (Optional[Union[grpc.Channel, Callable[..., grpc.Channel]]]): + A ``Channel`` instance through which to make calls, or a Callable + that constructs and returns one. If set to None, ``self.create_channel`` + is used to create the channel. If a Callable is given, it will be called + with the same arguments as used in ``self.create_channel``. + api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint. + If provided, it overrides the ``host`` argument and tries to create + a mutual TLS channel with client SSL credentials from + ``client_cert_source`` or application default SSL credentials. + client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]): + Deprecated. A callback to provide client SSL certificate bytes and + private key bytes, both in PEM format. It is ignored if + ``api_mtls_endpoint`` is None. + ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials + for the grpc channel. It is ignored if a ``channel`` instance is provided. + client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]): + A callback to provide client certificate bytes and private key bytes, + both in PEM format. It is used to configure a mutual TLS channel. It is + ignored if a ``channel`` instance or ``ssl_channel_credentials`` is provided. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport + creation failed for any reason. + google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` + and ``credentials_file`` are passed. + """ + self._grpc_channel = None + self._ssl_channel_credentials = ssl_channel_credentials + self._stubs: Dict[str, Callable] = {} + self._operations_client: Optional[operations_v1.OperationsClient] = None + + if api_mtls_endpoint: + warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning) + if client_cert_source: + warnings.warn("client_cert_source is deprecated", DeprecationWarning) + + if isinstance(channel, grpc.Channel): + # Ignore credentials if a channel was passed. + credentials = None + self._ignore_credentials = True + # If a channel was explicitly provided, set it. + self._grpc_channel = channel + self._ssl_channel_credentials = None + + else: + if api_mtls_endpoint: + host = api_mtls_endpoint + + # Create SSL credentials with client_cert_source or application + # default SSL credentials. 
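
# At this step the transport converts a client-certificate callback into
# grpc.ssl_channel_credentials for a mutual-TLS channel. A sketch of what a
# caller supplies: a zero-argument callable returning (certificate, key)
# bytes in PEM format. The file paths below are placeholders.
from typing import Tuple


def my_cert_source() -> Tuple[bytes, bytes]:
    # Illustrative only: load a client certificate and private key from disk.
    with open('client-cert.pem', 'rb') as cert_file:
        cert = cert_file.read()
    with open('client-key.pem', 'rb') as key_file:
        key = key_file.read()
    return cert, key


# transport = DataScanServiceGrpcTransport(
#     client_cert_source_for_mtls=my_cert_source,
# )
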
+ if client_cert_source: + cert, key = client_cert_source() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + else: + self._ssl_channel_credentials = SslCredentials().ssl_credentials + + else: + if client_cert_source_for_mtls and not ssl_channel_credentials: + cert, key = client_cert_source_for_mtls() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + + # The base transport sets the host, credentials and scopes + super().__init__( + host=host, + credentials=credentials, + credentials_file=credentials_file, + scopes=scopes, + quota_project_id=quota_project_id, + client_info=client_info, + always_use_jwt_access=always_use_jwt_access, + api_audience=api_audience, + ) + + if not self._grpc_channel: + # initialize with the provided callable or the default channel + channel_init = channel or type(self).create_channel + self._grpc_channel = channel_init( + self._host, + # use the credentials which are saved + credentials=self._credentials, + # Set ``credentials_file`` to ``None`` here as + # the credentials that we saved earlier should be used. + credentials_file=None, + scopes=self._scopes, + ssl_credentials=self._ssl_channel_credentials, + quota_project_id=quota_project_id, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + # Wrap messages. This must be done after self._grpc_channel exists + self._prep_wrapped_messages(client_info) + + @classmethod + def create_channel(cls, + host: str = 'dataplex.googleapis.com', + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + **kwargs) -> grpc.Channel: + """Create and return a gRPC channel object. + Args: + host (Optional[str]): The host for the channel to use. + credentials (Optional[~.Credentials]): The + authorization credentials to attach to requests. These + credentials identify this application to the service. If + none are specified, the client will attempt to ascertain + the credentials from the environment. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is mutually exclusive with credentials. + scopes (Optional[Sequence[str]]): A optional list of scopes needed for this + service. These are only used when credentials are not specified and + are passed to :func:`google.auth.default`. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + kwargs (Optional[dict]): Keyword arguments, which are passed to the + channel creation. + Returns: + grpc.Channel: A gRPC channel object. + + Raises: + google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` + and ``credentials_file`` are passed. + """ + + return grpc_helpers.create_channel( + host, + credentials=credentials, + credentials_file=credentials_file, + quota_project_id=quota_project_id, + default_scopes=cls.AUTH_SCOPES, + scopes=scopes, + default_host=cls.DEFAULT_HOST, + **kwargs + ) + + @property + def grpc_channel(self) -> grpc.Channel: + """Return the channel designed to connect to this service. + """ + return self._grpc_channel + + @property + def operations_client(self) -> operations_v1.OperationsClient: + """Create the client designed to process long-running operations. 
+ + This property caches on the instance; repeated calls return the same + client. + """ + # Quick check: Only create a new client if we do not already have one. + if self._operations_client is None: + self._operations_client = operations_v1.OperationsClient( + self.grpc_channel + ) + + # Return the client from cache. + return self._operations_client + + @property + def create_data_scan(self) -> Callable[ + [datascans.CreateDataScanRequest], + operations_pb2.Operation]: + r"""Return a callable for the create data scan method over gRPC. + + Creates a DataScan resource. + + Returns: + Callable[[~.CreateDataScanRequest], + ~.Operation]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'create_data_scan' not in self._stubs: + self._stubs['create_data_scan'] = self.grpc_channel.unary_unary( + '/google.cloud.dataplex.v1.DataScanService/CreateDataScan', + request_serializer=datascans.CreateDataScanRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs['create_data_scan'] + + @property + def update_data_scan(self) -> Callable[ + [datascans.UpdateDataScanRequest], + operations_pb2.Operation]: + r"""Return a callable for the update data scan method over gRPC. + + Updates a DataScan resource. + + Returns: + Callable[[~.UpdateDataScanRequest], + ~.Operation]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'update_data_scan' not in self._stubs: + self._stubs['update_data_scan'] = self.grpc_channel.unary_unary( + '/google.cloud.dataplex.v1.DataScanService/UpdateDataScan', + request_serializer=datascans.UpdateDataScanRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs['update_data_scan'] + + @property + def delete_data_scan(self) -> Callable[ + [datascans.DeleteDataScanRequest], + operations_pb2.Operation]: + r"""Return a callable for the delete data scan method over gRPC. + + Deletes a DataScan resource. + + Returns: + Callable[[~.DeleteDataScanRequest], + ~.Operation]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'delete_data_scan' not in self._stubs: + self._stubs['delete_data_scan'] = self.grpc_channel.unary_unary( + '/google.cloud.dataplex.v1.DataScanService/DeleteDataScan', + request_serializer=datascans.DeleteDataScanRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs['delete_data_scan'] + + @property + def get_data_scan(self) -> Callable[ + [datascans.GetDataScanRequest], + datascans.DataScan]: + r"""Return a callable for the get data scan method over gRPC. + + Gets a DataScan resource. + + Returns: + Callable[[~.GetDataScanRequest], + ~.DataScan]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. 
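
# Each RPC property in this transport builds its gRPC stub once and caches
# it in self._stubs, so repeated attribute access reuses a single callable
# per method path. The same memoization pattern in miniature; ``make_stub``
# stands in for grpc_channel.unary_unary and is illustrative only.
from typing import Callable, Dict

_stubs: Dict[str, Callable[[], str]] = {}


def make_stub(path: str) -> Callable[[], str]:
    return lambda: 'called {}'.format(path)


def get_stub(path: str) -> Callable[[], str]:
    if path not in _stubs:
        _stubs[path] = make_stub(path)
    return _stubs[path]


# The second lookup hits the cache and returns the identical object.
assert get_stub('/pkg.Service/Method') is get_stub('/pkg.Service/Method')
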
+ # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'get_data_scan' not in self._stubs: + self._stubs['get_data_scan'] = self.grpc_channel.unary_unary( + '/google.cloud.dataplex.v1.DataScanService/GetDataScan', + request_serializer=datascans.GetDataScanRequest.serialize, + response_deserializer=datascans.DataScan.deserialize, + ) + return self._stubs['get_data_scan'] + + @property + def list_data_scans(self) -> Callable[ + [datascans.ListDataScansRequest], + datascans.ListDataScansResponse]: + r"""Return a callable for the list data scans method over gRPC. + + Lists DataScans. + + Returns: + Callable[[~.ListDataScansRequest], + ~.ListDataScansResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'list_data_scans' not in self._stubs: + self._stubs['list_data_scans'] = self.grpc_channel.unary_unary( + '/google.cloud.dataplex.v1.DataScanService/ListDataScans', + request_serializer=datascans.ListDataScansRequest.serialize, + response_deserializer=datascans.ListDataScansResponse.deserialize, + ) + return self._stubs['list_data_scans'] + + @property + def run_data_scan(self) -> Callable[ + [datascans.RunDataScanRequest], + datascans.RunDataScanResponse]: + r"""Return a callable for the run data scan method over gRPC. + + Runs an on-demand execution of a DataScan + + Returns: + Callable[[~.RunDataScanRequest], + ~.RunDataScanResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'run_data_scan' not in self._stubs: + self._stubs['run_data_scan'] = self.grpc_channel.unary_unary( + '/google.cloud.dataplex.v1.DataScanService/RunDataScan', + request_serializer=datascans.RunDataScanRequest.serialize, + response_deserializer=datascans.RunDataScanResponse.deserialize, + ) + return self._stubs['run_data_scan'] + + @property + def get_data_scan_job(self) -> Callable[ + [datascans.GetDataScanJobRequest], + datascans.DataScanJob]: + r"""Return a callable for the get data scan job method over gRPC. + + Gets a DataScanJob resource. + + Returns: + Callable[[~.GetDataScanJobRequest], + ~.DataScanJob]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'get_data_scan_job' not in self._stubs: + self._stubs['get_data_scan_job'] = self.grpc_channel.unary_unary( + '/google.cloud.dataplex.v1.DataScanService/GetDataScanJob', + request_serializer=datascans.GetDataScanJobRequest.serialize, + response_deserializer=datascans.DataScanJob.deserialize, + ) + return self._stubs['get_data_scan_job'] + + @property + def list_data_scan_jobs(self) -> Callable[ + [datascans.ListDataScanJobsRequest], + datascans.ListDataScanJobsResponse]: + r"""Return a callable for the list data scan jobs method over gRPC. + + Lists DataScanJobs under the given DataScan. 
+ + Returns: + Callable[[~.ListDataScanJobsRequest], + ~.ListDataScanJobsResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'list_data_scan_jobs' not in self._stubs: + self._stubs['list_data_scan_jobs'] = self.grpc_channel.unary_unary( + '/google.cloud.dataplex.v1.DataScanService/ListDataScanJobs', + request_serializer=datascans.ListDataScanJobsRequest.serialize, + response_deserializer=datascans.ListDataScanJobsResponse.deserialize, + ) + return self._stubs['list_data_scan_jobs'] + + @property + def generate_data_quality_rules(self) -> Callable[ + [datascans.GenerateDataQualityRulesRequest], + datascans.GenerateDataQualityRulesResponse]: + r"""Return a callable for the generate data quality rules method over gRPC. + + Generates recommended data quality rules based on the + results of a data profiling scan. + + Use the recommendations to build rules for a data + quality scan. + + Returns: + Callable[[~.GenerateDataQualityRulesRequest], + ~.GenerateDataQualityRulesResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'generate_data_quality_rules' not in self._stubs: + self._stubs['generate_data_quality_rules'] = self.grpc_channel.unary_unary( + '/google.cloud.dataplex.v1.DataScanService/GenerateDataQualityRules', + request_serializer=datascans.GenerateDataQualityRulesRequest.serialize, + response_deserializer=datascans.GenerateDataQualityRulesResponse.deserialize, + ) + return self._stubs['generate_data_quality_rules'] + + def close(self): + self.grpc_channel.close() + + @property + def delete_operation( + self, + ) -> Callable[[operations_pb2.DeleteOperationRequest], None]: + r"""Return a callable for the delete_operation method over gRPC. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "delete_operation" not in self._stubs: + self._stubs["delete_operation"] = self.grpc_channel.unary_unary( + "/google.longrunning.Operations/DeleteOperation", + request_serializer=operations_pb2.DeleteOperationRequest.SerializeToString, + response_deserializer=None, + ) + return self._stubs["delete_operation"] + + @property + def cancel_operation( + self, + ) -> Callable[[operations_pb2.CancelOperationRequest], None]: + r"""Return a callable for the cancel_operation method over gRPC. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "cancel_operation" not in self._stubs: + self._stubs["cancel_operation"] = self.grpc_channel.unary_unary( + "/google.longrunning.Operations/CancelOperation", + request_serializer=operations_pb2.CancelOperationRequest.SerializeToString, + response_deserializer=None, + ) + return self._stubs["cancel_operation"] + + @property + def get_operation( + self, + ) -> Callable[[operations_pb2.GetOperationRequest], operations_pb2.Operation]: + r"""Return a callable for the get_operation method over gRPC. 
+ """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_operation" not in self._stubs: + self._stubs["get_operation"] = self.grpc_channel.unary_unary( + "/google.longrunning.Operations/GetOperation", + request_serializer=operations_pb2.GetOperationRequest.SerializeToString, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["get_operation"] + + @property + def list_operations( + self, + ) -> Callable[[operations_pb2.ListOperationsRequest], operations_pb2.ListOperationsResponse]: + r"""Return a callable for the list_operations method over gRPC. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "list_operations" not in self._stubs: + self._stubs["list_operations"] = self.grpc_channel.unary_unary( + "/google.longrunning.Operations/ListOperations", + request_serializer=operations_pb2.ListOperationsRequest.SerializeToString, + response_deserializer=operations_pb2.ListOperationsResponse.FromString, + ) + return self._stubs["list_operations"] + + @property + def list_locations( + self, + ) -> Callable[[locations_pb2.ListLocationsRequest], locations_pb2.ListLocationsResponse]: + r"""Return a callable for the list locations method over gRPC. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "list_locations" not in self._stubs: + self._stubs["list_locations"] = self.grpc_channel.unary_unary( + "/google.cloud.location.Locations/ListLocations", + request_serializer=locations_pb2.ListLocationsRequest.SerializeToString, + response_deserializer=locations_pb2.ListLocationsResponse.FromString, + ) + return self._stubs["list_locations"] + + @property + def get_location( + self, + ) -> Callable[[locations_pb2.GetLocationRequest], locations_pb2.Location]: + r"""Return a callable for the list locations method over gRPC. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_location" not in self._stubs: + self._stubs["get_location"] = self.grpc_channel.unary_unary( + "/google.cloud.location.Locations/GetLocation", + request_serializer=locations_pb2.GetLocationRequest.SerializeToString, + response_deserializer=locations_pb2.Location.FromString, + ) + return self._stubs["get_location"] + + @property + def kind(self) -> str: + return "grpc" + + +__all__ = ( + 'DataScanServiceGrpcTransport', +) diff --git a/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/data_scan_service/transports/grpc_asyncio.py b/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/data_scan_service/transports/grpc_asyncio.py new file mode 100644 index 000000000000..d93e93a9d327 --- /dev/null +++ b/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/data_scan_service/transports/grpc_asyncio.py @@ -0,0 +1,706 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +import inspect +import warnings +from typing import Awaitable, Callable, Dict, Optional, Sequence, Tuple, Union + +from google.api_core import gapic_v1 +from google.api_core import grpc_helpers_async +from google.api_core import exceptions as core_exceptions +from google.api_core import retry_async as retries +from google.api_core import operations_v1 +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore + +import grpc # type: ignore +from grpc.experimental import aio # type: ignore + +from google.cloud.dataplex_v1.types import datascans +from google.cloud.location import locations_pb2 # type: ignore +from google.iam.v1 import iam_policy_pb2 # type: ignore +from google.iam.v1 import policy_pb2 # type: ignore +from google.longrunning import operations_pb2 # type: ignore +from .base import DataScanServiceTransport, DEFAULT_CLIENT_INFO +from .grpc import DataScanServiceGrpcTransport + + +class DataScanServiceGrpcAsyncIOTransport(DataScanServiceTransport): + """gRPC AsyncIO backend transport for DataScanService. + + DataScanService manages DataScan resources which can be + configured to run various types of data scanning workload and + generate enriched metadata (e.g. Data Profile, Data Quality) for + the data source. + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. + + It sends protocol buffers over the wire using gRPC (which is built on + top of HTTP/2); the ``grpcio`` package must be installed. + """ + + _grpc_channel: aio.Channel + _stubs: Dict[str, Callable] = {} + + @classmethod + def create_channel(cls, + host: str = 'dataplex.googleapis.com', + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + **kwargs) -> aio.Channel: + """Create and return a gRPC AsyncIO channel object. + Args: + host (Optional[str]): The host for the channel to use. + credentials (Optional[~.Credentials]): The + authorization credentials to attach to requests. These + credentials identify this application to the service. If + none are specified, the client will attempt to ascertain + the credentials from the environment. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + scopes (Optional[Sequence[str]]): A optional list of scopes needed for this + service. These are only used when credentials are not specified and + are passed to :func:`google.auth.default`. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + kwargs (Optional[dict]): Keyword arguments, which are passed to the + channel creation. + Returns: + aio.Channel: A gRPC AsyncIO channel object. 
+ """ + + return grpc_helpers_async.create_channel( + host, + credentials=credentials, + credentials_file=credentials_file, + quota_project_id=quota_project_id, + default_scopes=cls.AUTH_SCOPES, + scopes=scopes, + default_host=cls.DEFAULT_HOST, + **kwargs + ) + + def __init__(self, *, + host: str = 'dataplex.googleapis.com', + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + channel: Optional[Union[aio.Channel, Callable[..., aio.Channel]]] = None, + api_mtls_endpoint: Optional[str] = None, + client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None, + client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + api_audience: Optional[str] = None, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to (default: 'dataplex.googleapis.com'). + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + This argument is ignored if a ``channel`` instance is provided. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if a ``channel`` instance is provided. + scopes (Optional[Sequence[str]]): A optional list of scopes needed for this + service. These are only used when credentials are not specified and + are passed to :func:`google.auth.default`. + channel (Optional[Union[aio.Channel, Callable[..., aio.Channel]]]): + A ``Channel`` instance through which to make calls, or a Callable + that constructs and returns one. If set to None, ``self.create_channel`` + is used to create the channel. If a Callable is given, it will be called + with the same arguments as used in ``self.create_channel``. + api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint. + If provided, it overrides the ``host`` argument and tries to create + a mutual TLS channel with client SSL credentials from + ``client_cert_source`` or application default SSL credentials. + client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]): + Deprecated. A callback to provide client SSL certificate bytes and + private key bytes, both in PEM format. It is ignored if + ``api_mtls_endpoint`` is None. + ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials + for the grpc channel. It is ignored if a ``channel`` instance is provided. + client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]): + A callback to provide client certificate bytes and private key bytes, + both in PEM format. It is used to configure a mutual TLS channel. It is + ignored if a ``channel`` instance or ``ssl_channel_credentials`` is provided. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. 
+ Generally, you only need to set this if you're developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + + Raises: + google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport + creation failed for any reason. + google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` + and ``credentials_file`` are passed. + """ + self._grpc_channel = None + self._ssl_channel_credentials = ssl_channel_credentials + self._stubs: Dict[str, Callable] = {} + self._operations_client: Optional[operations_v1.OperationsAsyncClient] = None + + if api_mtls_endpoint: + warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning) + if client_cert_source: + warnings.warn("client_cert_source is deprecated", DeprecationWarning) + + if isinstance(channel, aio.Channel): + # Ignore credentials if a channel was passed. + credentials = None + self._ignore_credentials = True + # If a channel was explicitly provided, set it. + self._grpc_channel = channel + self._ssl_channel_credentials = None + else: + if api_mtls_endpoint: + host = api_mtls_endpoint + + # Create SSL credentials with client_cert_source or application + # default SSL credentials. + if client_cert_source: + cert, key = client_cert_source() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + else: + self._ssl_channel_credentials = SslCredentials().ssl_credentials + + else: + if client_cert_source_for_mtls and not ssl_channel_credentials: + cert, key = client_cert_source_for_mtls() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + + # The base transport sets the host, credentials and scopes + super().__init__( + host=host, + credentials=credentials, + credentials_file=credentials_file, + scopes=scopes, + quota_project_id=quota_project_id, + client_info=client_info, + always_use_jwt_access=always_use_jwt_access, + api_audience=api_audience, + ) + + if not self._grpc_channel: + # initialize with the provided callable or the default channel + channel_init = channel or type(self).create_channel + self._grpc_channel = channel_init( + self._host, + # use the credentials which are saved + credentials=self._credentials, + # Set ``credentials_file`` to ``None`` here as + # the credentials that we saved earlier should be used. + credentials_file=None, + scopes=self._scopes, + ssl_credentials=self._ssl_channel_credentials, + quota_project_id=quota_project_id, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + # Wrap messages. This must be done after self._grpc_channel exists + self._wrap_with_kind = "kind" in inspect.signature(gapic_v1.method_async.wrap_method).parameters + self._prep_wrapped_messages(client_info) + + @property + def grpc_channel(self) -> aio.Channel: + """Create the channel designed to connect to this service. + + This property caches on the instance; repeated calls return + the same channel. + """ + # Return the channel from cache. + return self._grpc_channel + + @property + def operations_client(self) -> operations_v1.OperationsAsyncClient: + """Create the client designed to process long-running operations. + + This property caches on the instance; repeated calls return the same + client. + """ + # Quick check: Only create a new client if we do not already have one. 
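
# The operations client created below is the plumbing that lets long-running
# RPCs such as CreateDataScan be polled to completion; at the public-client
# level it surfaces as a future-like object. A usage sketch with the sync
# client; the parent path and scan id are placeholders, and a real DataScan
# would also need its data source and scan spec filled in.
from google.cloud import dataplex_v1


def create_and_wait() -> dataplex_v1.DataScan:
    client = dataplex_v1.DataScanServiceClient()
    operation = client.create_data_scan(
        parent='projects/my-project/locations/us-central1',  # placeholder
        data_scan=dataplex_v1.DataScan(),  # minimal, for illustration only
        data_scan_id='my-scan',            # placeholder
    )
    # result() blocks, polling google.longrunning.Operations until the
    # operation completes, then returns the resulting DataScan.
    return operation.result()
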
+ if self._operations_client is None: + self._operations_client = operations_v1.OperationsAsyncClient( + self.grpc_channel + ) + + # Return the client from cache. + return self._operations_client + + @property + def create_data_scan(self) -> Callable[ + [datascans.CreateDataScanRequest], + Awaitable[operations_pb2.Operation]]: + r"""Return a callable for the create data scan method over gRPC. + + Creates a DataScan resource. + + Returns: + Callable[[~.CreateDataScanRequest], + Awaitable[~.Operation]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'create_data_scan' not in self._stubs: + self._stubs['create_data_scan'] = self.grpc_channel.unary_unary( + '/google.cloud.dataplex.v1.DataScanService/CreateDataScan', + request_serializer=datascans.CreateDataScanRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs['create_data_scan'] + + @property + def update_data_scan(self) -> Callable[ + [datascans.UpdateDataScanRequest], + Awaitable[operations_pb2.Operation]]: + r"""Return a callable for the update data scan method over gRPC. + + Updates a DataScan resource. + + Returns: + Callable[[~.UpdateDataScanRequest], + Awaitable[~.Operation]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'update_data_scan' not in self._stubs: + self._stubs['update_data_scan'] = self.grpc_channel.unary_unary( + '/google.cloud.dataplex.v1.DataScanService/UpdateDataScan', + request_serializer=datascans.UpdateDataScanRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs['update_data_scan'] + + @property + def delete_data_scan(self) -> Callable[ + [datascans.DeleteDataScanRequest], + Awaitable[operations_pb2.Operation]]: + r"""Return a callable for the delete data scan method over gRPC. + + Deletes a DataScan resource. + + Returns: + Callable[[~.DeleteDataScanRequest], + Awaitable[~.Operation]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'delete_data_scan' not in self._stubs: + self._stubs['delete_data_scan'] = self.grpc_channel.unary_unary( + '/google.cloud.dataplex.v1.DataScanService/DeleteDataScan', + request_serializer=datascans.DeleteDataScanRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs['delete_data_scan'] + + @property + def get_data_scan(self) -> Callable[ + [datascans.GetDataScanRequest], + Awaitable[datascans.DataScan]]: + r"""Return a callable for the get data scan method over gRPC. + + Gets a DataScan resource. + + Returns: + Callable[[~.GetDataScanRequest], + Awaitable[~.DataScan]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if 'get_data_scan' not in self._stubs: + self._stubs['get_data_scan'] = self.grpc_channel.unary_unary( + '/google.cloud.dataplex.v1.DataScanService/GetDataScan', + request_serializer=datascans.GetDataScanRequest.serialize, + response_deserializer=datascans.DataScan.deserialize, + ) + return self._stubs['get_data_scan'] + + @property + def list_data_scans(self) -> Callable[ + [datascans.ListDataScansRequest], + Awaitable[datascans.ListDataScansResponse]]: + r"""Return a callable for the list data scans method over gRPC. + + Lists DataScans. + + Returns: + Callable[[~.ListDataScansRequest], + Awaitable[~.ListDataScansResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'list_data_scans' not in self._stubs: + self._stubs['list_data_scans'] = self.grpc_channel.unary_unary( + '/google.cloud.dataplex.v1.DataScanService/ListDataScans', + request_serializer=datascans.ListDataScansRequest.serialize, + response_deserializer=datascans.ListDataScansResponse.deserialize, + ) + return self._stubs['list_data_scans'] + + @property + def run_data_scan(self) -> Callable[ + [datascans.RunDataScanRequest], + Awaitable[datascans.RunDataScanResponse]]: + r"""Return a callable for the run data scan method over gRPC. + + Runs an on-demand execution of a DataScan + + Returns: + Callable[[~.RunDataScanRequest], + Awaitable[~.RunDataScanResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'run_data_scan' not in self._stubs: + self._stubs['run_data_scan'] = self.grpc_channel.unary_unary( + '/google.cloud.dataplex.v1.DataScanService/RunDataScan', + request_serializer=datascans.RunDataScanRequest.serialize, + response_deserializer=datascans.RunDataScanResponse.deserialize, + ) + return self._stubs['run_data_scan'] + + @property + def get_data_scan_job(self) -> Callable[ + [datascans.GetDataScanJobRequest], + Awaitable[datascans.DataScanJob]]: + r"""Return a callable for the get data scan job method over gRPC. + + Gets a DataScanJob resource. + + Returns: + Callable[[~.GetDataScanJobRequest], + Awaitable[~.DataScanJob]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'get_data_scan_job' not in self._stubs: + self._stubs['get_data_scan_job'] = self.grpc_channel.unary_unary( + '/google.cloud.dataplex.v1.DataScanService/GetDataScanJob', + request_serializer=datascans.GetDataScanJobRequest.serialize, + response_deserializer=datascans.DataScanJob.deserialize, + ) + return self._stubs['get_data_scan_job'] + + @property + def list_data_scan_jobs(self) -> Callable[ + [datascans.ListDataScanJobsRequest], + Awaitable[datascans.ListDataScanJobsResponse]]: + r"""Return a callable for the list data scan jobs method over gRPC. + + Lists DataScanJobs under the given DataScan. 
+ + Returns: + Callable[[~.ListDataScanJobsRequest], + Awaitable[~.ListDataScanJobsResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'list_data_scan_jobs' not in self._stubs: + self._stubs['list_data_scan_jobs'] = self.grpc_channel.unary_unary( + '/google.cloud.dataplex.v1.DataScanService/ListDataScanJobs', + request_serializer=datascans.ListDataScanJobsRequest.serialize, + response_deserializer=datascans.ListDataScanJobsResponse.deserialize, + ) + return self._stubs['list_data_scan_jobs'] + + @property + def generate_data_quality_rules(self) -> Callable[ + [datascans.GenerateDataQualityRulesRequest], + Awaitable[datascans.GenerateDataQualityRulesResponse]]: + r"""Return a callable for the generate data quality rules method over gRPC. + + Generates recommended data quality rules based on the + results of a data profiling scan. + + Use the recommendations to build rules for a data + quality scan. + + Returns: + Callable[[~.GenerateDataQualityRulesRequest], + Awaitable[~.GenerateDataQualityRulesResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'generate_data_quality_rules' not in self._stubs: + self._stubs['generate_data_quality_rules'] = self.grpc_channel.unary_unary( + '/google.cloud.dataplex.v1.DataScanService/GenerateDataQualityRules', + request_serializer=datascans.GenerateDataQualityRulesRequest.serialize, + response_deserializer=datascans.GenerateDataQualityRulesResponse.deserialize, + ) + return self._stubs['generate_data_quality_rules'] + + def _prep_wrapped_messages(self, client_info): + """ Precompute the wrapped methods, overriding the base class method to use async wrappers.""" + self._wrapped_methods = { + self.create_data_scan: self._wrap_method( + self.create_data_scan, + default_timeout=None, + client_info=client_info, + ), + self.update_data_scan: self._wrap_method( + self.update_data_scan, + default_timeout=None, + client_info=client_info, + ), + self.delete_data_scan: self._wrap_method( + self.delete_data_scan, + default_timeout=None, + client_info=client_info, + ), + self.get_data_scan: self._wrap_method( + self.get_data_scan, + default_timeout=None, + client_info=client_info, + ), + self.list_data_scans: self._wrap_method( + self.list_data_scans, + default_timeout=None, + client_info=client_info, + ), + self.run_data_scan: self._wrap_method( + self.run_data_scan, + default_timeout=None, + client_info=client_info, + ), + self.get_data_scan_job: self._wrap_method( + self.get_data_scan_job, + default_timeout=None, + client_info=client_info, + ), + self.list_data_scan_jobs: self._wrap_method( + self.list_data_scan_jobs, + default_timeout=None, + client_info=client_info, + ), + self.generate_data_quality_rules: self._wrap_method( + self.generate_data_quality_rules, + default_timeout=None, + client_info=client_info, + ), + self.get_location: self._wrap_method( + self.get_location, + default_timeout=None, + client_info=client_info, + ), + self.list_locations: self._wrap_method( + self.list_locations, + default_timeout=None, + client_info=client_info, + ), + self.cancel_operation: self._wrap_method( + 
self.cancel_operation, + default_timeout=None, + client_info=client_info, + ), + self.delete_operation: self._wrap_method( + self.delete_operation, + default_timeout=None, + client_info=client_info, + ), + self.get_operation: self._wrap_method( + self.get_operation, + default_timeout=None, + client_info=client_info, + ), + self.list_operations: self._wrap_method( + self.list_operations, + default_timeout=None, + client_info=client_info, + ), + } + + def _wrap_method(self, func, *args, **kwargs): + if self._wrap_with_kind: # pragma: NO COVER + kwargs["kind"] = self.kind + return gapic_v1.method_async.wrap_method(func, *args, **kwargs) + + def close(self): + return self.grpc_channel.close() + + @property + def kind(self) -> str: + return "grpc_asyncio" + + @property + def delete_operation( + self, + ) -> Callable[[operations_pb2.DeleteOperationRequest], None]: + r"""Return a callable for the delete_operation method over gRPC. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "delete_operation" not in self._stubs: + self._stubs["delete_operation"] = self.grpc_channel.unary_unary( + "/google.longrunning.Operations/DeleteOperation", + request_serializer=operations_pb2.DeleteOperationRequest.SerializeToString, + response_deserializer=None, + ) + return self._stubs["delete_operation"] + + @property + def cancel_operation( + self, + ) -> Callable[[operations_pb2.CancelOperationRequest], None]: + r"""Return a callable for the cancel_operation method over gRPC. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "cancel_operation" not in self._stubs: + self._stubs["cancel_operation"] = self.grpc_channel.unary_unary( + "/google.longrunning.Operations/CancelOperation", + request_serializer=operations_pb2.CancelOperationRequest.SerializeToString, + response_deserializer=None, + ) + return self._stubs["cancel_operation"] + + @property + def get_operation( + self, + ) -> Callable[[operations_pb2.GetOperationRequest], operations_pb2.Operation]: + r"""Return a callable for the get_operation method over gRPC. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_operation" not in self._stubs: + self._stubs["get_operation"] = self.grpc_channel.unary_unary( + "/google.longrunning.Operations/GetOperation", + request_serializer=operations_pb2.GetOperationRequest.SerializeToString, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["get_operation"] + + @property + def list_operations( + self, + ) -> Callable[[operations_pb2.ListOperationsRequest], operations_pb2.ListOperationsResponse]: + r"""Return a callable for the list_operations method over gRPC. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "list_operations" not in self._stubs: + self._stubs["list_operations"] = self.grpc_channel.unary_unary( + "/google.longrunning.Operations/ListOperations", + request_serializer=operations_pb2.ListOperationsRequest.SerializeToString, + response_deserializer=operations_pb2.ListOperationsResponse.FromString, + ) + return self._stubs["list_operations"] + + @property + def list_locations( + self, + ) -> Callable[[locations_pb2.ListLocationsRequest], locations_pb2.ListLocationsResponse]: + r"""Return a callable for the list locations method over gRPC. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "list_locations" not in self._stubs: + self._stubs["list_locations"] = self.grpc_channel.unary_unary( + "/google.cloud.location.Locations/ListLocations", + request_serializer=locations_pb2.ListLocationsRequest.SerializeToString, + response_deserializer=locations_pb2.ListLocationsResponse.FromString, + ) + return self._stubs["list_locations"] + + @property + def get_location( + self, + ) -> Callable[[locations_pb2.GetLocationRequest], locations_pb2.Location]: + r"""Return a callable for the list locations method over gRPC. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_location" not in self._stubs: + self._stubs["get_location"] = self.grpc_channel.unary_unary( + "/google.cloud.location.Locations/GetLocation", + request_serializer=locations_pb2.GetLocationRequest.SerializeToString, + response_deserializer=locations_pb2.Location.FromString, + ) + return self._stubs["get_location"] + + +__all__ = ( + 'DataScanServiceGrpcAsyncIOTransport', +) diff --git a/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/data_taxonomy_service/__init__.py b/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/data_taxonomy_service/__init__.py new file mode 100644 index 000000000000..d53da31dabe7 --- /dev/null +++ b/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/data_taxonomy_service/__init__.py @@ -0,0 +1,22 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from .client import DataTaxonomyServiceClient +from .async_client import DataTaxonomyServiceAsyncClient + +__all__ = ( + 'DataTaxonomyServiceClient', + 'DataTaxonomyServiceAsyncClient', +) diff --git a/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/data_taxonomy_service/async_client.py b/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/data_taxonomy_service/async_client.py new file mode 100644 index 000000000000..790d7cdf77dc --- /dev/null +++ b/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/data_taxonomy_service/async_client.py @@ -0,0 +1,2420 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from collections import OrderedDict +import re +from typing import Dict, Callable, Mapping, MutableMapping, MutableSequence, Optional, Sequence, Tuple, Type, Union + +from google.cloud.dataplex_v1 import gapic_version as package_version + +from google.api_core.client_options import ClientOptions +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry_async as retries +from google.auth import credentials as ga_credentials # type: ignore +from google.oauth2 import service_account # type: ignore + + +try: + OptionalRetry = Union[retries.AsyncRetry, gapic_v1.method._MethodDefault, None] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.AsyncRetry, object, None] # type: ignore + +from google.api_core import operation # type: ignore +from google.api_core import operation_async # type: ignore +from google.cloud.dataplex_v1.services.data_taxonomy_service import pagers +from google.cloud.dataplex_v1.types import data_taxonomy +from google.cloud.dataplex_v1.types import data_taxonomy as gcd_data_taxonomy +from google.cloud.dataplex_v1.types import security +from google.cloud.dataplex_v1.types import service +from google.cloud.location import locations_pb2 # type: ignore +from google.iam.v1 import iam_policy_pb2 # type: ignore +from google.iam.v1 import policy_pb2 # type: ignore +from google.longrunning import operations_pb2 # type: ignore +from google.protobuf import empty_pb2 # type: ignore +from google.protobuf import field_mask_pb2 # type: ignore +from google.protobuf import timestamp_pb2 # type: ignore +from .transports.base import DataTaxonomyServiceTransport, DEFAULT_CLIENT_INFO +from .transports.grpc_asyncio import DataTaxonomyServiceGrpcAsyncIOTransport +from .client import DataTaxonomyServiceClient + + +class DataTaxonomyServiceAsyncClient: + """DataTaxonomyService enables attribute-based governance. The + resources currently offered include DataTaxonomy and + DataAttribute. + """ + + _client: DataTaxonomyServiceClient + + # Copy defaults from the synchronous client for use here. + # Note: DEFAULT_ENDPOINT is deprecated. Use _DEFAULT_ENDPOINT_TEMPLATE instead. 
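+    # For illustration, assuming the template shape these generated clients
+    # share: _DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=_DEFAULT_UNIVERSE)
+    # would resolve to the default endpoint "dataplex.googleapis.com".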
+ DEFAULT_ENDPOINT = DataTaxonomyServiceClient.DEFAULT_ENDPOINT + DEFAULT_MTLS_ENDPOINT = DataTaxonomyServiceClient.DEFAULT_MTLS_ENDPOINT + _DEFAULT_ENDPOINT_TEMPLATE = DataTaxonomyServiceClient._DEFAULT_ENDPOINT_TEMPLATE + _DEFAULT_UNIVERSE = DataTaxonomyServiceClient._DEFAULT_UNIVERSE + + data_attribute_path = staticmethod(DataTaxonomyServiceClient.data_attribute_path) + parse_data_attribute_path = staticmethod(DataTaxonomyServiceClient.parse_data_attribute_path) + data_attribute_binding_path = staticmethod(DataTaxonomyServiceClient.data_attribute_binding_path) + parse_data_attribute_binding_path = staticmethod(DataTaxonomyServiceClient.parse_data_attribute_binding_path) + data_taxonomy_path = staticmethod(DataTaxonomyServiceClient.data_taxonomy_path) + parse_data_taxonomy_path = staticmethod(DataTaxonomyServiceClient.parse_data_taxonomy_path) + common_billing_account_path = staticmethod(DataTaxonomyServiceClient.common_billing_account_path) + parse_common_billing_account_path = staticmethod(DataTaxonomyServiceClient.parse_common_billing_account_path) + common_folder_path = staticmethod(DataTaxonomyServiceClient.common_folder_path) + parse_common_folder_path = staticmethod(DataTaxonomyServiceClient.parse_common_folder_path) + common_organization_path = staticmethod(DataTaxonomyServiceClient.common_organization_path) + parse_common_organization_path = staticmethod(DataTaxonomyServiceClient.parse_common_organization_path) + common_project_path = staticmethod(DataTaxonomyServiceClient.common_project_path) + parse_common_project_path = staticmethod(DataTaxonomyServiceClient.parse_common_project_path) + common_location_path = staticmethod(DataTaxonomyServiceClient.common_location_path) + parse_common_location_path = staticmethod(DataTaxonomyServiceClient.parse_common_location_path) + + @classmethod + def from_service_account_info(cls, info: dict, *args, **kwargs): + """Creates an instance of this client using the provided credentials + info. + + Args: + info (dict): The service account private key info. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + DataTaxonomyServiceAsyncClient: The constructed client. + """ + return DataTaxonomyServiceClient.from_service_account_info.__func__(DataTaxonomyServiceAsyncClient, info, *args, **kwargs) # type: ignore + + @classmethod + def from_service_account_file(cls, filename: str, *args, **kwargs): + """Creates an instance of this client using the provided credentials + file. + + Args: + filename (str): The path to the service account private key json + file. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + DataTaxonomyServiceAsyncClient: The constructed client. + """ + return DataTaxonomyServiceClient.from_service_account_file.__func__(DataTaxonomyServiceAsyncClient, filename, *args, **kwargs) # type: ignore + + from_service_account_json = from_service_account_file + + @classmethod + def get_mtls_endpoint_and_cert_source(cls, client_options: Optional[ClientOptions] = None): + """Return the API endpoint and client cert source for mutual TLS. + + The client cert source is determined in the following order: + (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the + client cert source is None. 
+        (2) if `client_options.client_cert_source` is provided, use the provided one; if the
+        default client cert source exists, use the default one; otherwise the client cert
+        source is None.
+
+        The API endpoint is determined in the following order:
+        (1) if `client_options.api_endpoint` is provided, use the provided one.
+        (2) if `GOOGLE_API_USE_MTLS_ENDPOINT` environment variable is "always", use the
+        default mTLS endpoint; if the environment variable is "never", use the default API
+        endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise
+        use the default API endpoint.
+
+        More details can be found at https://google.aip.dev/auth/4114.
+
+        Args:
+            client_options (google.api_core.client_options.ClientOptions): Custom options for the
+                client. Only the `api_endpoint` and `client_cert_source` properties may be used
+                in this method.
+
+        Returns:
+            Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the
+                client cert source to use.
+
+        Raises:
+            google.auth.exceptions.MutualTLSChannelError: If any errors happen.
+        """
+        return DataTaxonomyServiceClient.get_mtls_endpoint_and_cert_source(client_options)  # type: ignore
+
+    @property
+    def transport(self) -> DataTaxonomyServiceTransport:
+        """Returns the transport used by the client instance.
+
+        Returns:
+            DataTaxonomyServiceTransport: The transport used by the client instance.
+        """
+        return self._client.transport
+
+    @property
+    def api_endpoint(self):
+        """Return the API endpoint used by the client instance.
+
+        Returns:
+            str: The API endpoint used by the client instance.
+        """
+        return self._client._api_endpoint
+
+    @property
+    def universe_domain(self) -> str:
+        """Return the universe domain used by the client instance.
+
+        Returns:
+            str: The universe domain used
+            by the client instance.
+        """
+        return self._client._universe_domain
+
+    get_transport_class = DataTaxonomyServiceClient.get_transport_class
+
+    def __init__(self, *,
+            credentials: Optional[ga_credentials.Credentials] = None,
+            transport: Optional[Union[str, DataTaxonomyServiceTransport, Callable[..., DataTaxonomyServiceTransport]]] = "grpc_asyncio",
+            client_options: Optional[ClientOptions] = None,
+            client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO,
+            ) -> None:
+        """Instantiates the data taxonomy service async client.
+
+        Args:
+            credentials (Optional[google.auth.credentials.Credentials]): The
+                authorization credentials to attach to requests. These
+                credentials identify the application to the service; if none
+                are specified, the client will attempt to ascertain the
+                credentials from the environment.
+            transport (Optional[Union[str,DataTaxonomyServiceTransport,Callable[..., DataTaxonomyServiceTransport]]]):
+                The transport to use, or a Callable that constructs and returns a new transport to use.
+                If a Callable is given, it will be called with the same set of initialization
+                arguments as used in the DataTaxonomyServiceTransport constructor.
+                If set to None, a transport is chosen automatically.
+            client_options (Optional[Union[google.api_core.client_options.ClientOptions, dict]]):
+                Custom options for the client.
+
+                1. The ``api_endpoint`` property can be used to override the
+                default endpoint provided by the client when ``transport`` is
+                not explicitly provided.
Only if this property is not set and
+                ``transport`` was not explicitly provided, the endpoint is
+                determined by the GOOGLE_API_USE_MTLS_ENDPOINT environment
+                variable, which can have one of the following values:
+                "always" (always use the default mTLS endpoint), "never" (always
+                use the default regular endpoint) and "auto" (auto-switch to the
+                default mTLS endpoint if client certificate is present; this is
+                the default value).
+
+                2. If the GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable
+                is "true", then the ``client_cert_source`` property can be used
+                to provide a client certificate for mTLS transport. If
+                not provided, the default SSL client certificate will be used if
+                present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not
+                set, no client certificate will be used.
+
+                3. The ``universe_domain`` property can be used to override the
+                default "googleapis.com" universe. Note that the ``api_endpoint``
+                property still takes precedence; and ``universe_domain`` is
+                currently not supported for mTLS.
+
+            client_info (google.api_core.gapic_v1.client_info.ClientInfo):
+                The client info used to send a user-agent string along with
+                API requests. If ``None``, then default info will be used.
+                Generally, you only need to set this if you're developing
+                your own client library.
+
+        Raises:
+            google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport
+                creation failed for any reason.
+        """
+        self._client = DataTaxonomyServiceClient(
+            credentials=credentials,
+            transport=transport,
+            client_options=client_options,
+            client_info=client_info,
+
+        )
+
+    async def create_data_taxonomy(self,
+            request: Optional[Union[gcd_data_taxonomy.CreateDataTaxonomyRequest, dict]] = None,
+            *,
+            parent: Optional[str] = None,
+            data_taxonomy: Optional[gcd_data_taxonomy.DataTaxonomy] = None,
+            data_taxonomy_id: Optional[str] = None,
+            retry: OptionalRetry = gapic_v1.method.DEFAULT,
+            timeout: Union[float, object] = gapic_v1.method.DEFAULT,
+            metadata: Sequence[Tuple[str, str]] = (),
+            ) -> operation_async.AsyncOperation:
+        r"""Create a DataTaxonomy resource.
+
+        .. code-block:: python
+
+            # This snippet has been automatically generated and should be regarded as a
+            # code template only.
+            # It will require modifications to work:
+            # - It may require correct/in-range values for request initialization.
+            # - It may require specifying regional endpoints when creating the service
+            #   client as shown in:
+            #   https://googleapis.dev/python/google-api-core/latest/client_options.html
+            from google.cloud import dataplex_v1
+
+            async def sample_create_data_taxonomy():
+                # Create a client
+                client = dataplex_v1.DataTaxonomyServiceAsyncClient()
+
+                # Initialize request argument(s)
+                request = dataplex_v1.CreateDataTaxonomyRequest(
+                    parent="parent_value",
+                    data_taxonomy_id="data_taxonomy_id_value",
+                )
+
+                # Make the request
+                operation = client.create_data_taxonomy(request=request)
+
+                print("Waiting for operation to complete...")
+
+                response = (await operation).result()
+
+                # Handle the response
+                print(response)
+
+        Args:
+            request (Optional[Union[google.cloud.dataplex_v1.types.CreateDataTaxonomyRequest, dict]]):
+                The request object. Create DataTaxonomy request.
+            parent (:class:`str`):
+                Required. The resource name of the data taxonomy
+                location, of the form:
+                projects/{project_number}/locations/{location_id} where
+                ``location_id`` refers to a GCP region.
+
+                This corresponds to the ``parent`` field
+                on the ``request`` instance; if ``request`` is provided, this
+                should not be set.
+ data_taxonomy (:class:`google.cloud.dataplex_v1.types.DataTaxonomy`): + Required. DataTaxonomy resource. + This corresponds to the ``data_taxonomy`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + data_taxonomy_id (:class:`str`): + Required. DataTaxonomy identifier. + + - Must contain only lowercase letters, numbers and + hyphens. + - Must start with a letter. + - Must be between 1-63 characters. + - Must end with a number or a letter. + - Must be unique within the Project. + + This corresponds to the ``data_taxonomy_id`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation_async.AsyncOperation: + An object representing a long-running operation. + + The result type for the operation will be :class:`google.cloud.dataplex_v1.types.DataTaxonomy` DataTaxonomy represents a set of hierarchical DataAttributes resources, + grouped with a common theme Eg: + 'SensitiveDataTaxonomy' can have attributes to manage + PII data. It is defined at project level. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent, data_taxonomy, data_taxonomy_id]) + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, gcd_data_taxonomy.CreateDataTaxonomyRequest): + request = gcd_data_taxonomy.CreateDataTaxonomyRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + if data_taxonomy is not None: + request.data_taxonomy = data_taxonomy + if data_taxonomy_id is not None: + request.data_taxonomy_id = data_taxonomy_id + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[self._client._transport.create_data_taxonomy] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("parent", request.parent), + )), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation_async.from_gapic( + response, + self._client._transport.operations_client, + gcd_data_taxonomy.DataTaxonomy, + metadata_type=service.OperationMetadata, + ) + + # Done; return the response. 
+ return response + + async def update_data_taxonomy(self, + request: Optional[Union[gcd_data_taxonomy.UpdateDataTaxonomyRequest, dict]] = None, + *, + data_taxonomy: Optional[gcd_data_taxonomy.DataTaxonomy] = None, + update_mask: Optional[field_mask_pb2.FieldMask] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation_async.AsyncOperation: + r"""Updates a DataTaxonomy resource. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dataplex_v1 + + async def sample_update_data_taxonomy(): + # Create a client + client = dataplex_v1.DataTaxonomyServiceAsyncClient() + + # Initialize request argument(s) + request = dataplex_v1.UpdateDataTaxonomyRequest( + ) + + # Make the request + operation = client.update_data_taxonomy(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.dataplex_v1.types.UpdateDataTaxonomyRequest, dict]]): + The request object. Update DataTaxonomy request. + data_taxonomy (:class:`google.cloud.dataplex_v1.types.DataTaxonomy`): + Required. Only fields specified in ``update_mask`` are + updated. + + This corresponds to the ``data_taxonomy`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + update_mask (:class:`google.protobuf.field_mask_pb2.FieldMask`): + Required. Mask of fields to update. + This corresponds to the ``update_mask`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation_async.AsyncOperation: + An object representing a long-running operation. + + The result type for the operation will be :class:`google.cloud.dataplex_v1.types.DataTaxonomy` DataTaxonomy represents a set of hierarchical DataAttributes resources, + grouped with a common theme Eg: + 'SensitiveDataTaxonomy' can have attributes to manage + PII data. It is defined at project level. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([data_taxonomy, update_mask]) + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, gcd_data_taxonomy.UpdateDataTaxonomyRequest): + request = gcd_data_taxonomy.UpdateDataTaxonomyRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. 
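+        # For example (hypothetical values): passing
+        #     data_taxonomy=my_taxonomy,
+        #     update_mask=field_mask_pb2.FieldMask(paths=["description"]),
+        # limits the update to the ``description`` field of the resource.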
+ if data_taxonomy is not None: + request.data_taxonomy = data_taxonomy + if update_mask is not None: + request.update_mask = update_mask + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[self._client._transport.update_data_taxonomy] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("data_taxonomy.name", request.data_taxonomy.name), + )), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation_async.from_gapic( + response, + self._client._transport.operations_client, + gcd_data_taxonomy.DataTaxonomy, + metadata_type=service.OperationMetadata, + ) + + # Done; return the response. + return response + + async def delete_data_taxonomy(self, + request: Optional[Union[data_taxonomy.DeleteDataTaxonomyRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation_async.AsyncOperation: + r"""Deletes a DataTaxonomy resource. All attributes + within the DataTaxonomy must be deleted before the + DataTaxonomy can be deleted. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dataplex_v1 + + async def sample_delete_data_taxonomy(): + # Create a client + client = dataplex_v1.DataTaxonomyServiceAsyncClient() + + # Initialize request argument(s) + request = dataplex_v1.DeleteDataTaxonomyRequest( + name="name_value", + ) + + # Make the request + operation = client.delete_data_taxonomy(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.dataplex_v1.types.DeleteDataTaxonomyRequest, dict]]): + The request object. Delete DataTaxonomy request. + name (:class:`str`): + Required. The resource name of the DataTaxonomy: + projects/{project_number}/locations/{location_id}/dataTaxonomies/{data_taxonomy_id} + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation_async.AsyncOperation: + An object representing a long-running operation. + + The result type for the operation will be :class:`google.protobuf.empty_pb2.Empty` A generic empty message that you can re-use to avoid defining duplicated + empty messages in your APIs. A typical example is to + use it as the request or the response type of an API + method. 
For instance: + + service Foo { + rpc Bar(google.protobuf.Empty) returns + (google.protobuf.Empty); + + } + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, data_taxonomy.DeleteDataTaxonomyRequest): + request = data_taxonomy.DeleteDataTaxonomyRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[self._client._transport.delete_data_taxonomy] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation_async.from_gapic( + response, + self._client._transport.operations_client, + empty_pb2.Empty, + metadata_type=service.OperationMetadata, + ) + + # Done; return the response. + return response + + async def list_data_taxonomies(self, + request: Optional[Union[data_taxonomy.ListDataTaxonomiesRequest, dict]] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListDataTaxonomiesAsyncPager: + r"""Lists DataTaxonomy resources in a project and + location. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dataplex_v1 + + async def sample_list_data_taxonomies(): + # Create a client + client = dataplex_v1.DataTaxonomyServiceAsyncClient() + + # Initialize request argument(s) + request = dataplex_v1.ListDataTaxonomiesRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_data_taxonomies(request=request) + + # Handle the response + async for response in page_result: + print(response) + + Args: + request (Optional[Union[google.cloud.dataplex_v1.types.ListDataTaxonomiesRequest, dict]]): + The request object. List DataTaxonomies request. + parent (:class:`str`): + Required. The resource name of the DataTaxonomy + location, of the form: + projects/{project_number}/locations/{location_id} where + ``location_id`` refers to a GCP region. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. 
+ retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.dataplex_v1.services.data_taxonomy_service.pagers.ListDataTaxonomiesAsyncPager: + List DataTaxonomies response. + + Iterating over this object will yield + results and resolve additional pages + automatically. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent]) + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, data_taxonomy.ListDataTaxonomiesRequest): + request = data_taxonomy.ListDataTaxonomiesRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[self._client._transport.list_data_taxonomies] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("parent", request.parent), + )), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__aiter__` convenience method. + response = pagers.ListDataTaxonomiesAsyncPager( + method=rpc, + request=request, + response=response, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def get_data_taxonomy(self, + request: Optional[Union[data_taxonomy.GetDataTaxonomyRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> data_taxonomy.DataTaxonomy: + r"""Retrieves a DataTaxonomy resource. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dataplex_v1 + + async def sample_get_data_taxonomy(): + # Create a client + client = dataplex_v1.DataTaxonomyServiceAsyncClient() + + # Initialize request argument(s) + request = dataplex_v1.GetDataTaxonomyRequest( + name="name_value", + ) + + # Make the request + response = await client.get_data_taxonomy(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.dataplex_v1.types.GetDataTaxonomyRequest, dict]]): + The request object. 
Get DataTaxonomy request. + name (:class:`str`): + Required. The resource name of the DataTaxonomy: + projects/{project_number}/locations/{location_id}/dataTaxonomies/{data_taxonomy_id} + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.dataplex_v1.types.DataTaxonomy: + DataTaxonomy represents a set of + hierarchical DataAttributes resources, + grouped with a common theme Eg: + 'SensitiveDataTaxonomy' can have + attributes to manage PII data. It is + defined at project level. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, data_taxonomy.GetDataTaxonomyRequest): + request = data_taxonomy.GetDataTaxonomyRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[self._client._transport.get_data_taxonomy] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def create_data_attribute_binding(self, + request: Optional[Union[data_taxonomy.CreateDataAttributeBindingRequest, dict]] = None, + *, + parent: Optional[str] = None, + data_attribute_binding: Optional[data_taxonomy.DataAttributeBinding] = None, + data_attribute_binding_id: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation_async.AsyncOperation: + r"""Create a DataAttributeBinding resource. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dataplex_v1 + + async def sample_create_data_attribute_binding(): + # Create a client + client = dataplex_v1.DataTaxonomyServiceAsyncClient() + + # Initialize request argument(s) + data_attribute_binding = dataplex_v1.DataAttributeBinding() + data_attribute_binding.resource = "resource_value" + + request = dataplex_v1.CreateDataAttributeBindingRequest( + parent="parent_value", + data_attribute_binding_id="data_attribute_binding_id_value", + data_attribute_binding=data_attribute_binding, + ) + + # Make the request + operation = client.create_data_attribute_binding(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.dataplex_v1.types.CreateDataAttributeBindingRequest, dict]]): + The request object. Create DataAttributeBinding request. + parent (:class:`str`): + Required. The resource name of the parent data taxonomy + projects/{project_number}/locations/{location_id} + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + data_attribute_binding (:class:`google.cloud.dataplex_v1.types.DataAttributeBinding`): + Required. DataAttributeBinding + resource. + + This corresponds to the ``data_attribute_binding`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + data_attribute_binding_id (:class:`str`): + Required. DataAttributeBinding identifier. + + - Must contain only lowercase letters, numbers and + hyphens. + - Must start with a letter. + - Must be between 1-63 characters. + - Must end with a number or a letter. + - Must be unique within the Location. + + This corresponds to the ``data_attribute_binding_id`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation_async.AsyncOperation: + An object representing a long-running operation. + + The result type for the operation will be :class:`google.cloud.dataplex_v1.types.DataAttributeBinding` DataAttributeBinding represents binding of attributes to resources. Eg: Bind + 'CustomerInfo' entity with 'PII' attribute. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent, data_attribute_binding, data_attribute_binding_id]) + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, data_taxonomy.CreateDataAttributeBindingRequest): + request = data_taxonomy.CreateDataAttributeBindingRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. 
+ if parent is not None: + request.parent = parent + if data_attribute_binding is not None: + request.data_attribute_binding = data_attribute_binding + if data_attribute_binding_id is not None: + request.data_attribute_binding_id = data_attribute_binding_id + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[self._client._transport.create_data_attribute_binding] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("parent", request.parent), + )), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation_async.from_gapic( + response, + self._client._transport.operations_client, + data_taxonomy.DataAttributeBinding, + metadata_type=service.OperationMetadata, + ) + + # Done; return the response. + return response + + async def update_data_attribute_binding(self, + request: Optional[Union[data_taxonomy.UpdateDataAttributeBindingRequest, dict]] = None, + *, + data_attribute_binding: Optional[data_taxonomy.DataAttributeBinding] = None, + update_mask: Optional[field_mask_pb2.FieldMask] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation_async.AsyncOperation: + r"""Updates a DataAttributeBinding resource. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dataplex_v1 + + async def sample_update_data_attribute_binding(): + # Create a client + client = dataplex_v1.DataTaxonomyServiceAsyncClient() + + # Initialize request argument(s) + data_attribute_binding = dataplex_v1.DataAttributeBinding() + data_attribute_binding.resource = "resource_value" + + request = dataplex_v1.UpdateDataAttributeBindingRequest( + data_attribute_binding=data_attribute_binding, + ) + + # Make the request + operation = client.update_data_attribute_binding(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.dataplex_v1.types.UpdateDataAttributeBindingRequest, dict]]): + The request object. Update DataAttributeBinding request. + data_attribute_binding (:class:`google.cloud.dataplex_v1.types.DataAttributeBinding`): + Required. Only fields specified in ``update_mask`` are + updated. + + This corresponds to the ``data_attribute_binding`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + update_mask (:class:`google.protobuf.field_mask_pb2.FieldMask`): + Required. Mask of fields to update. + This corresponds to the ``update_mask`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. 
+ retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation_async.AsyncOperation: + An object representing a long-running operation. + + The result type for the operation will be :class:`google.cloud.dataplex_v1.types.DataAttributeBinding` DataAttributeBinding represents binding of attributes to resources. Eg: Bind + 'CustomerInfo' entity with 'PII' attribute. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([data_attribute_binding, update_mask]) + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, data_taxonomy.UpdateDataAttributeBindingRequest): + request = data_taxonomy.UpdateDataAttributeBindingRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if data_attribute_binding is not None: + request.data_attribute_binding = data_attribute_binding + if update_mask is not None: + request.update_mask = update_mask + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[self._client._transport.update_data_attribute_binding] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("data_attribute_binding.name", request.data_attribute_binding.name), + )), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation_async.from_gapic( + response, + self._client._transport.operations_client, + data_taxonomy.DataAttributeBinding, + metadata_type=service.OperationMetadata, + ) + + # Done; return the response. + return response + + async def delete_data_attribute_binding(self, + request: Optional[Union[data_taxonomy.DeleteDataAttributeBindingRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation_async.AsyncOperation: + r"""Deletes a DataAttributeBinding resource. All + attributes within the DataAttributeBinding must be + deleted before the DataAttributeBinding can be deleted. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dataplex_v1 + + async def sample_delete_data_attribute_binding(): + # Create a client + client = dataplex_v1.DataTaxonomyServiceAsyncClient() + + # Initialize request argument(s) + request = dataplex_v1.DeleteDataAttributeBindingRequest( + name="name_value", + etag="etag_value", + ) + + # Make the request + operation = client.delete_data_attribute_binding(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.dataplex_v1.types.DeleteDataAttributeBindingRequest, dict]]): + The request object. Delete DataAttributeBinding request. + name (:class:`str`): + Required. The resource name of the DataAttributeBinding: + projects/{project_number}/locations/{location_id}/dataAttributeBindings/{data_attribute_binding_id} + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation_async.AsyncOperation: + An object representing a long-running operation. + + The result type for the operation will be :class:`google.protobuf.empty_pb2.Empty` A generic empty message that you can re-use to avoid defining duplicated + empty messages in your APIs. A typical example is to + use it as the request or the response type of an API + method. For instance: + + service Foo { + rpc Bar(google.protobuf.Empty) returns + (google.protobuf.Empty); + + } + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, data_taxonomy.DeleteDataAttributeBindingRequest): + request = data_taxonomy.DeleteDataAttributeBindingRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[self._client._transport.delete_data_attribute_binding] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. 
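+        # (The raw response above is a google.longrunning Operation; from_gapic
+        # wraps it in an AsyncOperation whose result() coroutine polls until the
+        # server reports completion and then yields empty_pb2.Empty.)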
+ response = operation_async.from_gapic( + response, + self._client._transport.operations_client, + empty_pb2.Empty, + metadata_type=service.OperationMetadata, + ) + + # Done; return the response. + return response + + async def list_data_attribute_bindings(self, + request: Optional[Union[data_taxonomy.ListDataAttributeBindingsRequest, dict]] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListDataAttributeBindingsAsyncPager: + r"""Lists DataAttributeBinding resources in a project and + location. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dataplex_v1 + + async def sample_list_data_attribute_bindings(): + # Create a client + client = dataplex_v1.DataTaxonomyServiceAsyncClient() + + # Initialize request argument(s) + request = dataplex_v1.ListDataAttributeBindingsRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_data_attribute_bindings(request=request) + + # Handle the response + async for response in page_result: + print(response) + + Args: + request (Optional[Union[google.cloud.dataplex_v1.types.ListDataAttributeBindingsRequest, dict]]): + The request object. List DataAttributeBindings request. + parent (:class:`str`): + Required. The resource name of the Location: + projects/{project_number}/locations/{location_id} + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.dataplex_v1.services.data_taxonomy_service.pagers.ListDataAttributeBindingsAsyncPager: + List DataAttributeBindings response. + + Iterating over this object will yield + results and resolve additional pages + automatically. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent]) + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, data_taxonomy.ListDataAttributeBindingsRequest): + request = data_taxonomy.ListDataAttributeBindingsRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. 
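+ # The wrapped method already carries the transport's default retry and
+ # timeout policy; the `gapic_v1.method.DEFAULT` sentinels above opt in to
+ # those defaults, while explicit `retry`/`timeout` values override them.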
+ rpc = self._client._transport._wrapped_methods[self._client._transport.list_data_attribute_bindings]
+
+ # Certain fields should be provided within the metadata header;
+ # add these here.
+ metadata = tuple(metadata) + (
+ gapic_v1.routing_header.to_grpc_metadata((
+ ("parent", request.parent),
+ )),
+ )
+
+ # Validate the universe domain.
+ self._client._validate_universe_domain()
+
+ # Send the request.
+ response = await rpc(
+ request,
+ retry=retry,
+ timeout=timeout,
+ metadata=metadata,
+ )
+
+ # This method is paged; wrap the response in a pager, which provides
+ # an `__aiter__` convenience method.
+ response = pagers.ListDataAttributeBindingsAsyncPager(
+ method=rpc,
+ request=request,
+ response=response,
+ retry=retry,
+ timeout=timeout,
+ metadata=metadata,
+ )
+
+ # Done; return the response.
+ return response
+
+ async def get_data_attribute_binding(self,
+ request: Optional[Union[data_taxonomy.GetDataAttributeBindingRequest, dict]] = None,
+ *,
+ name: Optional[str] = None,
+ retry: OptionalRetry = gapic_v1.method.DEFAULT,
+ timeout: Union[float, object] = gapic_v1.method.DEFAULT,
+ metadata: Sequence[Tuple[str, str]] = (),
+ ) -> data_taxonomy.DataAttributeBinding:
+ r"""Retrieves a DataAttributeBinding resource.
+
+ .. code-block:: python
+
+ # This snippet has been automatically generated and should be regarded as a
+ # code template only.
+ # It will require modifications to work:
+ # - It may require correct/in-range values for request initialization.
+ # - It may require specifying regional endpoints when creating the service
+ # client as shown in:
+ # https://googleapis.dev/python/google-api-core/latest/client_options.html
+ from google.cloud import dataplex_v1
+
+ async def sample_get_data_attribute_binding():
+ # Create a client
+ client = dataplex_v1.DataTaxonomyServiceAsyncClient()
+
+ # Initialize request argument(s)
+ request = dataplex_v1.GetDataAttributeBindingRequest(
+ name="name_value",
+ )
+
+ # Make the request
+ response = await client.get_data_attribute_binding(request=request)
+
+ # Handle the response
+ print(response)
+
+ Args:
+ request (Optional[Union[google.cloud.dataplex_v1.types.GetDataAttributeBindingRequest, dict]]):
+ The request object. Get DataAttributeBinding request.
+ name (:class:`str`):
+ Required. The resource name of the DataAttributeBinding:
+ projects/{project_number}/locations/{location_id}/dataAttributeBindings/{data_attribute_binding_id}
+
+ This corresponds to the ``name`` field
+ on the ``request`` instance; if ``request`` is provided, this
+ should not be set.
+ retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any,
+ should be retried.
+ timeout (float): The timeout for this request.
+ metadata (Sequence[Tuple[str, str]]): Strings which should be
+ sent along with the request as metadata.
+
+ Returns:
+ google.cloud.dataplex_v1.types.DataAttributeBinding:
+ DataAttributeBinding represents the
+ binding of attributes to resources, for
+ example, binding the 'CustomerInfo'
+ entity with the 'PII' attribute.
+
+ """
+ # Create or coerce a protobuf request object.
+ # - Quick check: If we got a request object, we should *not* have
+ # gotten any keyword arguments that map to the request.
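+ # Flattened arguments such as `name` are a convenience layer over the
+ # request message; rejecting a mix of the two keeps a single source of
+ # truth for every field.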
+ has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, data_taxonomy.GetDataAttributeBindingRequest): + request = data_taxonomy.GetDataAttributeBindingRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[self._client._transport.get_data_attribute_binding] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def create_data_attribute(self, + request: Optional[Union[data_taxonomy.CreateDataAttributeRequest, dict]] = None, + *, + parent: Optional[str] = None, + data_attribute: Optional[data_taxonomy.DataAttribute] = None, + data_attribute_id: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation_async.AsyncOperation: + r"""Create a DataAttribute resource. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dataplex_v1 + + async def sample_create_data_attribute(): + # Create a client + client = dataplex_v1.DataTaxonomyServiceAsyncClient() + + # Initialize request argument(s) + request = dataplex_v1.CreateDataAttributeRequest( + parent="parent_value", + data_attribute_id="data_attribute_id_value", + ) + + # Make the request + operation = client.create_data_attribute(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.dataplex_v1.types.CreateDataAttributeRequest, dict]]): + The request object. Create DataAttribute request. + parent (:class:`str`): + Required. The resource name of the parent data taxonomy + projects/{project_number}/locations/{location_id}/dataTaxonomies/{data_taxonomy_id} + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + data_attribute (:class:`google.cloud.dataplex_v1.types.DataAttribute`): + Required. DataAttribute resource. + This corresponds to the ``data_attribute`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + data_attribute_id (:class:`str`): + Required. DataAttribute identifier. 
+
+ - Must contain only lowercase letters, numbers and
+ hyphens.
+ - Must start with a letter.
+ - Must be between 1-63 characters.
+ - Must end with a number or a letter.
+ - Must be unique within the DataTaxonomy.
+
+ This corresponds to the ``data_attribute_id`` field
+ on the ``request`` instance; if ``request`` is provided, this
+ should not be set.
+ retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any,
+ should be retried.
+ timeout (float): The timeout for this request.
+ metadata (Sequence[Tuple[str, str]]): Strings which should be
+ sent along with the request as metadata.
+
+ Returns:
+ google.api_core.operation_async.AsyncOperation:
+ An object representing a long-running operation.
+
+ The result type for the operation will be :class:`google.cloud.dataplex_v1.types.DataAttribute` Denotes one dataAttribute in a dataTaxonomy, for example, PII.
+ DataAttribute resources can be defined in a
+ hierarchy. A single dataAttribute resource can
+ contain specs of multiple types::
+
+ PII
+ - ResourceAccessSpec:
+ - readers: foo@bar.com
+ - DataAccessSpec:
+ - readers: bar@foo.com
+
+ """
+ # Create or coerce a protobuf request object.
+ # - Quick check: If we got a request object, we should *not* have
+ # gotten any keyword arguments that map to the request.
+ has_flattened_params = any([parent, data_attribute, data_attribute_id])
+ if request is not None and has_flattened_params:
+ raise ValueError("If the `request` argument is set, then none of "
+ "the individual field arguments should be set.")
+
+ # - Use the request object if provided (there's no risk of modifying the input as
+ # there are no flattened fields), or create one.
+ if not isinstance(request, data_taxonomy.CreateDataAttributeRequest):
+ request = data_taxonomy.CreateDataAttributeRequest(request)
+
+ # If we have keyword arguments corresponding to fields on the
+ # request, apply these.
+ if parent is not None:
+ request.parent = parent
+ if data_attribute is not None:
+ request.data_attribute = data_attribute
+ if data_attribute_id is not None:
+ request.data_attribute_id = data_attribute_id
+
+ # Wrap the RPC method; this adds retry and timeout information,
+ # and friendly error handling.
+ rpc = self._client._transport._wrapped_methods[self._client._transport.create_data_attribute]
+
+ # Certain fields should be provided within the metadata header;
+ # add these here.
+ metadata = tuple(metadata) + (
+ gapic_v1.routing_header.to_grpc_metadata((
+ ("parent", request.parent),
+ )),
+ )
+
+ # Validate the universe domain.
+ self._client._validate_universe_domain()
+
+ # Send the request.
+ response = await rpc(
+ request,
+ retry=retry,
+ timeout=timeout,
+ metadata=metadata,
+ )
+
+ # Wrap the response in an operation future.
+ response = operation_async.from_gapic(
+ response,
+ self._client._transport.operations_client,
+ data_taxonomy.DataAttribute,
+ metadata_type=service.OperationMetadata,
+ )
+
+ # Done; return the response.
+ return response
+
+ async def update_data_attribute(self,
+ request: Optional[Union[data_taxonomy.UpdateDataAttributeRequest, dict]] = None,
+ *,
+ data_attribute: Optional[data_taxonomy.DataAttribute] = None,
+ update_mask: Optional[field_mask_pb2.FieldMask] = None,
+ retry: OptionalRetry = gapic_v1.method.DEFAULT,
+ timeout: Union[float, object] = gapic_v1.method.DEFAULT,
+ metadata: Sequence[Tuple[str, str]] = (),
+ ) -> operation_async.AsyncOperation:
+ r"""Updates a DataAttribute resource.
+
+ .. code-block:: python
+
+ # This snippet has been automatically generated and should be regarded as a
+ # code template only.
+ # It will require modifications to work:
+ # - It may require correct/in-range values for request initialization.
+ # - It may require specifying regional endpoints when creating the service
+ # client as shown in:
+ # https://googleapis.dev/python/google-api-core/latest/client_options.html
+ from google.cloud import dataplex_v1
+
+ async def sample_update_data_attribute():
+ # Create a client
+ client = dataplex_v1.DataTaxonomyServiceAsyncClient()
+
+ # Initialize request argument(s)
+ request = dataplex_v1.UpdateDataAttributeRequest(
+ )
+
+ # Make the request
+ operation = client.update_data_attribute(request=request)
+
+ print("Waiting for operation to complete...")
+
+ response = (await operation).result()
+
+ # Handle the response
+ print(response)
+
+ Args:
+ request (Optional[Union[google.cloud.dataplex_v1.types.UpdateDataAttributeRequest, dict]]):
+ The request object. Update DataAttribute request.
+ data_attribute (:class:`google.cloud.dataplex_v1.types.DataAttribute`):
+ Required. Only fields specified in ``update_mask`` are
+ updated.
+
+ This corresponds to the ``data_attribute`` field
+ on the ``request`` instance; if ``request`` is provided, this
+ should not be set.
+ update_mask (:class:`google.protobuf.field_mask_pb2.FieldMask`):
+ Required. Mask of fields to update.
+ This corresponds to the ``update_mask`` field
+ on the ``request`` instance; if ``request`` is provided, this
+ should not be set.
+ retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any,
+ should be retried.
+ timeout (float): The timeout for this request.
+ metadata (Sequence[Tuple[str, str]]): Strings which should be
+ sent along with the request as metadata.
+
+ Returns:
+ google.api_core.operation_async.AsyncOperation:
+ An object representing a long-running operation.
+
+ The result type for the operation will be :class:`google.cloud.dataplex_v1.types.DataAttribute` Denotes one dataAttribute in a dataTaxonomy, for example, PII.
+ DataAttribute resources can be defined in a
+ hierarchy. A single dataAttribute resource can
+ contain specs of multiple types::
+
+ PII
+ - ResourceAccessSpec:
+ - readers: foo@bar.com
+ - DataAccessSpec:
+ - readers: bar@foo.com
+
+ """
+ # Create or coerce a protobuf request object.
+ # - Quick check: If we got a request object, we should *not* have
+ # gotten any keyword arguments that map to the request.
+ has_flattened_params = any([data_attribute, update_mask])
+ if request is not None and has_flattened_params:
+ raise ValueError("If the `request` argument is set, then none of "
+ "the individual field arguments should be set.")
+
+ # - Use the request object if provided (there's no risk of modifying the input as
+ # there are no flattened fields), or create one.
+ if not isinstance(request, data_taxonomy.UpdateDataAttributeRequest):
+ request = data_taxonomy.UpdateDataAttributeRequest(request)
+
+ # If we have keyword arguments corresponding to fields on the
+ # request, apply these.
+ if data_attribute is not None:
+ request.data_attribute = data_attribute
+ if update_mask is not None:
+ request.update_mask = update_mask
+
+ # Wrap the RPC method; this adds retry and timeout information,
+ # and friendly error handling.
+ rpc = self._client._transport._wrapped_methods[self._client._transport.update_data_attribute]
+
+ # Certain fields should be provided within the metadata header;
+ # add these here.
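+ # `to_grpc_metadata` renders these pairs as the `x-goog-request-params`
+ # header, which lets the service route the call to the location that owns
+ # `data_attribute.name`.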
+ metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("data_attribute.name", request.data_attribute.name), + )), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation_async.from_gapic( + response, + self._client._transport.operations_client, + data_taxonomy.DataAttribute, + metadata_type=service.OperationMetadata, + ) + + # Done; return the response. + return response + + async def delete_data_attribute(self, + request: Optional[Union[data_taxonomy.DeleteDataAttributeRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation_async.AsyncOperation: + r"""Deletes a Data Attribute resource. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dataplex_v1 + + async def sample_delete_data_attribute(): + # Create a client + client = dataplex_v1.DataTaxonomyServiceAsyncClient() + + # Initialize request argument(s) + request = dataplex_v1.DeleteDataAttributeRequest( + name="name_value", + ) + + # Make the request + operation = client.delete_data_attribute(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.dataplex_v1.types.DeleteDataAttributeRequest, dict]]): + The request object. Delete DataAttribute request. + name (:class:`str`): + Required. The resource name of the DataAttribute: + projects/{project_number}/locations/{location_id}/dataTaxonomies/{dataTaxonomy}/attributes/{data_attribute_id} + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation_async.AsyncOperation: + An object representing a long-running operation. + + The result type for the operation will be :class:`google.protobuf.empty_pb2.Empty` A generic empty message that you can re-use to avoid defining duplicated + empty messages in your APIs. A typical example is to + use it as the request or the response type of an API + method. For instance: + + service Foo { + rpc Bar(google.protobuf.Empty) returns + (google.protobuf.Empty); + + } + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
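+ # Note: `any([name])` treats an empty string the same as an unset value
+ # for this mixing check, although a non-None falsy value would still be
+ # copied onto the request below.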
+ has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, data_taxonomy.DeleteDataAttributeRequest): + request = data_taxonomy.DeleteDataAttributeRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[self._client._transport.delete_data_attribute] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation_async.from_gapic( + response, + self._client._transport.operations_client, + empty_pb2.Empty, + metadata_type=service.OperationMetadata, + ) + + # Done; return the response. + return response + + async def list_data_attributes(self, + request: Optional[Union[data_taxonomy.ListDataAttributesRequest, dict]] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListDataAttributesAsyncPager: + r"""Lists Data Attribute resources in a DataTaxonomy. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dataplex_v1 + + async def sample_list_data_attributes(): + # Create a client + client = dataplex_v1.DataTaxonomyServiceAsyncClient() + + # Initialize request argument(s) + request = dataplex_v1.ListDataAttributesRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_data_attributes(request=request) + + # Handle the response + async for response in page_result: + print(response) + + Args: + request (Optional[Union[google.cloud.dataplex_v1.types.ListDataAttributesRequest, dict]]): + The request object. List DataAttributes request. + parent (:class:`str`): + Required. The resource name of the DataTaxonomy: + projects/{project_number}/locations/{location_id}/dataTaxonomies/{data_taxonomy_id} + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ + Returns: + google.cloud.dataplex_v1.services.data_taxonomy_service.pagers.ListDataAttributesAsyncPager: + List DataAttributes response. + + Iterating over this object will yield + results and resolve additional pages + automatically. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent]) + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, data_taxonomy.ListDataAttributesRequest): + request = data_taxonomy.ListDataAttributesRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[self._client._transport.list_data_attributes] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("parent", request.parent), + )), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__aiter__` convenience method. + response = pagers.ListDataAttributesAsyncPager( + method=rpc, + request=request, + response=response, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def get_data_attribute(self, + request: Optional[Union[data_taxonomy.GetDataAttributeRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> data_taxonomy.DataAttribute: + r"""Retrieves a Data Attribute resource. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dataplex_v1 + + async def sample_get_data_attribute(): + # Create a client + client = dataplex_v1.DataTaxonomyServiceAsyncClient() + + # Initialize request argument(s) + request = dataplex_v1.GetDataAttributeRequest( + name="name_value", + ) + + # Make the request + response = await client.get_data_attribute(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.dataplex_v1.types.GetDataAttributeRequest, dict]]): + The request object. Get DataAttribute request. + name (:class:`str`): + Required. 
The resource name of the dataAttribute:
+ projects/{project_number}/locations/{location_id}/dataTaxonomies/{dataTaxonomy}/attributes/{data_attribute_id}
+
+ This corresponds to the ``name`` field
+ on the ``request`` instance; if ``request`` is provided, this
+ should not be set.
+ retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any,
+ should be retried.
+ timeout (float): The timeout for this request.
+ metadata (Sequence[Tuple[str, str]]): Strings which should be
+ sent along with the request as metadata.
+
+ Returns:
+ google.cloud.dataplex_v1.types.DataAttribute:
+ Denotes one dataAttribute in a dataTaxonomy, for example, PII.
+ DataAttribute resources can be defined in a
+ hierarchy. A single dataAttribute resource can
+ contain specs of multiple types::
+
+ PII
+ - ResourceAccessSpec:
+ - readers: foo@bar.com
+ - DataAccessSpec:
+ - readers: bar@foo.com
+
+ """
+ # Create or coerce a protobuf request object.
+ # - Quick check: If we got a request object, we should *not* have
+ # gotten any keyword arguments that map to the request.
+ has_flattened_params = any([name])
+ if request is not None and has_flattened_params:
+ raise ValueError("If the `request` argument is set, then none of "
+ "the individual field arguments should be set.")
+
+ # - Use the request object if provided (there's no risk of modifying the input as
+ # there are no flattened fields), or create one.
+ if not isinstance(request, data_taxonomy.GetDataAttributeRequest):
+ request = data_taxonomy.GetDataAttributeRequest(request)
+
+ # If we have keyword arguments corresponding to fields on the
+ # request, apply these.
+ if name is not None:
+ request.name = name
+
+ # Wrap the RPC method; this adds retry and timeout information,
+ # and friendly error handling.
+ rpc = self._client._transport._wrapped_methods[self._client._transport.get_data_attribute]
+
+ # Certain fields should be provided within the metadata header;
+ # add these here.
+ metadata = tuple(metadata) + (
+ gapic_v1.routing_header.to_grpc_metadata((
+ ("name", request.name),
+ )),
+ )
+
+ # Validate the universe domain.
+ self._client._validate_universe_domain()
+
+ # Send the request.
+ response = await rpc(
+ request,
+ retry=retry,
+ timeout=timeout,
+ metadata=metadata,
+ )
+
+ # Done; return the response.
+ return response
+
+ async def list_operations(
+ self,
+ request: Optional[operations_pb2.ListOperationsRequest] = None,
+ *,
+ retry: OptionalRetry = gapic_v1.method.DEFAULT,
+ timeout: Union[float, object] = gapic_v1.method.DEFAULT,
+ metadata: Sequence[Tuple[str, str]] = (),
+ ) -> operations_pb2.ListOperationsResponse:
+ r"""Lists operations that match the specified filter in the request.
+
+ Args:
+ request (:class:`~.operations_pb2.ListOperationsRequest`):
+ The request object. Request message for
+ `ListOperations` method.
+ retry (google.api_core.retry_async.AsyncRetry): Designation of what errors,
+ if any, should be retried.
+ timeout (float): The timeout for this request.
+ metadata (Sequence[Tuple[str, str]]): Strings which should be
+ sent along with the request as metadata.
+ Returns:
+ ~.operations_pb2.ListOperationsResponse:
+ Response message for ``ListOperations`` method.
+ """
+ # Create or coerce a protobuf request object.
+ # The request isn't a proto-plus wrapped type,
+ # so it must be constructed via keyword expansion.
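+ # Unlike the proto-plus request types above, `operations_pb2` messages are
+ # raw protobuf classes, so only dict input is coerced here; a message
+ # instance is passed through unchanged.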
+ if isinstance(request, dict): + request = operations_pb2.ListOperationsRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self.transport._wrapped_methods[self._client._transport.list_operations] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("name", request.name),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, retry=retry, timeout=timeout, metadata=metadata,) + + # Done; return the response. + return response + + async def get_operation( + self, + request: Optional[operations_pb2.GetOperationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.Operation: + r"""Gets the latest state of a long-running operation. + + Args: + request (:class:`~.operations_pb2.GetOperationRequest`): + The request object. Request message for + `GetOperation` method. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + ~.operations_pb2.Operation: + An ``Operation`` object. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.GetOperationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self.transport._wrapped_methods[self._client._transport.get_operation] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("name", request.name),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, retry=retry, timeout=timeout, metadata=metadata,) + + # Done; return the response. + return response + + async def delete_operation( + self, + request: Optional[operations_pb2.DeleteOperationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Deletes a long-running operation. + + This method indicates that the client is no longer interested + in the operation result. It does not cancel the operation. + If the server doesn't support this method, it returns + `google.rpc.Code.UNIMPLEMENTED`. + + Args: + request (:class:`~.operations_pb2.DeleteOperationRequest`): + The request object. Request message for + `DeleteOperation` method. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + None + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. 
+ if isinstance(request, dict): + request = operations_pb2.DeleteOperationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self.transport._wrapped_methods[self._client._transport.delete_operation] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("name", request.name),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + await rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + + async def cancel_operation( + self, + request: Optional[operations_pb2.CancelOperationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Starts asynchronous cancellation on a long-running operation. + + The server makes a best effort to cancel the operation, but success + is not guaranteed. If the server doesn't support this method, it returns + `google.rpc.Code.UNIMPLEMENTED`. + + Args: + request (:class:`~.operations_pb2.CancelOperationRequest`): + The request object. Request message for + `CancelOperation` method. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + None + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.CancelOperationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self.transport._wrapped_methods[self._client._transport.cancel_operation] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("name", request.name),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + await rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + + async def get_location( + self, + request: Optional[locations_pb2.GetLocationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> locations_pb2.Location: + r"""Gets information about a location. + + Args: + request (:class:`~.location_pb2.GetLocationRequest`): + The request object. Request message for + `GetLocation` method. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + ~.location_pb2.Location: + Location object. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = locations_pb2.GetLocationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. 
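+ # `self.transport` proxies to `self._client._transport`, so both lookups
+ # below resolve against the same wrapped-method table.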
+ rpc = self.transport._wrapped_methods[self._client._transport.get_location] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("name", request.name),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, retry=retry, timeout=timeout, metadata=metadata,) + + # Done; return the response. + return response + + async def list_locations( + self, + request: Optional[locations_pb2.ListLocationsRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> locations_pb2.ListLocationsResponse: + r"""Lists information about the supported locations for this service. + + Args: + request (:class:`~.location_pb2.ListLocationsRequest`): + The request object. Request message for + `ListLocations` method. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + ~.location_pb2.ListLocationsResponse: + Response message for ``ListLocations`` method. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = locations_pb2.ListLocationsRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self.transport._wrapped_methods[self._client._transport.list_locations] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("name", request.name),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, retry=retry, timeout=timeout, metadata=metadata,) + + # Done; return the response. + return response + + async def __aenter__(self) -> "DataTaxonomyServiceAsyncClient": + return self + + async def __aexit__(self, exc_type, exc, tb): + await self.transport.close() + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(gapic_version=package_version.__version__) + + +__all__ = ( + "DataTaxonomyServiceAsyncClient", +) diff --git a/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/data_taxonomy_service/client.py b/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/data_taxonomy_service/client.py new file mode 100644 index 000000000000..599bb5d7fd8f --- /dev/null +++ b/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/data_taxonomy_service/client.py @@ -0,0 +1,2746 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. +# +from collections import OrderedDict +import os +import re +from typing import Dict, Callable, Mapping, MutableMapping, MutableSequence, Optional, Sequence, Tuple, Type, Union, cast +import warnings + +from google.cloud.dataplex_v1 import gapic_version as package_version + +from google.api_core import client_options as client_options_lib +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry as retries +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport import mtls # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.auth.exceptions import MutualTLSChannelError # type: ignore +from google.oauth2 import service_account # type: ignore + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault, None] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object, None] # type: ignore + +from google.api_core import operation # type: ignore +from google.api_core import operation_async # type: ignore +from google.cloud.dataplex_v1.services.data_taxonomy_service import pagers +from google.cloud.dataplex_v1.types import data_taxonomy +from google.cloud.dataplex_v1.types import data_taxonomy as gcd_data_taxonomy +from google.cloud.dataplex_v1.types import security +from google.cloud.dataplex_v1.types import service +from google.cloud.location import locations_pb2 # type: ignore +from google.iam.v1 import iam_policy_pb2 # type: ignore +from google.iam.v1 import policy_pb2 # type: ignore +from google.longrunning import operations_pb2 # type: ignore +from google.protobuf import empty_pb2 # type: ignore +from google.protobuf import field_mask_pb2 # type: ignore +from google.protobuf import timestamp_pb2 # type: ignore +from .transports.base import DataTaxonomyServiceTransport, DEFAULT_CLIENT_INFO +from .transports.grpc import DataTaxonomyServiceGrpcTransport +from .transports.grpc_asyncio import DataTaxonomyServiceGrpcAsyncIOTransport + + +class DataTaxonomyServiceClientMeta(type): + """Metaclass for the DataTaxonomyService client. + + This provides class-level methods for building and retrieving + support objects (e.g. transport) without polluting the client instance + objects. + """ + _transport_registry = OrderedDict() # type: Dict[str, Type[DataTaxonomyServiceTransport]] + _transport_registry["grpc"] = DataTaxonomyServiceGrpcTransport + _transport_registry["grpc_asyncio"] = DataTaxonomyServiceGrpcAsyncIOTransport + + def get_transport_class(cls, + label: Optional[str] = None, + ) -> Type[DataTaxonomyServiceTransport]: + """Returns an appropriate transport class. + + Args: + label: The name of the desired transport. If none is + provided, then the first transport in the registry is used. + + Returns: + The transport class to use. + """ + # If a specific transport is requested, return that one. + if label: + return cls._transport_registry[label] + + # No transport is requested; return the default (that is, the first one + # in the dictionary). + return next(iter(cls._transport_registry.values())) + + +class DataTaxonomyServiceClient(metaclass=DataTaxonomyServiceClientMeta): + """DataTaxonomyService enables attribute-based governance. The + resources currently offered include DataTaxonomy and + DataAttribute. 
+ """ + + @staticmethod + def _get_default_mtls_endpoint(api_endpoint): + """Converts api endpoint to mTLS endpoint. + + Convert "*.sandbox.googleapis.com" and "*.googleapis.com" to + "*.mtls.sandbox.googleapis.com" and "*.mtls.googleapis.com" respectively. + Args: + api_endpoint (Optional[str]): the api endpoint to convert. + Returns: + str: converted mTLS api endpoint. + """ + if not api_endpoint: + return api_endpoint + + mtls_endpoint_re = re.compile( + r"(?P[^.]+)(?P\.mtls)?(?P\.sandbox)?(?P\.googleapis\.com)?" + ) + + m = mtls_endpoint_re.match(api_endpoint) + name, mtls, sandbox, googledomain = m.groups() + if mtls or not googledomain: + return api_endpoint + + if sandbox: + return api_endpoint.replace( + "sandbox.googleapis.com", "mtls.sandbox.googleapis.com" + ) + + return api_endpoint.replace(".googleapis.com", ".mtls.googleapis.com") + + # Note: DEFAULT_ENDPOINT is deprecated. Use _DEFAULT_ENDPOINT_TEMPLATE instead. + DEFAULT_ENDPOINT = "dataplex.googleapis.com" + DEFAULT_MTLS_ENDPOINT = _get_default_mtls_endpoint.__func__( # type: ignore + DEFAULT_ENDPOINT + ) + + _DEFAULT_ENDPOINT_TEMPLATE = "dataplex.{UNIVERSE_DOMAIN}" + _DEFAULT_UNIVERSE = "googleapis.com" + + @classmethod + def from_service_account_info(cls, info: dict, *args, **kwargs): + """Creates an instance of this client using the provided credentials + info. + + Args: + info (dict): The service account private key info. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + DataTaxonomyServiceClient: The constructed client. + """ + credentials = service_account.Credentials.from_service_account_info(info) + kwargs["credentials"] = credentials + return cls(*args, **kwargs) + + @classmethod + def from_service_account_file(cls, filename: str, *args, **kwargs): + """Creates an instance of this client using the provided credentials + file. + + Args: + filename (str): The path to the service account private key json + file. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + DataTaxonomyServiceClient: The constructed client. + """ + credentials = service_account.Credentials.from_service_account_file( + filename) + kwargs["credentials"] = credentials + return cls(*args, **kwargs) + + from_service_account_json = from_service_account_file + + @property + def transport(self) -> DataTaxonomyServiceTransport: + """Returns the transport used by the client instance. + + Returns: + DataTaxonomyServiceTransport: The transport used by the client + instance. 
+ """ + return self._transport + + @staticmethod + def data_attribute_path(project: str,location: str,dataTaxonomy: str,data_attribute_id: str,) -> str: + """Returns a fully-qualified data_attribute string.""" + return "projects/{project}/locations/{location}/dataTaxonomies/{dataTaxonomy}/attributes/{data_attribute_id}".format(project=project, location=location, dataTaxonomy=dataTaxonomy, data_attribute_id=data_attribute_id, ) + + @staticmethod + def parse_data_attribute_path(path: str) -> Dict[str,str]: + """Parses a data_attribute path into its component segments.""" + m = re.match(r"^projects/(?P.+?)/locations/(?P.+?)/dataTaxonomies/(?P.+?)/attributes/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def data_attribute_binding_path(project: str,location: str,data_attribute_binding_id: str,) -> str: + """Returns a fully-qualified data_attribute_binding string.""" + return "projects/{project}/locations/{location}/dataAttributeBindings/{data_attribute_binding_id}".format(project=project, location=location, data_attribute_binding_id=data_attribute_binding_id, ) + + @staticmethod + def parse_data_attribute_binding_path(path: str) -> Dict[str,str]: + """Parses a data_attribute_binding path into its component segments.""" + m = re.match(r"^projects/(?P.+?)/locations/(?P.+?)/dataAttributeBindings/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def data_taxonomy_path(project: str,location: str,data_taxonomy_id: str,) -> str: + """Returns a fully-qualified data_taxonomy string.""" + return "projects/{project}/locations/{location}/dataTaxonomies/{data_taxonomy_id}".format(project=project, location=location, data_taxonomy_id=data_taxonomy_id, ) + + @staticmethod + def parse_data_taxonomy_path(path: str) -> Dict[str,str]: + """Parses a data_taxonomy path into its component segments.""" + m = re.match(r"^projects/(?P.+?)/locations/(?P.+?)/dataTaxonomies/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_billing_account_path(billing_account: str, ) -> str: + """Returns a fully-qualified billing_account string.""" + return "billingAccounts/{billing_account}".format(billing_account=billing_account, ) + + @staticmethod + def parse_common_billing_account_path(path: str) -> Dict[str,str]: + """Parse a billing_account path into its component segments.""" + m = re.match(r"^billingAccounts/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_folder_path(folder: str, ) -> str: + """Returns a fully-qualified folder string.""" + return "folders/{folder}".format(folder=folder, ) + + @staticmethod + def parse_common_folder_path(path: str) -> Dict[str,str]: + """Parse a folder path into its component segments.""" + m = re.match(r"^folders/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_organization_path(organization: str, ) -> str: + """Returns a fully-qualified organization string.""" + return "organizations/{organization}".format(organization=organization, ) + + @staticmethod + def parse_common_organization_path(path: str) -> Dict[str,str]: + """Parse a organization path into its component segments.""" + m = re.match(r"^organizations/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_project_path(project: str, ) -> str: + """Returns a fully-qualified project string.""" + return "projects/{project}".format(project=project, ) + + @staticmethod + def parse_common_project_path(path: str) -> Dict[str,str]: + """Parse a project path into its 
component segments.""" + m = re.match(r"^projects/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_location_path(project: str, location: str, ) -> str: + """Returns a fully-qualified location string.""" + return "projects/{project}/locations/{location}".format(project=project, location=location, ) + + @staticmethod + def parse_common_location_path(path: str) -> Dict[str,str]: + """Parse a location path into its component segments.""" + m = re.match(r"^projects/(?P.+?)/locations/(?P.+?)$", path) + return m.groupdict() if m else {} + + @classmethod + def get_mtls_endpoint_and_cert_source(cls, client_options: Optional[client_options_lib.ClientOptions] = None): + """Deprecated. Return the API endpoint and client cert source for mutual TLS. + + The client cert source is determined in the following order: + (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the + client cert source is None. + (2) if `client_options.client_cert_source` is provided, use the provided one; if the + default client cert source exists, use the default one; otherwise the client cert + source is None. + + The API endpoint is determined in the following order: + (1) if `client_options.api_endpoint` if provided, use the provided one. + (2) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is "always", use the + default mTLS endpoint; if the environment variable is "never", use the default API + endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise + use the default API endpoint. + + More details can be found at https://google.aip.dev/auth/4114. + + Args: + client_options (google.api_core.client_options.ClientOptions): Custom options for the + client. Only the `api_endpoint` and `client_cert_source` properties may be used + in this method. + + Returns: + Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the + client cert source to use. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If any errors happen. + """ + + warnings.warn("get_mtls_endpoint_and_cert_source is deprecated. Use the api_endpoint property instead.", + DeprecationWarning) + if client_options is None: + client_options = client_options_lib.ClientOptions() + use_client_cert = os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false") + use_mtls_endpoint = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto") + if use_client_cert not in ("true", "false"): + raise ValueError("Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`") + if use_mtls_endpoint not in ("auto", "never", "always"): + raise MutualTLSChannelError("Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`") + + # Figure out the client cert source to use. + client_cert_source = None + if use_client_cert == "true": + if client_options.client_cert_source: + client_cert_source = client_options.client_cert_source + elif mtls.has_default_client_cert_source(): + client_cert_source = mtls.default_client_cert_source() + + # Figure out which api endpoint to use. + if client_options.api_endpoint is not None: + api_endpoint = client_options.api_endpoint + elif use_mtls_endpoint == "always" or (use_mtls_endpoint == "auto" and client_cert_source): + api_endpoint = cls.DEFAULT_MTLS_ENDPOINT + else: + api_endpoint = cls.DEFAULT_ENDPOINT + + return api_endpoint, client_cert_source + + @staticmethod + def _read_environment_variables(): + """Returns the environment variables used by the client. 
+ + Returns: + Tuple[bool, str, str]: returns the GOOGLE_API_USE_CLIENT_CERTIFICATE, + GOOGLE_API_USE_MTLS_ENDPOINT, and GOOGLE_CLOUD_UNIVERSE_DOMAIN environment variables. + + Raises: + ValueError: If GOOGLE_API_USE_CLIENT_CERTIFICATE is not + any of ["true", "false"]. + google.auth.exceptions.MutualTLSChannelError: If GOOGLE_API_USE_MTLS_ENDPOINT + is not any of ["auto", "never", "always"]. + """ + use_client_cert = os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false").lower() + use_mtls_endpoint = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto").lower() + universe_domain_env = os.getenv("GOOGLE_CLOUD_UNIVERSE_DOMAIN") + if use_client_cert not in ("true", "false"): + raise ValueError("Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`") + if use_mtls_endpoint not in ("auto", "never", "always"): + raise MutualTLSChannelError("Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`") + return use_client_cert == "true", use_mtls_endpoint, universe_domain_env + + @staticmethod + def _get_client_cert_source(provided_cert_source, use_cert_flag): + """Return the client cert source to be used by the client. + + Args: + provided_cert_source (bytes): The client certificate source provided. + use_cert_flag (bool): A flag indicating whether to use the client certificate. + + Returns: + bytes or None: The client cert source to be used by the client. + """ + client_cert_source = None + if use_cert_flag: + if provided_cert_source: + client_cert_source = provided_cert_source + elif mtls.has_default_client_cert_source(): + client_cert_source = mtls.default_client_cert_source() + return client_cert_source + + @staticmethod + def _get_api_endpoint(api_override, client_cert_source, universe_domain, use_mtls_endpoint): + """Return the API endpoint used by the client. + + Args: + api_override (str): The API endpoint override. If specified, this is always + the return value of this function and the other arguments are not used. + client_cert_source (bytes): The client certificate source used by the client. + universe_domain (str): The universe domain used by the client. + use_mtls_endpoint (str): How to use the mTLS endpoint, which depends also on the other parameters. + Possible values are "always", "auto", or "never". + + Returns: + str: The API endpoint to be used by the client. + """ + if api_override is not None: + api_endpoint = api_override + elif use_mtls_endpoint == "always" or (use_mtls_endpoint == "auto" and client_cert_source): + _default_universe = DataTaxonomyServiceClient._DEFAULT_UNIVERSE + if universe_domain != _default_universe: + raise MutualTLSChannelError(f"mTLS is not supported in any universe other than {_default_universe}.") + api_endpoint = DataTaxonomyServiceClient.DEFAULT_MTLS_ENDPOINT + else: + api_endpoint = DataTaxonomyServiceClient._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=universe_domain) + return api_endpoint + + @staticmethod + def _get_universe_domain(client_universe_domain: Optional[str], universe_domain_env: Optional[str]) -> str: + """Return the universe domain used by the client. + + Args: + client_universe_domain (Optional[str]): The universe domain configured via the client options. + universe_domain_env (Optional[str]): The universe domain configured via the "GOOGLE_CLOUD_UNIVERSE_DOMAIN" environment variable. + + Returns: + str: The universe domain to be used by the client. + + Raises: + ValueError: If the universe domain is an empty string. 
+        """
+        universe_domain = DataTaxonomyServiceClient._DEFAULT_UNIVERSE
+        if client_universe_domain is not None:
+            universe_domain = client_universe_domain
+        elif universe_domain_env is not None:
+            universe_domain = universe_domain_env
+        if len(universe_domain.strip()) == 0:
+            raise ValueError("Universe Domain cannot be an empty string.")
+        return universe_domain
+
+    def _validate_universe_domain(self):
+        """Validates client's and credentials' universe domains are consistent.
+
+        Returns:
+            bool: True iff the configured universe domain is valid.
+
+        Raises:
+            ValueError: If the configured universe domain is not valid.
+        """
+
+        # NOTE (b/349488459): universe validation is disabled until further notice.
+        return True
+
+    @property
+    def api_endpoint(self):
+        """Return the API endpoint used by the client instance.
+
+        Returns:
+            str: The API endpoint used by the client instance.
+        """
+        return self._api_endpoint
+
+    @property
+    def universe_domain(self) -> str:
+        """Return the universe domain used by the client instance.
+
+        Returns:
+            str: The universe domain used by the client instance.
+        """
+        return self._universe_domain
+
+    def __init__(self, *,
+            credentials: Optional[ga_credentials.Credentials] = None,
+            transport: Optional[Union[str, DataTaxonomyServiceTransport, Callable[..., DataTaxonomyServiceTransport]]] = None,
+            client_options: Optional[Union[client_options_lib.ClientOptions, dict]] = None,
+            client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO,
+            ) -> None:
+        """Instantiates the data taxonomy service client.
+
+        Args:
+            credentials (Optional[google.auth.credentials.Credentials]): The
+                authorization credentials to attach to requests. These
+                credentials identify the application to the service; if none
+                are specified, the client will attempt to ascertain the
+                credentials from the environment.
+            transport (Optional[Union[str,DataTaxonomyServiceTransport,Callable[..., DataTaxonomyServiceTransport]]]):
+                The transport to use, or a Callable that constructs and returns a new transport.
+                If a Callable is given, it will be called with the same set of initialization
+                arguments as used in the DataTaxonomyServiceTransport constructor.
+                If set to None, a transport is chosen automatically.
+            client_options (Optional[Union[google.api_core.client_options.ClientOptions, dict]]):
+                Custom options for the client.
+
+                1. The ``api_endpoint`` property can be used to override the
+                default endpoint provided by the client when ``transport`` is
+                not explicitly provided. Only if this property is not set and
+                ``transport`` was not explicitly provided, the endpoint is
+                determined by the GOOGLE_API_USE_MTLS_ENDPOINT environment
+                variable, which can have one of the following values:
+                "always" (always use the default mTLS endpoint), "never" (always
+                use the default regular endpoint) and "auto" (auto-switch to the
+                default mTLS endpoint if a client certificate is present; this is
+                the default value).
+
+                2. If the GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable
+                is "true", then the ``client_cert_source`` property can be used
+                to provide a client certificate for mTLS transport. If
+                not provided, the default SSL client certificate will be used if
+                present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not
+                set, no client certificate will be used.
+
+                3. The ``universe_domain`` property can be used to override the
+                default "googleapis.com" universe. Note that the ``api_endpoint``
+                property still takes precedence, and ``universe_domain`` is
+                currently not supported for mTLS.
+
+            client_info (google.api_core.gapic_v1.client_info.ClientInfo):
+                The client info used to send a user-agent string along with
+                API requests. If ``None``, then default info will be used.
+                Generally, you only need to set this if you're developing
+                your own client library.
+
+        Raises:
+            google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport
+                creation failed for any reason.
+        """
+        self._client_options = client_options
+        if isinstance(self._client_options, dict):
+            self._client_options = client_options_lib.from_dict(self._client_options)
+        if self._client_options is None:
+            self._client_options = client_options_lib.ClientOptions()
+        self._client_options = cast(client_options_lib.ClientOptions, self._client_options)
+
+        universe_domain_opt = getattr(self._client_options, 'universe_domain', None)
+
+        self._use_client_cert, self._use_mtls_endpoint, self._universe_domain_env = DataTaxonomyServiceClient._read_environment_variables()
+        self._client_cert_source = DataTaxonomyServiceClient._get_client_cert_source(self._client_options.client_cert_source, self._use_client_cert)
+        self._universe_domain = DataTaxonomyServiceClient._get_universe_domain(universe_domain_opt, self._universe_domain_env)
+        self._api_endpoint = None # updated below, depending on `transport`
+
+        # Initialize the universe domain validation.
+        self._is_universe_domain_valid = False
+
+        api_key_value = getattr(self._client_options, "api_key", None)
+        if api_key_value and credentials:
+            raise ValueError("client_options.api_key and credentials are mutually exclusive")
+
+        # Save or instantiate the transport.
+        # Ordinarily, we provide the transport, but allowing a custom transport
+        # instance provides an extensibility point for unusual situations.
+        transport_provided = isinstance(transport, DataTaxonomyServiceTransport)
+        if transport_provided:
+            # transport is a DataTaxonomyServiceTransport instance.
+            if credentials or self._client_options.credentials_file or api_key_value:
+                raise ValueError("When providing a transport instance, "
+                                 "provide its credentials directly.")
+            if self._client_options.scopes:
+                raise ValueError(
+                    "When providing a transport instance, provide its scopes "
+                    "directly."
+ ) + self._transport = cast(DataTaxonomyServiceTransport, transport) + self._api_endpoint = self._transport.host + + self._api_endpoint = (self._api_endpoint or + DataTaxonomyServiceClient._get_api_endpoint( + self._client_options.api_endpoint, + self._client_cert_source, + self._universe_domain, + self._use_mtls_endpoint)) + + if not transport_provided: + import google.auth._default # type: ignore + + if api_key_value and hasattr(google.auth._default, "get_api_key_credentials"): + credentials = google.auth._default.get_api_key_credentials(api_key_value) + + transport_init: Union[Type[DataTaxonomyServiceTransport], Callable[..., DataTaxonomyServiceTransport]] = ( + DataTaxonomyServiceClient.get_transport_class(transport) + if isinstance(transport, str) or transport is None + else cast(Callable[..., DataTaxonomyServiceTransport], transport) + ) + # initialize with the provided callable or the passed in class + self._transport = transport_init( + credentials=credentials, + credentials_file=self._client_options.credentials_file, + host=self._api_endpoint, + scopes=self._client_options.scopes, + client_cert_source_for_mtls=self._client_cert_source, + quota_project_id=self._client_options.quota_project_id, + client_info=client_info, + always_use_jwt_access=True, + api_audience=self._client_options.api_audience, + ) + + def create_data_taxonomy(self, + request: Optional[Union[gcd_data_taxonomy.CreateDataTaxonomyRequest, dict]] = None, + *, + parent: Optional[str] = None, + data_taxonomy: Optional[gcd_data_taxonomy.DataTaxonomy] = None, + data_taxonomy_id: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation.Operation: + r"""Create a DataTaxonomy resource. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dataplex_v1 + + def sample_create_data_taxonomy(): + # Create a client + client = dataplex_v1.DataTaxonomyServiceClient() + + # Initialize request argument(s) + request = dataplex_v1.CreateDataTaxonomyRequest( + parent="parent_value", + data_taxonomy_id="data_taxonomy_id_value", + ) + + # Make the request + operation = client.create_data_taxonomy(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.dataplex_v1.types.CreateDataTaxonomyRequest, dict]): + The request object. Create DataTaxonomy request. + parent (str): + Required. The resource name of the data taxonomy + location, of the form: + projects/{project_number}/locations/{location_id} where + ``location_id`` refers to a GCP region. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + data_taxonomy (google.cloud.dataplex_v1.types.DataTaxonomy): + Required. DataTaxonomy resource. + This corresponds to the ``data_taxonomy`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + data_taxonomy_id (str): + Required. DataTaxonomy identifier. 
+ + - Must contain only lowercase letters, numbers and + hyphens. + - Must start with a letter. + - Must be between 1-63 characters. + - Must end with a number or a letter. + - Must be unique within the Project. + + This corresponds to the ``data_taxonomy_id`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation.Operation: + An object representing a long-running operation. + + The result type for the operation will be :class:`google.cloud.dataplex_v1.types.DataTaxonomy` DataTaxonomy represents a set of hierarchical DataAttributes resources, + grouped with a common theme Eg: + 'SensitiveDataTaxonomy' can have attributes to manage + PII data. It is defined at project level. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent, data_taxonomy, data_taxonomy_id]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, gcd_data_taxonomy.CreateDataTaxonomyRequest): + request = gcd_data_taxonomy.CreateDataTaxonomyRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + if data_taxonomy is not None: + request.data_taxonomy = data_taxonomy + if data_taxonomy_id is not None: + request.data_taxonomy_id = data_taxonomy_id + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.create_data_taxonomy] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("parent", request.parent), + )), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation.from_gapic( + response, + self._transport.operations_client, + gcd_data_taxonomy.DataTaxonomy, + metadata_type=service.OperationMetadata, + ) + + # Done; return the response. + return response + + def update_data_taxonomy(self, + request: Optional[Union[gcd_data_taxonomy.UpdateDataTaxonomyRequest, dict]] = None, + *, + data_taxonomy: Optional[gcd_data_taxonomy.DataTaxonomy] = None, + update_mask: Optional[field_mask_pb2.FieldMask] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation.Operation: + r"""Updates a DataTaxonomy resource. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. 
+ # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dataplex_v1 + + def sample_update_data_taxonomy(): + # Create a client + client = dataplex_v1.DataTaxonomyServiceClient() + + # Initialize request argument(s) + request = dataplex_v1.UpdateDataTaxonomyRequest( + ) + + # Make the request + operation = client.update_data_taxonomy(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.dataplex_v1.types.UpdateDataTaxonomyRequest, dict]): + The request object. Update DataTaxonomy request. + data_taxonomy (google.cloud.dataplex_v1.types.DataTaxonomy): + Required. Only fields specified in ``update_mask`` are + updated. + + This corresponds to the ``data_taxonomy`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + update_mask (google.protobuf.field_mask_pb2.FieldMask): + Required. Mask of fields to update. + This corresponds to the ``update_mask`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation.Operation: + An object representing a long-running operation. + + The result type for the operation will be :class:`google.cloud.dataplex_v1.types.DataTaxonomy` DataTaxonomy represents a set of hierarchical DataAttributes resources, + grouped with a common theme Eg: + 'SensitiveDataTaxonomy' can have attributes to manage + PII data. It is defined at project level. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([data_taxonomy, update_mask]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, gcd_data_taxonomy.UpdateDataTaxonomyRequest): + request = gcd_data_taxonomy.UpdateDataTaxonomyRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if data_taxonomy is not None: + request.data_taxonomy = data_taxonomy + if update_mask is not None: + request.update_mask = update_mask + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.update_data_taxonomy] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("data_taxonomy.name", request.data_taxonomy.name), + )), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. 
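+        # `retry` and `timeout` default to the gapic_v1.method.DEFAULT
+        # sentinel, in which case the per-method values configured on the
+        # wrapped method are applied.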
+ response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation.from_gapic( + response, + self._transport.operations_client, + gcd_data_taxonomy.DataTaxonomy, + metadata_type=service.OperationMetadata, + ) + + # Done; return the response. + return response + + def delete_data_taxonomy(self, + request: Optional[Union[data_taxonomy.DeleteDataTaxonomyRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation.Operation: + r"""Deletes a DataTaxonomy resource. All attributes + within the DataTaxonomy must be deleted before the + DataTaxonomy can be deleted. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dataplex_v1 + + def sample_delete_data_taxonomy(): + # Create a client + client = dataplex_v1.DataTaxonomyServiceClient() + + # Initialize request argument(s) + request = dataplex_v1.DeleteDataTaxonomyRequest( + name="name_value", + ) + + # Make the request + operation = client.delete_data_taxonomy(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.dataplex_v1.types.DeleteDataTaxonomyRequest, dict]): + The request object. Delete DataTaxonomy request. + name (str): + Required. The resource name of the DataTaxonomy: + projects/{project_number}/locations/{location_id}/dataTaxonomies/{data_taxonomy_id} + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation.Operation: + An object representing a long-running operation. + + The result type for the operation will be :class:`google.protobuf.empty_pb2.Empty` A generic empty message that you can re-use to avoid defining duplicated + empty messages in your APIs. A typical example is to + use it as the request or the response type of an API + method. For instance: + + service Foo { + rpc Bar(google.protobuf.Empty) returns + (google.protobuf.Empty); + + } + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. 
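+        # A plain dict is also accepted: the proto-plus constructor below
+        # converts a mapping into a DeleteDataTaxonomyRequest message.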
+ if not isinstance(request, data_taxonomy.DeleteDataTaxonomyRequest): + request = data_taxonomy.DeleteDataTaxonomyRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.delete_data_taxonomy] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation.from_gapic( + response, + self._transport.operations_client, + empty_pb2.Empty, + metadata_type=service.OperationMetadata, + ) + + # Done; return the response. + return response + + def list_data_taxonomies(self, + request: Optional[Union[data_taxonomy.ListDataTaxonomiesRequest, dict]] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListDataTaxonomiesPager: + r"""Lists DataTaxonomy resources in a project and + location. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dataplex_v1 + + def sample_list_data_taxonomies(): + # Create a client + client = dataplex_v1.DataTaxonomyServiceClient() + + # Initialize request argument(s) + request = dataplex_v1.ListDataTaxonomiesRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_data_taxonomies(request=request) + + # Handle the response + for response in page_result: + print(response) + + Args: + request (Union[google.cloud.dataplex_v1.types.ListDataTaxonomiesRequest, dict]): + The request object. List DataTaxonomies request. + parent (str): + Required. The resource name of the DataTaxonomy + location, of the form: + projects/{project_number}/locations/{location_id} where + ``location_id`` refers to a GCP region. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.dataplex_v1.services.data_taxonomy_service.pagers.ListDataTaxonomiesPager: + List DataTaxonomies response. + + Iterating over this object will yield + results and resolve additional pages + automatically. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
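+        # The flattened `parent` argument and an explicit `request` object
+        # are mutually exclusive; the check below enforces that.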
+ has_flattened_params = any([parent]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, data_taxonomy.ListDataTaxonomiesRequest): + request = data_taxonomy.ListDataTaxonomiesRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.list_data_taxonomies] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("parent", request.parent), + )), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__iter__` convenience method. + response = pagers.ListDataTaxonomiesPager( + method=rpc, + request=request, + response=response, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def get_data_taxonomy(self, + request: Optional[Union[data_taxonomy.GetDataTaxonomyRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> data_taxonomy.DataTaxonomy: + r"""Retrieves a DataTaxonomy resource. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dataplex_v1 + + def sample_get_data_taxonomy(): + # Create a client + client = dataplex_v1.DataTaxonomyServiceClient() + + # Initialize request argument(s) + request = dataplex_v1.GetDataTaxonomyRequest( + name="name_value", + ) + + # Make the request + response = client.get_data_taxonomy(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.dataplex_v1.types.GetDataTaxonomyRequest, dict]): + The request object. Get DataTaxonomy request. + name (str): + Required. The resource name of the DataTaxonomy: + projects/{project_number}/locations/{location_id}/dataTaxonomies/{data_taxonomy_id} + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
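+
+        A flattened call may be used in place of a request object; the
+        resource name below is only a placeholder::
+
+            response = client.get_data_taxonomy(
+                name="projects/my-project/locations/us-central1/dataTaxonomies/my-taxonomy",
+            )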
+ + Returns: + google.cloud.dataplex_v1.types.DataTaxonomy: + DataTaxonomy represents a set of + hierarchical DataAttributes resources, + grouped with a common theme Eg: + 'SensitiveDataTaxonomy' can have + attributes to manage PII data. It is + defined at project level. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, data_taxonomy.GetDataTaxonomyRequest): + request = data_taxonomy.GetDataTaxonomyRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.get_data_taxonomy] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def create_data_attribute_binding(self, + request: Optional[Union[data_taxonomy.CreateDataAttributeBindingRequest, dict]] = None, + *, + parent: Optional[str] = None, + data_attribute_binding: Optional[data_taxonomy.DataAttributeBinding] = None, + data_attribute_binding_id: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation.Operation: + r"""Create a DataAttributeBinding resource. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dataplex_v1 + + def sample_create_data_attribute_binding(): + # Create a client + client = dataplex_v1.DataTaxonomyServiceClient() + + # Initialize request argument(s) + data_attribute_binding = dataplex_v1.DataAttributeBinding() + data_attribute_binding.resource = "resource_value" + + request = dataplex_v1.CreateDataAttributeBindingRequest( + parent="parent_value", + data_attribute_binding_id="data_attribute_binding_id_value", + data_attribute_binding=data_attribute_binding, + ) + + # Make the request + operation = client.create_data_attribute_binding(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.dataplex_v1.types.CreateDataAttributeBindingRequest, dict]): + The request object. Create DataAttributeBinding request. + parent (str): + Required. 
The resource name of the parent data taxonomy + projects/{project_number}/locations/{location_id} + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + data_attribute_binding (google.cloud.dataplex_v1.types.DataAttributeBinding): + Required. DataAttributeBinding + resource. + + This corresponds to the ``data_attribute_binding`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + data_attribute_binding_id (str): + Required. DataAttributeBinding identifier. + + - Must contain only lowercase letters, numbers and + hyphens. + - Must start with a letter. + - Must be between 1-63 characters. + - Must end with a number or a letter. + - Must be unique within the Location. + + This corresponds to the ``data_attribute_binding_id`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation.Operation: + An object representing a long-running operation. + + The result type for the operation will be :class:`google.cloud.dataplex_v1.types.DataAttributeBinding` DataAttributeBinding represents binding of attributes to resources. Eg: Bind + 'CustomerInfo' entity with 'PII' attribute. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent, data_attribute_binding, data_attribute_binding_id]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, data_taxonomy.CreateDataAttributeBindingRequest): + request = data_taxonomy.CreateDataAttributeBindingRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + if data_attribute_binding is not None: + request.data_attribute_binding = data_attribute_binding + if data_attribute_binding_id is not None: + request.data_attribute_binding_id = data_attribute_binding_id + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.create_data_attribute_binding] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("parent", request.parent), + )), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation.from_gapic( + response, + self._transport.operations_client, + data_taxonomy.DataAttributeBinding, + metadata_type=service.OperationMetadata, + ) + + # Done; return the response. 
+ return response + + def update_data_attribute_binding(self, + request: Optional[Union[data_taxonomy.UpdateDataAttributeBindingRequest, dict]] = None, + *, + data_attribute_binding: Optional[data_taxonomy.DataAttributeBinding] = None, + update_mask: Optional[field_mask_pb2.FieldMask] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation.Operation: + r"""Updates a DataAttributeBinding resource. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dataplex_v1 + + def sample_update_data_attribute_binding(): + # Create a client + client = dataplex_v1.DataTaxonomyServiceClient() + + # Initialize request argument(s) + data_attribute_binding = dataplex_v1.DataAttributeBinding() + data_attribute_binding.resource = "resource_value" + + request = dataplex_v1.UpdateDataAttributeBindingRequest( + data_attribute_binding=data_attribute_binding, + ) + + # Make the request + operation = client.update_data_attribute_binding(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.dataplex_v1.types.UpdateDataAttributeBindingRequest, dict]): + The request object. Update DataAttributeBinding request. + data_attribute_binding (google.cloud.dataplex_v1.types.DataAttributeBinding): + Required. Only fields specified in ``update_mask`` are + updated. + + This corresponds to the ``data_attribute_binding`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + update_mask (google.protobuf.field_mask_pb2.FieldMask): + Required. Mask of fields to update. + This corresponds to the ``update_mask`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation.Operation: + An object representing a long-running operation. + + The result type for the operation will be :class:`google.cloud.dataplex_v1.types.DataAttributeBinding` DataAttributeBinding represents binding of attributes to resources. Eg: Bind + 'CustomerInfo' entity with 'PII' attribute. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([data_attribute_binding, update_mask]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. 
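+        # Per the field-mask semantics documented above, only the fields
+        # named in `update_mask` are written; all other fields on the
+        # resource are left unchanged by the service.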
+ if not isinstance(request, data_taxonomy.UpdateDataAttributeBindingRequest): + request = data_taxonomy.UpdateDataAttributeBindingRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if data_attribute_binding is not None: + request.data_attribute_binding = data_attribute_binding + if update_mask is not None: + request.update_mask = update_mask + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.update_data_attribute_binding] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("data_attribute_binding.name", request.data_attribute_binding.name), + )), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation.from_gapic( + response, + self._transport.operations_client, + data_taxonomy.DataAttributeBinding, + metadata_type=service.OperationMetadata, + ) + + # Done; return the response. + return response + + def delete_data_attribute_binding(self, + request: Optional[Union[data_taxonomy.DeleteDataAttributeBindingRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation.Operation: + r"""Deletes a DataAttributeBinding resource. All + attributes within the DataAttributeBinding must be + deleted before the DataAttributeBinding can be deleted. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dataplex_v1 + + def sample_delete_data_attribute_binding(): + # Create a client + client = dataplex_v1.DataTaxonomyServiceClient() + + # Initialize request argument(s) + request = dataplex_v1.DeleteDataAttributeBindingRequest( + name="name_value", + etag="etag_value", + ) + + # Make the request + operation = client.delete_data_attribute_binding(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.dataplex_v1.types.DeleteDataAttributeBindingRequest, dict]): + The request object. Delete DataAttributeBinding request. + name (str): + Required. The resource name of the DataAttributeBinding: + projects/{project_number}/locations/{location_id}/dataAttributeBindings/{data_attribute_binding_id} + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
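+
+        Deletion runs as a long-running operation, so a caller typically
+        blocks on the returned future; the values below are placeholders::
+
+            operation = client.delete_data_attribute_binding(
+                request={
+                    "name": "projects/my-project/locations/us-central1/dataAttributeBindings/my-binding",
+                    "etag": "my-etag",
+                },
+            )
+            operation.result()  # completes with google.protobuf.empty_pb2.Empty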
+ + Returns: + google.api_core.operation.Operation: + An object representing a long-running operation. + + The result type for the operation will be :class:`google.protobuf.empty_pb2.Empty` A generic empty message that you can re-use to avoid defining duplicated + empty messages in your APIs. A typical example is to + use it as the request or the response type of an API + method. For instance: + + service Foo { + rpc Bar(google.protobuf.Empty) returns + (google.protobuf.Empty); + + } + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, data_taxonomy.DeleteDataAttributeBindingRequest): + request = data_taxonomy.DeleteDataAttributeBindingRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.delete_data_attribute_binding] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation.from_gapic( + response, + self._transport.operations_client, + empty_pb2.Empty, + metadata_type=service.OperationMetadata, + ) + + # Done; return the response. + return response + + def list_data_attribute_bindings(self, + request: Optional[Union[data_taxonomy.ListDataAttributeBindingsRequest, dict]] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListDataAttributeBindingsPager: + r"""Lists DataAttributeBinding resources in a project and + location. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dataplex_v1 + + def sample_list_data_attribute_bindings(): + # Create a client + client = dataplex_v1.DataTaxonomyServiceClient() + + # Initialize request argument(s) + request = dataplex_v1.ListDataAttributeBindingsRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_data_attribute_bindings(request=request) + + # Handle the response + for response in page_result: + print(response) + + Args: + request (Union[google.cloud.dataplex_v1.types.ListDataAttributeBindingsRequest, dict]): + The request object. List DataAttributeBindings request. + parent (str): + Required. The resource name of the Location: + projects/{project_number}/locations/{location_id} + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.dataplex_v1.services.data_taxonomy_service.pagers.ListDataAttributeBindingsPager: + List DataAttributeBindings response. + + Iterating over this object will yield + results and resolve additional pages + automatically. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, data_taxonomy.ListDataAttributeBindingsRequest): + request = data_taxonomy.ListDataAttributeBindingsRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.list_data_attribute_bindings] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("parent", request.parent), + )), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__iter__` convenience method. + response = pagers.ListDataAttributeBindingsPager( + method=rpc, + request=request, + response=response, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + + def get_data_attribute_binding(self, + request: Optional[Union[data_taxonomy.GetDataAttributeBindingRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> data_taxonomy.DataAttributeBinding: + r"""Retrieves a DataAttributeBinding resource. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dataplex_v1 + + def sample_get_data_attribute_binding(): + # Create a client + client = dataplex_v1.DataTaxonomyServiceClient() + + # Initialize request argument(s) + request = dataplex_v1.GetDataAttributeBindingRequest( + name="name_value", + ) + + # Make the request + response = client.get_data_attribute_binding(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.dataplex_v1.types.GetDataAttributeBindingRequest, dict]): + The request object. Get DataAttributeBinding request. + name (str): + Required. The resource name of the DataAttributeBinding: + projects/{project_number}/locations/{location_id}/dataAttributeBindings/{data_attribute_binding_id} + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.dataplex_v1.types.DataAttributeBinding: + DataAttributeBinding represents + binding of attributes to resources. Eg: + Bind 'CustomerInfo' entity with 'PII' + attribute. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, data_taxonomy.GetDataAttributeBindingRequest): + request = data_taxonomy.GetDataAttributeBindingRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.get_data_attribute_binding] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. 
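+        # Unlike the create/update/delete methods above, this is a unary
+        # call: the DataAttributeBinding is returned directly rather than
+        # wrapped in an operation future.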
+ response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def create_data_attribute(self, + request: Optional[Union[data_taxonomy.CreateDataAttributeRequest, dict]] = None, + *, + parent: Optional[str] = None, + data_attribute: Optional[data_taxonomy.DataAttribute] = None, + data_attribute_id: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation.Operation: + r"""Create a DataAttribute resource. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dataplex_v1 + + def sample_create_data_attribute(): + # Create a client + client = dataplex_v1.DataTaxonomyServiceClient() + + # Initialize request argument(s) + request = dataplex_v1.CreateDataAttributeRequest( + parent="parent_value", + data_attribute_id="data_attribute_id_value", + ) + + # Make the request + operation = client.create_data_attribute(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.dataplex_v1.types.CreateDataAttributeRequest, dict]): + The request object. Create DataAttribute request. + parent (str): + Required. The resource name of the parent data taxonomy + projects/{project_number}/locations/{location_id}/dataTaxonomies/{data_taxonomy_id} + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + data_attribute (google.cloud.dataplex_v1.types.DataAttribute): + Required. DataAttribute resource. + This corresponds to the ``data_attribute`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + data_attribute_id (str): + Required. DataAttribute identifier. + + - Must contain only lowercase letters, numbers and + hyphens. + - Must start with a letter. + - Must be between 1-63 characters. + - Must end with a number or a letter. + - Must be unique within the DataTaxonomy. + + This corresponds to the ``data_attribute_id`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation.Operation: + An object representing a long-running operation. + + The result type for the operation will be :class:`google.cloud.dataplex_v1.types.DataAttribute` Denotes one dataAttribute in a dataTaxonomy, for example, PII. + DataAttribute resources can be defined in a + hierarchy. A single dataAttribute resource can + contain specs of multiple types + + :literal:`\` PII - ResourceAccessSpec : - readers :foo@bar.com - DataAccessSpec : - readers :bar@foo.com`\ \` + + """ + # Create or coerce a protobuf request object. 
+ # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent, data_attribute, data_attribute_id]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, data_taxonomy.CreateDataAttributeRequest): + request = data_taxonomy.CreateDataAttributeRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + if data_attribute is not None: + request.data_attribute = data_attribute + if data_attribute_id is not None: + request.data_attribute_id = data_attribute_id + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.create_data_attribute] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("parent", request.parent), + )), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation.from_gapic( + response, + self._transport.operations_client, + data_taxonomy.DataAttribute, + metadata_type=service.OperationMetadata, + ) + + # Done; return the response. + return response + + def update_data_attribute(self, + request: Optional[Union[data_taxonomy.UpdateDataAttributeRequest, dict]] = None, + *, + data_attribute: Optional[data_taxonomy.DataAttribute] = None, + update_mask: Optional[field_mask_pb2.FieldMask] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation.Operation: + r"""Updates a DataAttribute resource. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dataplex_v1 + + def sample_update_data_attribute(): + # Create a client + client = dataplex_v1.DataTaxonomyServiceClient() + + # Initialize request argument(s) + request = dataplex_v1.UpdateDataAttributeRequest( + ) + + # Make the request + operation = client.update_data_attribute(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.dataplex_v1.types.UpdateDataAttributeRequest, dict]): + The request object. Update DataAttribute request. + data_attribute (google.cloud.dataplex_v1.types.DataAttribute): + Required. Only fields specified in ``update_mask`` are + updated. + + This corresponds to the ``data_attribute`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. 
+ update_mask (google.protobuf.field_mask_pb2.FieldMask): + Required. Mask of fields to update. + This corresponds to the ``update_mask`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation.Operation: + An object representing a long-running operation. + + The result type for the operation will be :class:`google.cloud.dataplex_v1.types.DataAttribute` Denotes one dataAttribute in a dataTaxonomy, for example, PII. + DataAttribute resources can be defined in a + hierarchy. A single dataAttribute resource can + contain specs of multiple types + + :literal:`\` PII - ResourceAccessSpec : - readers :foo@bar.com - DataAccessSpec : - readers :bar@foo.com`\ \` + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([data_attribute, update_mask]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, data_taxonomy.UpdateDataAttributeRequest): + request = data_taxonomy.UpdateDataAttributeRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if data_attribute is not None: + request.data_attribute = data_attribute + if update_mask is not None: + request.update_mask = update_mask + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.update_data_attribute] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("data_attribute.name", request.data_attribute.name), + )), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation.from_gapic( + response, + self._transport.operations_client, + data_taxonomy.DataAttribute, + metadata_type=service.OperationMetadata, + ) + + # Done; return the response. + return response + + def delete_data_attribute(self, + request: Optional[Union[data_taxonomy.DeleteDataAttributeRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation.Operation: + r"""Deletes a Data Attribute resource. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dataplex_v1 + + def sample_delete_data_attribute(): + # Create a client + client = dataplex_v1.DataTaxonomyServiceClient() + + # Initialize request argument(s) + request = dataplex_v1.DeleteDataAttributeRequest( + name="name_value", + ) + + # Make the request + operation = client.delete_data_attribute(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.dataplex_v1.types.DeleteDataAttributeRequest, dict]): + The request object. Delete DataAttribute request. + name (str): + Required. The resource name of the DataAttribute: + projects/{project_number}/locations/{location_id}/dataTaxonomies/{dataTaxonomy}/attributes/{data_attribute_id} + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation.Operation: + An object representing a long-running operation. + + The result type for the operation will be :class:`google.protobuf.empty_pb2.Empty` A generic empty message that you can re-use to avoid defining duplicated + empty messages in your APIs. A typical example is to + use it as the request or the response type of an API + method. For instance: + + service Foo { + rpc Bar(google.protobuf.Empty) returns + (google.protobuf.Empty); + + } + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, data_taxonomy.DeleteDataAttributeRequest): + request = data_taxonomy.DeleteDataAttributeRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.delete_data_attribute] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation.from_gapic( + response, + self._transport.operations_client, + empty_pb2.Empty, + metadata_type=service.OperationMetadata, + ) + + # Done; return the response. 
+ return response + + def list_data_attributes(self, + request: Optional[Union[data_taxonomy.ListDataAttributesRequest, dict]] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListDataAttributesPager: + r"""Lists Data Attribute resources in a DataTaxonomy. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dataplex_v1 + + def sample_list_data_attributes(): + # Create a client + client = dataplex_v1.DataTaxonomyServiceClient() + + # Initialize request argument(s) + request = dataplex_v1.ListDataAttributesRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_data_attributes(request=request) + + # Handle the response + for response in page_result: + print(response) + + Args: + request (Union[google.cloud.dataplex_v1.types.ListDataAttributesRequest, dict]): + The request object. List DataAttributes request. + parent (str): + Required. The resource name of the DataTaxonomy: + projects/{project_number}/locations/{location_id}/dataTaxonomies/{data_taxonomy_id} + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.dataplex_v1.services.data_taxonomy_service.pagers.ListDataAttributesPager: + List DataAttributes response. + + Iterating over this object will yield + results and resolve additional pages + automatically. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, data_taxonomy.ListDataAttributesRequest): + request = data_taxonomy.ListDataAttributesRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.list_data_attributes] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("parent", request.parent), + )), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. 
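+        # Note: this initial call fetches only the first page of results. The
+        # pager constructed below re-invokes the same wrapped rpc with each
+        # response's ``next_page_token`` as the caller iterates (see the
+        # ``pages`` property in ``pagers.py``).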
+        response = rpc(
+            request,
+            retry=retry,
+            timeout=timeout,
+            metadata=metadata,
+        )
+
+        # This method is paged; wrap the response in a pager, which provides
+        # an `__iter__` convenience method.
+        response = pagers.ListDataAttributesPager(
+            method=rpc,
+            request=request,
+            response=response,
+            retry=retry,
+            timeout=timeout,
+            metadata=metadata,
+        )
+
+        # Done; return the response.
+        return response
+
+    def get_data_attribute(self,
+            request: Optional[Union[data_taxonomy.GetDataAttributeRequest, dict]] = None,
+            *,
+            name: Optional[str] = None,
+            retry: OptionalRetry = gapic_v1.method.DEFAULT,
+            timeout: Union[float, object] = gapic_v1.method.DEFAULT,
+            metadata: Sequence[Tuple[str, str]] = (),
+            ) -> data_taxonomy.DataAttribute:
+        r"""Retrieves a Data Attribute resource.
+
+        .. code-block:: python
+
+            # This snippet has been automatically generated and should be regarded as a
+            # code template only.
+            # It will require modifications to work:
+            # - It may require correct/in-range values for request initialization.
+            # - It may require specifying regional endpoints when creating the service
+            #   client as shown in:
+            #   https://googleapis.dev/python/google-api-core/latest/client_options.html
+            from google.cloud import dataplex_v1
+
+            def sample_get_data_attribute():
+                # Create a client
+                client = dataplex_v1.DataTaxonomyServiceClient()
+
+                # Initialize request argument(s)
+                request = dataplex_v1.GetDataAttributeRequest(
+                    name="name_value",
+                )
+
+                # Make the request
+                response = client.get_data_attribute(request=request)
+
+                # Handle the response
+                print(response)
+
+        Args:
+            request (Union[google.cloud.dataplex_v1.types.GetDataAttributeRequest, dict]):
+                The request object. Get DataAttribute request.
+            name (str):
+                Required. The resource name of the dataAttribute:
+                projects/{project_number}/locations/{location_id}/dataTaxonomies/{dataTaxonomy}/attributes/{data_attribute_id}
+
+                This corresponds to the ``name`` field
+                on the ``request`` instance; if ``request`` is provided, this
+                should not be set.
+            retry (google.api_core.retry.Retry): Designation of what errors, if any,
+                should be retried.
+            timeout (float): The timeout for this request.
+            metadata (Sequence[Tuple[str, str]]): Strings which should be
+                sent along with the request as metadata.
+
+        Returns:
+            google.cloud.dataplex_v1.types.DataAttribute:
+                Denotes one dataAttribute in a dataTaxonomy, for example, PII.
+                DataAttribute resources can be defined in a
+                hierarchy. A single dataAttribute resource can
+                contain specs of multiple types::
+
+                    PII
+                      - ResourceAccessSpec:
+                          - readers: foo@bar.com
+                      - DataAccessSpec:
+                          - readers: bar@foo.com
+
+        """
+        # Create or coerce a protobuf request object.
+        # - Quick check: If we got a request object, we should *not* have
+        #   gotten any keyword arguments that map to the request.
+        has_flattened_params = any([name])
+        if request is not None and has_flattened_params:
+            raise ValueError('If the `request` argument is set, then none of '
+                             'the individual field arguments should be set.')
+
+        # - Use the request object if provided (there's no risk of modifying the input as
+        #   there are no flattened fields), or create one.
+        if not isinstance(request, data_taxonomy.GetDataAttributeRequest):
+            request = data_taxonomy.GetDataAttributeRequest(request)
+            # If we have keyword arguments corresponding to fields on the
+            # request, apply these.
+            if name is not None:
+                request.name = name
+
+        # Wrap the RPC method; this adds retry and timeout information,
+        # and friendly error handling.
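+        # The wrapped methods were precomputed by the transport's
+        # ``_prep_wrapped_methods`` (see ``transports/base.py``), keyed by the
+        # bound transport method.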
+ rpc = self._transport._wrapped_methods[self._transport.get_data_attribute] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def __enter__(self) -> "DataTaxonomyServiceClient": + return self + + def __exit__(self, type, value, traceback): + """Releases underlying transport's resources. + + .. warning:: + ONLY use as a context manager if the transport is NOT shared + with other clients! Exiting the with block will CLOSE the transport + and may cause errors in other clients! + """ + self.transport.close() + + def list_operations( + self, + request: Optional[operations_pb2.ListOperationsRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.ListOperationsResponse: + r"""Lists operations that match the specified filter in the request. + + Args: + request (:class:`~.operations_pb2.ListOperationsRequest`): + The request object. Request message for + `ListOperations` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + ~.operations_pb2.ListOperationsResponse: + Response message for ``ListOperations`` method. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.ListOperationsRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.list_operations] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("name", request.name),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, retry=retry, timeout=timeout, metadata=metadata,) + + # Done; return the response. + return response + + def get_operation( + self, + request: Optional[operations_pb2.GetOperationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.Operation: + r"""Gets the latest state of a long-running operation. + + Args: + request (:class:`~.operations_pb2.GetOperationRequest`): + The request object. Request message for + `GetOperation` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + ~.operations_pb2.Operation: + An ``Operation`` object. + """ + # Create or coerce a protobuf request object. 
+ # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.GetOperationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.get_operation] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("name", request.name),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, retry=retry, timeout=timeout, metadata=metadata,) + + # Done; return the response. + return response + + def delete_operation( + self, + request: Optional[operations_pb2.DeleteOperationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Deletes a long-running operation. + + This method indicates that the client is no longer interested + in the operation result. It does not cancel the operation. + If the server doesn't support this method, it returns + `google.rpc.Code.UNIMPLEMENTED`. + + Args: + request (:class:`~.operations_pb2.DeleteOperationRequest`): + The request object. Request message for + `DeleteOperation` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + None + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.DeleteOperationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.delete_operation] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("name", request.name),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + + def cancel_operation( + self, + request: Optional[operations_pb2.CancelOperationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Starts asynchronous cancellation on a long-running operation. + + The server makes a best effort to cancel the operation, but success + is not guaranteed. If the server doesn't support this method, it returns + `google.rpc.Code.UNIMPLEMENTED`. + + Args: + request (:class:`~.operations_pb2.CancelOperationRequest`): + The request object. Request message for + `CancelOperation` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + None + """ + # Create or coerce a protobuf request object. 
+ # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.CancelOperationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.cancel_operation] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("name", request.name),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + + def get_location( + self, + request: Optional[locations_pb2.GetLocationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> locations_pb2.Location: + r"""Gets information about a location. + + Args: + request (:class:`~.location_pb2.GetLocationRequest`): + The request object. Request message for + `GetLocation` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + ~.location_pb2.Location: + Location object. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = locations_pb2.GetLocationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.get_location] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("name", request.name),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, retry=retry, timeout=timeout, metadata=metadata,) + + # Done; return the response. + return response + + def list_locations( + self, + request: Optional[locations_pb2.ListLocationsRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> locations_pb2.ListLocationsResponse: + r"""Lists information about the supported locations for this service. + + Args: + request (:class:`~.location_pb2.ListLocationsRequest`): + The request object. Request message for + `ListLocations` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + ~.location_pb2.ListLocationsResponse: + Response message for ``ListLocations`` method. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = locations_pb2.ListLocationsRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. 
+ rpc = self._transport._wrapped_methods[self._transport.list_locations] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("name", request.name),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, retry=retry, timeout=timeout, metadata=metadata,) + + # Done; return the response. + return response + + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(gapic_version=package_version.__version__) + + +__all__ = ( + "DataTaxonomyServiceClient", +) diff --git a/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/data_taxonomy_service/pagers.py b/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/data_taxonomy_service/pagers.py new file mode 100644 index 000000000000..787b71f82ce8 --- /dev/null +++ b/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/data_taxonomy_service/pagers.py @@ -0,0 +1,432 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from google.api_core import gapic_v1 +from google.api_core import retry as retries +from google.api_core import retry_async as retries_async +from typing import Any, AsyncIterator, Awaitable, Callable, Sequence, Tuple, Optional, Iterator, Union +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault, None] + OptionalAsyncRetry = Union[retries_async.AsyncRetry, gapic_v1.method._MethodDefault, None] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object, None] # type: ignore + OptionalAsyncRetry = Union[retries_async.AsyncRetry, object, None] # type: ignore + +from google.cloud.dataplex_v1.types import data_taxonomy + + +class ListDataTaxonomiesPager: + """A pager for iterating through ``list_data_taxonomies`` requests. + + This class thinly wraps an initial + :class:`google.cloud.dataplex_v1.types.ListDataTaxonomiesResponse` object, and + provides an ``__iter__`` method to iterate through its + ``data_taxonomies`` field. + + If there are more pages, the ``__iter__`` method will make additional + ``ListDataTaxonomies`` requests and continue to iterate + through the ``data_taxonomies`` field on the + corresponding responses. + + All the usual :class:`google.cloud.dataplex_v1.types.ListDataTaxonomiesResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + def __init__(self, + method: Callable[..., data_taxonomy.ListDataTaxonomiesResponse], + request: data_taxonomy.ListDataTaxonomiesRequest, + response: data_taxonomy.ListDataTaxonomiesResponse, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = ()): + """Instantiate the pager. 
+ + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.dataplex_v1.types.ListDataTaxonomiesRequest): + The initial request object. + response (google.cloud.dataplex_v1.types.ListDataTaxonomiesResponse): + The initial response object. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + self._method = method + self._request = data_taxonomy.ListDataTaxonomiesRequest(request) + self._response = response + self._retry = retry + self._timeout = timeout + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + def pages(self) -> Iterator[data_taxonomy.ListDataTaxonomiesResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = self._method(self._request, retry=self._retry, timeout=self._timeout, metadata=self._metadata) + yield self._response + + def __iter__(self) -> Iterator[data_taxonomy.DataTaxonomy]: + for page in self.pages: + yield from page.data_taxonomies + + def __repr__(self) -> str: + return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) + + +class ListDataTaxonomiesAsyncPager: + """A pager for iterating through ``list_data_taxonomies`` requests. + + This class thinly wraps an initial + :class:`google.cloud.dataplex_v1.types.ListDataTaxonomiesResponse` object, and + provides an ``__aiter__`` method to iterate through its + ``data_taxonomies`` field. + + If there are more pages, the ``__aiter__`` method will make additional + ``ListDataTaxonomies`` requests and continue to iterate + through the ``data_taxonomies`` field on the + corresponding responses. + + All the usual :class:`google.cloud.dataplex_v1.types.ListDataTaxonomiesResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + def __init__(self, + method: Callable[..., Awaitable[data_taxonomy.ListDataTaxonomiesResponse]], + request: data_taxonomy.ListDataTaxonomiesRequest, + response: data_taxonomy.ListDataTaxonomiesResponse, + *, + retry: OptionalAsyncRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = ()): + """Instantiates the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.dataplex_v1.types.ListDataTaxonomiesRequest): + The initial request object. + response (google.cloud.dataplex_v1.types.ListDataTaxonomiesResponse): + The initial response object. + retry (google.api_core.retry.AsyncRetry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ """ + self._method = method + self._request = data_taxonomy.ListDataTaxonomiesRequest(request) + self._response = response + self._retry = retry + self._timeout = timeout + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + async def pages(self) -> AsyncIterator[data_taxonomy.ListDataTaxonomiesResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = await self._method(self._request, retry=self._retry, timeout=self._timeout, metadata=self._metadata) + yield self._response + def __aiter__(self) -> AsyncIterator[data_taxonomy.DataTaxonomy]: + async def async_generator(): + async for page in self.pages: + for response in page.data_taxonomies: + yield response + + return async_generator() + + def __repr__(self) -> str: + return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) + + +class ListDataAttributeBindingsPager: + """A pager for iterating through ``list_data_attribute_bindings`` requests. + + This class thinly wraps an initial + :class:`google.cloud.dataplex_v1.types.ListDataAttributeBindingsResponse` object, and + provides an ``__iter__`` method to iterate through its + ``data_attribute_bindings`` field. + + If there are more pages, the ``__iter__`` method will make additional + ``ListDataAttributeBindings`` requests and continue to iterate + through the ``data_attribute_bindings`` field on the + corresponding responses. + + All the usual :class:`google.cloud.dataplex_v1.types.ListDataAttributeBindingsResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + def __init__(self, + method: Callable[..., data_taxonomy.ListDataAttributeBindingsResponse], + request: data_taxonomy.ListDataAttributeBindingsRequest, + response: data_taxonomy.ListDataAttributeBindingsResponse, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = ()): + """Instantiate the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.dataplex_v1.types.ListDataAttributeBindingsRequest): + The initial request object. + response (google.cloud.dataplex_v1.types.ListDataAttributeBindingsResponse): + The initial response object. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ """ + self._method = method + self._request = data_taxonomy.ListDataAttributeBindingsRequest(request) + self._response = response + self._retry = retry + self._timeout = timeout + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + def pages(self) -> Iterator[data_taxonomy.ListDataAttributeBindingsResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = self._method(self._request, retry=self._retry, timeout=self._timeout, metadata=self._metadata) + yield self._response + + def __iter__(self) -> Iterator[data_taxonomy.DataAttributeBinding]: + for page in self.pages: + yield from page.data_attribute_bindings + + def __repr__(self) -> str: + return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) + + +class ListDataAttributeBindingsAsyncPager: + """A pager for iterating through ``list_data_attribute_bindings`` requests. + + This class thinly wraps an initial + :class:`google.cloud.dataplex_v1.types.ListDataAttributeBindingsResponse` object, and + provides an ``__aiter__`` method to iterate through its + ``data_attribute_bindings`` field. + + If there are more pages, the ``__aiter__`` method will make additional + ``ListDataAttributeBindings`` requests and continue to iterate + through the ``data_attribute_bindings`` field on the + corresponding responses. + + All the usual :class:`google.cloud.dataplex_v1.types.ListDataAttributeBindingsResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + def __init__(self, + method: Callable[..., Awaitable[data_taxonomy.ListDataAttributeBindingsResponse]], + request: data_taxonomy.ListDataAttributeBindingsRequest, + response: data_taxonomy.ListDataAttributeBindingsResponse, + *, + retry: OptionalAsyncRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = ()): + """Instantiates the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.dataplex_v1.types.ListDataAttributeBindingsRequest): + The initial request object. + response (google.cloud.dataplex_v1.types.ListDataAttributeBindingsResponse): + The initial response object. + retry (google.api_core.retry.AsyncRetry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ """ + self._method = method + self._request = data_taxonomy.ListDataAttributeBindingsRequest(request) + self._response = response + self._retry = retry + self._timeout = timeout + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + async def pages(self) -> AsyncIterator[data_taxonomy.ListDataAttributeBindingsResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = await self._method(self._request, retry=self._retry, timeout=self._timeout, metadata=self._metadata) + yield self._response + def __aiter__(self) -> AsyncIterator[data_taxonomy.DataAttributeBinding]: + async def async_generator(): + async for page in self.pages: + for response in page.data_attribute_bindings: + yield response + + return async_generator() + + def __repr__(self) -> str: + return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) + + +class ListDataAttributesPager: + """A pager for iterating through ``list_data_attributes`` requests. + + This class thinly wraps an initial + :class:`google.cloud.dataplex_v1.types.ListDataAttributesResponse` object, and + provides an ``__iter__`` method to iterate through its + ``data_attributes`` field. + + If there are more pages, the ``__iter__`` method will make additional + ``ListDataAttributes`` requests and continue to iterate + through the ``data_attributes`` field on the + corresponding responses. + + All the usual :class:`google.cloud.dataplex_v1.types.ListDataAttributesResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + def __init__(self, + method: Callable[..., data_taxonomy.ListDataAttributesResponse], + request: data_taxonomy.ListDataAttributesRequest, + response: data_taxonomy.ListDataAttributesResponse, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = ()): + """Instantiate the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.dataplex_v1.types.ListDataAttributesRequest): + The initial request object. + response (google.cloud.dataplex_v1.types.ListDataAttributesResponse): + The initial response object. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ """ + self._method = method + self._request = data_taxonomy.ListDataAttributesRequest(request) + self._response = response + self._retry = retry + self._timeout = timeout + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + def pages(self) -> Iterator[data_taxonomy.ListDataAttributesResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = self._method(self._request, retry=self._retry, timeout=self._timeout, metadata=self._metadata) + yield self._response + + def __iter__(self) -> Iterator[data_taxonomy.DataAttribute]: + for page in self.pages: + yield from page.data_attributes + + def __repr__(self) -> str: + return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) + + +class ListDataAttributesAsyncPager: + """A pager for iterating through ``list_data_attributes`` requests. + + This class thinly wraps an initial + :class:`google.cloud.dataplex_v1.types.ListDataAttributesResponse` object, and + provides an ``__aiter__`` method to iterate through its + ``data_attributes`` field. + + If there are more pages, the ``__aiter__`` method will make additional + ``ListDataAttributes`` requests and continue to iterate + through the ``data_attributes`` field on the + corresponding responses. + + All the usual :class:`google.cloud.dataplex_v1.types.ListDataAttributesResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + def __init__(self, + method: Callable[..., Awaitable[data_taxonomy.ListDataAttributesResponse]], + request: data_taxonomy.ListDataAttributesRequest, + response: data_taxonomy.ListDataAttributesResponse, + *, + retry: OptionalAsyncRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = ()): + """Instantiates the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.dataplex_v1.types.ListDataAttributesRequest): + The initial request object. + response (google.cloud.dataplex_v1.types.ListDataAttributesResponse): + The initial response object. + retry (google.api_core.retry.AsyncRetry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ """ + self._method = method + self._request = data_taxonomy.ListDataAttributesRequest(request) + self._response = response + self._retry = retry + self._timeout = timeout + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + async def pages(self) -> AsyncIterator[data_taxonomy.ListDataAttributesResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = await self._method(self._request, retry=self._retry, timeout=self._timeout, metadata=self._metadata) + yield self._response + def __aiter__(self) -> AsyncIterator[data_taxonomy.DataAttribute]: + async def async_generator(): + async for page in self.pages: + for response in page.data_attributes: + yield response + + return async_generator() + + def __repr__(self) -> str: + return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) diff --git a/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/data_taxonomy_service/transports/README.rst b/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/data_taxonomy_service/transports/README.rst new file mode 100644 index 000000000000..5c194fc01362 --- /dev/null +++ b/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/data_taxonomy_service/transports/README.rst @@ -0,0 +1,9 @@ + +transport inheritance structure +_______________________________ + +`DataTaxonomyServiceTransport` is the ABC for all transports. +- public child `DataTaxonomyServiceGrpcTransport` for sync gRPC transport (defined in `grpc.py`). +- public child `DataTaxonomyServiceGrpcAsyncIOTransport` for async gRPC transport (defined in `grpc_asyncio.py`). +- private child `_BaseDataTaxonomyServiceRestTransport` for base REST transport with inner classes `_BaseMETHOD` (defined in `rest_base.py`). +- public child `DataTaxonomyServiceRestTransport` for sync REST transport with inner classes `METHOD` derived from the parent's corresponding `_BaseMETHOD` classes (defined in `rest.py`). diff --git a/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/data_taxonomy_service/transports/__init__.py b/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/data_taxonomy_service/transports/__init__.py new file mode 100644 index 000000000000..aeab503c70d4 --- /dev/null +++ b/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/data_taxonomy_service/transports/__init__.py @@ -0,0 +1,33 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from collections import OrderedDict +from typing import Dict, Type + +from .base import DataTaxonomyServiceTransport +from .grpc import DataTaxonomyServiceGrpcTransport +from .grpc_asyncio import DataTaxonomyServiceGrpcAsyncIOTransport + + +# Compile a registry of transports. 
+_transport_registry = OrderedDict() # type: Dict[str, Type[DataTaxonomyServiceTransport]] +_transport_registry['grpc'] = DataTaxonomyServiceGrpcTransport +_transport_registry['grpc_asyncio'] = DataTaxonomyServiceGrpcAsyncIOTransport + +__all__ = ( + 'DataTaxonomyServiceTransport', + 'DataTaxonomyServiceGrpcTransport', + 'DataTaxonomyServiceGrpcAsyncIOTransport', +) diff --git a/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/data_taxonomy_service/transports/base.py b/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/data_taxonomy_service/transports/base.py new file mode 100644 index 000000000000..526e35b387b1 --- /dev/null +++ b/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/data_taxonomy_service/transports/base.py @@ -0,0 +1,443 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +import abc +from typing import Awaitable, Callable, Dict, Optional, Sequence, Union + +from google.cloud.dataplex_v1 import gapic_version as package_version + +import google.auth # type: ignore +import google.api_core +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry as retries +from google.api_core import operations_v1 +from google.auth import credentials as ga_credentials # type: ignore +from google.oauth2 import service_account # type: ignore + +from google.cloud.dataplex_v1.types import data_taxonomy +from google.cloud.dataplex_v1.types import data_taxonomy as gcd_data_taxonomy +from google.cloud.location import locations_pb2 # type: ignore +from google.iam.v1 import iam_policy_pb2 # type: ignore +from google.iam.v1 import policy_pb2 # type: ignore +from google.longrunning import operations_pb2 # type: ignore + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(gapic_version=package_version.__version__) + + +class DataTaxonomyServiceTransport(abc.ABC): + """Abstract transport class for DataTaxonomyService.""" + + AUTH_SCOPES = ( + 'https://www.googleapis.com/auth/cloud-platform', + ) + + DEFAULT_HOST: str = 'dataplex.googleapis.com' + def __init__( + self, *, + host: str = DEFAULT_HOST, + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + api_audience: Optional[str] = None, + **kwargs, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to (default: 'dataplex.googleapis.com'). + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. 
+            credentials_file (Optional[str]): A file with credentials that can
+                be loaded with :func:`google.auth.load_credentials_from_file`.
+                This argument is mutually exclusive with credentials.
+            scopes (Optional[Sequence[str]]): A list of scopes.
+            quota_project_id (Optional[str]): An optional project to use for billing
+                and quota.
+            client_info (google.api_core.gapic_v1.client_info.ClientInfo):
+                The client info used to send a user-agent string along with
+                API requests. If ``None``, then default info will be used.
+                Generally, you only need to set this if you're developing
+                your own client library.
+            always_use_jwt_access (Optional[bool]): Whether self signed JWT should
+                be used for service account credentials.
+        """
+
+        scopes_kwargs = {"scopes": scopes, "default_scopes": self.AUTH_SCOPES}
+
+        # Save the scopes.
+        self._scopes = scopes
+        if not hasattr(self, "_ignore_credentials"):
+            self._ignore_credentials: bool = False
+
+        # If no credentials are provided, then determine the appropriate
+        # defaults.
+        if credentials and credentials_file:
+            raise core_exceptions.DuplicateCredentialArgs("'credentials_file' and 'credentials' are mutually exclusive")
+
+        if credentials_file is not None:
+            credentials, _ = google.auth.load_credentials_from_file(
+                credentials_file,
+                **scopes_kwargs,
+                quota_project_id=quota_project_id
+            )
+        elif credentials is None and not self._ignore_credentials:
+            credentials, _ = google.auth.default(**scopes_kwargs, quota_project_id=quota_project_id)
+            # Don't apply an audience if the user passed a credentials file.
+            if hasattr(credentials, "with_gdch_audience"):
+                credentials = credentials.with_gdch_audience(api_audience if api_audience else host)
+
+        # If the credentials are service account credentials, then always try to use self signed JWT.
+        if always_use_jwt_access and isinstance(credentials, service_account.Credentials) and hasattr(service_account.Credentials, "with_always_use_jwt_access"):
+            credentials = credentials.with_always_use_jwt_access(True)
+
+        # Save the credentials.
+        self._credentials = credentials
+
+        # Save the hostname. Default to port 443 (HTTPS) if none is specified.
+        if ':' not in host:
+            host += ':443'
+        self._host = host
+
+    @property
+    def host(self):
+        return self._host
+
+    def _prep_wrapped_methods(self, client_info):
+        # Precompute the wrapped methods.
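+        # Wrapping happens once at transport construction time, so the retry
+        # and timeout defaults and the ``client_info`` user-agent metadata are
+        # attached to every RPC without per-call overhead.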
+ self._wrapped_methods = { + self.create_data_taxonomy: gapic_v1.method.wrap_method( + self.create_data_taxonomy, + default_timeout=None, + client_info=client_info, + ), + self.update_data_taxonomy: gapic_v1.method.wrap_method( + self.update_data_taxonomy, + default_timeout=None, + client_info=client_info, + ), + self.delete_data_taxonomy: gapic_v1.method.wrap_method( + self.delete_data_taxonomy, + default_timeout=None, + client_info=client_info, + ), + self.list_data_taxonomies: gapic_v1.method.wrap_method( + self.list_data_taxonomies, + default_timeout=None, + client_info=client_info, + ), + self.get_data_taxonomy: gapic_v1.method.wrap_method( + self.get_data_taxonomy, + default_timeout=None, + client_info=client_info, + ), + self.create_data_attribute_binding: gapic_v1.method.wrap_method( + self.create_data_attribute_binding, + default_timeout=None, + client_info=client_info, + ), + self.update_data_attribute_binding: gapic_v1.method.wrap_method( + self.update_data_attribute_binding, + default_timeout=None, + client_info=client_info, + ), + self.delete_data_attribute_binding: gapic_v1.method.wrap_method( + self.delete_data_attribute_binding, + default_timeout=None, + client_info=client_info, + ), + self.list_data_attribute_bindings: gapic_v1.method.wrap_method( + self.list_data_attribute_bindings, + default_timeout=None, + client_info=client_info, + ), + self.get_data_attribute_binding: gapic_v1.method.wrap_method( + self.get_data_attribute_binding, + default_timeout=None, + client_info=client_info, + ), + self.create_data_attribute: gapic_v1.method.wrap_method( + self.create_data_attribute, + default_timeout=None, + client_info=client_info, + ), + self.update_data_attribute: gapic_v1.method.wrap_method( + self.update_data_attribute, + default_timeout=None, + client_info=client_info, + ), + self.delete_data_attribute: gapic_v1.method.wrap_method( + self.delete_data_attribute, + default_timeout=None, + client_info=client_info, + ), + self.list_data_attributes: gapic_v1.method.wrap_method( + self.list_data_attributes, + default_timeout=None, + client_info=client_info, + ), + self.get_data_attribute: gapic_v1.method.wrap_method( + self.get_data_attribute, + default_timeout=None, + client_info=client_info, + ), + self.get_location: gapic_v1.method.wrap_method( + self.get_location, + default_timeout=None, + client_info=client_info, + ), + self.list_locations: gapic_v1.method.wrap_method( + self.list_locations, + default_timeout=None, + client_info=client_info, + ), + self.cancel_operation: gapic_v1.method.wrap_method( + self.cancel_operation, + default_timeout=None, + client_info=client_info, + ), + self.delete_operation: gapic_v1.method.wrap_method( + self.delete_operation, + default_timeout=None, + client_info=client_info, + ), + self.get_operation: gapic_v1.method.wrap_method( + self.get_operation, + default_timeout=None, + client_info=client_info, + ), + self.list_operations: gapic_v1.method.wrap_method( + self.list_operations, + default_timeout=None, + client_info=client_info, + ), + } + + def close(self): + """Closes resources associated with the transport. + + .. warning:: + Only call this method if the transport is NOT shared + with other clients - this may cause errors in other clients! 
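+
+        ``DataTaxonomyServiceClient.__exit__`` calls this method automatically
+        when the client is used as a context manager.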
+ """ + raise NotImplementedError() + + @property + def operations_client(self): + """Return the client designed to process long-running operations.""" + raise NotImplementedError() + + @property + def create_data_taxonomy(self) -> Callable[ + [gcd_data_taxonomy.CreateDataTaxonomyRequest], + Union[ + operations_pb2.Operation, + Awaitable[operations_pb2.Operation] + ]]: + raise NotImplementedError() + + @property + def update_data_taxonomy(self) -> Callable[ + [gcd_data_taxonomy.UpdateDataTaxonomyRequest], + Union[ + operations_pb2.Operation, + Awaitable[operations_pb2.Operation] + ]]: + raise NotImplementedError() + + @property + def delete_data_taxonomy(self) -> Callable[ + [data_taxonomy.DeleteDataTaxonomyRequest], + Union[ + operations_pb2.Operation, + Awaitable[operations_pb2.Operation] + ]]: + raise NotImplementedError() + + @property + def list_data_taxonomies(self) -> Callable[ + [data_taxonomy.ListDataTaxonomiesRequest], + Union[ + data_taxonomy.ListDataTaxonomiesResponse, + Awaitable[data_taxonomy.ListDataTaxonomiesResponse] + ]]: + raise NotImplementedError() + + @property + def get_data_taxonomy(self) -> Callable[ + [data_taxonomy.GetDataTaxonomyRequest], + Union[ + data_taxonomy.DataTaxonomy, + Awaitable[data_taxonomy.DataTaxonomy] + ]]: + raise NotImplementedError() + + @property + def create_data_attribute_binding(self) -> Callable[ + [data_taxonomy.CreateDataAttributeBindingRequest], + Union[ + operations_pb2.Operation, + Awaitable[operations_pb2.Operation] + ]]: + raise NotImplementedError() + + @property + def update_data_attribute_binding(self) -> Callable[ + [data_taxonomy.UpdateDataAttributeBindingRequest], + Union[ + operations_pb2.Operation, + Awaitable[operations_pb2.Operation] + ]]: + raise NotImplementedError() + + @property + def delete_data_attribute_binding(self) -> Callable[ + [data_taxonomy.DeleteDataAttributeBindingRequest], + Union[ + operations_pb2.Operation, + Awaitable[operations_pb2.Operation] + ]]: + raise NotImplementedError() + + @property + def list_data_attribute_bindings(self) -> Callable[ + [data_taxonomy.ListDataAttributeBindingsRequest], + Union[ + data_taxonomy.ListDataAttributeBindingsResponse, + Awaitable[data_taxonomy.ListDataAttributeBindingsResponse] + ]]: + raise NotImplementedError() + + @property + def get_data_attribute_binding(self) -> Callable[ + [data_taxonomy.GetDataAttributeBindingRequest], + Union[ + data_taxonomy.DataAttributeBinding, + Awaitable[data_taxonomy.DataAttributeBinding] + ]]: + raise NotImplementedError() + + @property + def create_data_attribute(self) -> Callable[ + [data_taxonomy.CreateDataAttributeRequest], + Union[ + operations_pb2.Operation, + Awaitable[operations_pb2.Operation] + ]]: + raise NotImplementedError() + + @property + def update_data_attribute(self) -> Callable[ + [data_taxonomy.UpdateDataAttributeRequest], + Union[ + operations_pb2.Operation, + Awaitable[operations_pb2.Operation] + ]]: + raise NotImplementedError() + + @property + def delete_data_attribute(self) -> Callable[ + [data_taxonomy.DeleteDataAttributeRequest], + Union[ + operations_pb2.Operation, + Awaitable[operations_pb2.Operation] + ]]: + raise NotImplementedError() + + @property + def list_data_attributes(self) -> Callable[ + [data_taxonomy.ListDataAttributesRequest], + Union[ + data_taxonomy.ListDataAttributesResponse, + Awaitable[data_taxonomy.ListDataAttributesResponse] + ]]: + raise NotImplementedError() + + @property + def get_data_attribute(self) -> Callable[ + [data_taxonomy.GetDataAttributeRequest], + Union[ + 
data_taxonomy.DataAttribute, + Awaitable[data_taxonomy.DataAttribute] + ]]: + raise NotImplementedError() + + @property + def list_operations( + self, + ) -> Callable[ + [operations_pb2.ListOperationsRequest], + Union[operations_pb2.ListOperationsResponse, Awaitable[operations_pb2.ListOperationsResponse]], + ]: + raise NotImplementedError() + + @property + def get_operation( + self, + ) -> Callable[ + [operations_pb2.GetOperationRequest], + Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], + ]: + raise NotImplementedError() + + @property + def cancel_operation( + self, + ) -> Callable[ + [operations_pb2.CancelOperationRequest], + None, + ]: + raise NotImplementedError() + + @property + def delete_operation( + self, + ) -> Callable[ + [operations_pb2.DeleteOperationRequest], + None, + ]: + raise NotImplementedError() + + @property + def get_location(self, + ) -> Callable[ + [locations_pb2.GetLocationRequest], + Union[locations_pb2.Location, Awaitable[locations_pb2.Location]], + ]: + raise NotImplementedError() + + @property + def list_locations(self, + ) -> Callable[ + [locations_pb2.ListLocationsRequest], + Union[locations_pb2.ListLocationsResponse, Awaitable[locations_pb2.ListLocationsResponse]], + ]: + raise NotImplementedError() + + @property + def kind(self) -> str: + raise NotImplementedError() + + +__all__ = ( + 'DataTaxonomyServiceTransport', +) diff --git a/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/data_taxonomy_service/transports/grpc.py b/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/data_taxonomy_service/transports/grpc.py new file mode 100644 index 000000000000..d7703acdda72 --- /dev/null +++ b/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/data_taxonomy_service/transports/grpc.py @@ -0,0 +1,773 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +import warnings +from typing import Callable, Dict, Optional, Sequence, Tuple, Union + +from google.api_core import grpc_helpers +from google.api_core import operations_v1 +from google.api_core import gapic_v1 +import google.auth # type: ignore +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore + +import grpc # type: ignore + +from google.cloud.dataplex_v1.types import data_taxonomy +from google.cloud.dataplex_v1.types import data_taxonomy as gcd_data_taxonomy +from google.cloud.location import locations_pb2 # type: ignore +from google.iam.v1 import iam_policy_pb2 # type: ignore +from google.iam.v1 import policy_pb2 # type: ignore +from google.longrunning import operations_pb2 # type: ignore +from .base import DataTaxonomyServiceTransport, DEFAULT_CLIENT_INFO + + +class DataTaxonomyServiceGrpcTransport(DataTaxonomyServiceTransport): + """gRPC backend transport for DataTaxonomyService. + + DataTaxonomyService enables attribute-based governance. 
The
+    resources currently offered include DataTaxonomy and
+    DataAttribute.
+
+    This class defines the same methods as the primary client, so the
+    primary client can load the underlying transport implementation
+    and call it.
+
+    It sends protocol buffers over the wire using gRPC (which is built on
+    top of HTTP/2); the ``grpcio`` package must be installed.
+    """
+    _stubs: Dict[str, Callable]
+
+    def __init__(self, *,
+            host: str = 'dataplex.googleapis.com',
+            credentials: Optional[ga_credentials.Credentials] = None,
+            credentials_file: Optional[str] = None,
+            scopes: Optional[Sequence[str]] = None,
+            channel: Optional[Union[grpc.Channel, Callable[..., grpc.Channel]]] = None,
+            api_mtls_endpoint: Optional[str] = None,
+            client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None,
+            ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None,
+            client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None,
+            quota_project_id: Optional[str] = None,
+            client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO,
+            always_use_jwt_access: Optional[bool] = False,
+            api_audience: Optional[str] = None,
+            ) -> None:
+        """Instantiate the transport.
+
+        Args:
+            host (Optional[str]):
+                 The hostname to connect to (default: 'dataplex.googleapis.com').
+            credentials (Optional[google.auth.credentials.Credentials]): The
+                authorization credentials to attach to requests. These
+                credentials identify the application to the service; if none
+                are specified, the client will attempt to ascertain the
+                credentials from the environment.
+                This argument is ignored if a ``channel`` instance is provided.
+            credentials_file (Optional[str]): A file with credentials that can
+                be loaded with :func:`google.auth.load_credentials_from_file`.
+                This argument is ignored if a ``channel`` instance is provided.
+            scopes (Optional[Sequence[str]]): A list of scopes. This argument is
+                ignored if a ``channel`` instance is provided.
+            channel (Optional[Union[grpc.Channel, Callable[..., grpc.Channel]]]):
+                A ``Channel`` instance through which to make calls, or a Callable
+                that constructs and returns one. If set to None, ``self.create_channel``
+                is used to create the channel. If a Callable is given, it will be called
+                with the same arguments as used in ``self.create_channel``.
+            api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint.
+                If provided, it overrides the ``host`` argument and tries to create
+                a mutual TLS channel with client SSL credentials from
+                ``client_cert_source`` or application default SSL credentials.
+            client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]):
+                Deprecated. A callback to provide client SSL certificate bytes and
+                private key bytes, both in PEM format. It is ignored if
+                ``api_mtls_endpoint`` is None.
+            ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials
+                for the grpc channel. It is ignored if a ``channel`` instance is provided.
+            client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]):
+                A callback to provide client certificate bytes and private key bytes,
+                both in PEM format. It is used to configure a mutual TLS channel. It is
+                ignored if a ``channel`` instance or ``ssl_channel_credentials`` is provided.
+            quota_project_id (Optional[str]): An optional project to use for billing
+                and quota.
+            client_info (google.api_core.gapic_v1.client_info.ClientInfo):
+                The client info used to send a user-agent string along with
+                API requests. If ``None``, then default info will be used.
+ Generally, you only need to set this if you're developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport + creation failed for any reason. + google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` + and ``credentials_file`` are passed. + """ + self._grpc_channel = None + self._ssl_channel_credentials = ssl_channel_credentials + self._stubs: Dict[str, Callable] = {} + self._operations_client: Optional[operations_v1.OperationsClient] = None + + if api_mtls_endpoint: + warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning) + if client_cert_source: + warnings.warn("client_cert_source is deprecated", DeprecationWarning) + + if isinstance(channel, grpc.Channel): + # Ignore credentials if a channel was passed. + credentials = None + self._ignore_credentials = True + # If a channel was explicitly provided, set it. + self._grpc_channel = channel + self._ssl_channel_credentials = None + + else: + if api_mtls_endpoint: + host = api_mtls_endpoint + + # Create SSL credentials with client_cert_source or application + # default SSL credentials. + if client_cert_source: + cert, key = client_cert_source() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + else: + self._ssl_channel_credentials = SslCredentials().ssl_credentials + + else: + if client_cert_source_for_mtls and not ssl_channel_credentials: + cert, key = client_cert_source_for_mtls() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + + # The base transport sets the host, credentials and scopes + super().__init__( + host=host, + credentials=credentials, + credentials_file=credentials_file, + scopes=scopes, + quota_project_id=quota_project_id, + client_info=client_info, + always_use_jwt_access=always_use_jwt_access, + api_audience=api_audience, + ) + + if not self._grpc_channel: + # initialize with the provided callable or the default channel + channel_init = channel or type(self).create_channel + self._grpc_channel = channel_init( + self._host, + # use the credentials which are saved + credentials=self._credentials, + # Set ``credentials_file`` to ``None`` here as + # the credentials that we saved earlier should be used. + credentials_file=None, + scopes=self._scopes, + ssl_credentials=self._ssl_channel_credentials, + quota_project_id=quota_project_id, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + # Wrap messages. This must be done after self._grpc_channel exists + self._prep_wrapped_messages(client_info) + + @classmethod + def create_channel(cls, + host: str = 'dataplex.googleapis.com', + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + **kwargs) -> grpc.Channel: + """Create and return a gRPC channel object. + Args: + host (Optional[str]): The host for the channel to use. + credentials (Optional[~.Credentials]): The + authorization credentials to attach to requests. These + credentials identify this application to the service. If + none are specified, the client will attempt to ascertain + the credentials from the environment. 
+            credentials_file (Optional[str]): A file with credentials that can
+                be loaded with :func:`google.auth.load_credentials_from_file`.
+                This argument is mutually exclusive with credentials.
+            scopes (Optional[Sequence[str]]): An optional list of scopes needed for this
+                service. These are only used when credentials are not specified and
+                are passed to :func:`google.auth.default`.
+            quota_project_id (Optional[str]): An optional project to use for billing
+                and quota.
+            kwargs (Optional[dict]): Keyword arguments, which are passed to the
+                channel creation.
+        Returns:
+            grpc.Channel: A gRPC channel object.
+
+        Raises:
+            google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials``
+                and ``credentials_file`` are passed.
+        """
+
+        return grpc_helpers.create_channel(
+            host,
+            credentials=credentials,
+            credentials_file=credentials_file,
+            quota_project_id=quota_project_id,
+            default_scopes=cls.AUTH_SCOPES,
+            scopes=scopes,
+            default_host=cls.DEFAULT_HOST,
+            **kwargs
+        )
+
+    @property
+    def grpc_channel(self) -> grpc.Channel:
+        """Return the channel designed to connect to this service.
+        """
+        return self._grpc_channel
+
+    @property
+    def operations_client(self) -> operations_v1.OperationsClient:
+        """Create the client designed to process long-running operations.
+
+        This property caches on the instance; repeated calls return the same
+        client.
+        """
+        # Quick check: Only create a new client if we do not already have one.
+        if self._operations_client is None:
+            self._operations_client = operations_v1.OperationsClient(
+                self.grpc_channel
+            )
+
+        # Return the client from cache.
+        return self._operations_client
+
+    @property
+    def create_data_taxonomy(self) -> Callable[
+            [gcd_data_taxonomy.CreateDataTaxonomyRequest],
+            operations_pb2.Operation]:
+        r"""Return a callable for the create data taxonomy method over gRPC.
+
+        Create a DataTaxonomy resource.
+
+        Returns:
+            Callable[[~.CreateDataTaxonomyRequest],
+                    ~.Operation]:
+                A function that, when called, will call the underlying RPC
+                on the server.
+        """
+        # Generate a "stub function" on-the-fly which will actually make
+        # the request.
+        # gRPC handles serialization and deserialization, so we just need
+        # to pass in the functions for each.
+        if 'create_data_taxonomy' not in self._stubs:
+            self._stubs['create_data_taxonomy'] = self.grpc_channel.unary_unary(
+                '/google.cloud.dataplex.v1.DataTaxonomyService/CreateDataTaxonomy',
+                request_serializer=gcd_data_taxonomy.CreateDataTaxonomyRequest.serialize,
+                response_deserializer=operations_pb2.Operation.FromString,
+            )
+        return self._stubs['create_data_taxonomy']
+
+    @property
+    def update_data_taxonomy(self) -> Callable[
+            [gcd_data_taxonomy.UpdateDataTaxonomyRequest],
+            operations_pb2.Operation]:
+        r"""Return a callable for the update data taxonomy method over gRPC.
+
+        Updates a DataTaxonomy resource.
+
+        Returns:
+            Callable[[~.UpdateDataTaxonomyRequest],
+                    ~.Operation]:
+                A function that, when called, will call the underlying RPC
+                on the server.
+        """
+        # Generate a "stub function" on-the-fly which will actually make
+        # the request.
+        # gRPC handles serialization and deserialization, so we just need
+        # to pass in the functions for each.
+ if 'update_data_taxonomy' not in self._stubs: + self._stubs['update_data_taxonomy'] = self.grpc_channel.unary_unary( + '/google.cloud.dataplex.v1.DataTaxonomyService/UpdateDataTaxonomy', + request_serializer=gcd_data_taxonomy.UpdateDataTaxonomyRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs['update_data_taxonomy'] + + @property + def delete_data_taxonomy(self) -> Callable[ + [data_taxonomy.DeleteDataTaxonomyRequest], + operations_pb2.Operation]: + r"""Return a callable for the delete data taxonomy method over gRPC. + + Deletes a DataTaxonomy resource. All attributes + within the DataTaxonomy must be deleted before the + DataTaxonomy can be deleted. + + Returns: + Callable[[~.DeleteDataTaxonomyRequest], + ~.Operation]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'delete_data_taxonomy' not in self._stubs: + self._stubs['delete_data_taxonomy'] = self.grpc_channel.unary_unary( + '/google.cloud.dataplex.v1.DataTaxonomyService/DeleteDataTaxonomy', + request_serializer=data_taxonomy.DeleteDataTaxonomyRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs['delete_data_taxonomy'] + + @property + def list_data_taxonomies(self) -> Callable[ + [data_taxonomy.ListDataTaxonomiesRequest], + data_taxonomy.ListDataTaxonomiesResponse]: + r"""Return a callable for the list data taxonomies method over gRPC. + + Lists DataTaxonomy resources in a project and + location. + + Returns: + Callable[[~.ListDataTaxonomiesRequest], + ~.ListDataTaxonomiesResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'list_data_taxonomies' not in self._stubs: + self._stubs['list_data_taxonomies'] = self.grpc_channel.unary_unary( + '/google.cloud.dataplex.v1.DataTaxonomyService/ListDataTaxonomies', + request_serializer=data_taxonomy.ListDataTaxonomiesRequest.serialize, + response_deserializer=data_taxonomy.ListDataTaxonomiesResponse.deserialize, + ) + return self._stubs['list_data_taxonomies'] + + @property + def get_data_taxonomy(self) -> Callable[ + [data_taxonomy.GetDataTaxonomyRequest], + data_taxonomy.DataTaxonomy]: + r"""Return a callable for the get data taxonomy method over gRPC. + + Retrieves a DataTaxonomy resource. + + Returns: + Callable[[~.GetDataTaxonomyRequest], + ~.DataTaxonomy]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if 'get_data_taxonomy' not in self._stubs: + self._stubs['get_data_taxonomy'] = self.grpc_channel.unary_unary( + '/google.cloud.dataplex.v1.DataTaxonomyService/GetDataTaxonomy', + request_serializer=data_taxonomy.GetDataTaxonomyRequest.serialize, + response_deserializer=data_taxonomy.DataTaxonomy.deserialize, + ) + return self._stubs['get_data_taxonomy'] + + @property + def create_data_attribute_binding(self) -> Callable[ + [data_taxonomy.CreateDataAttributeBindingRequest], + operations_pb2.Operation]: + r"""Return a callable for the create data attribute binding method over gRPC. + + Create a DataAttributeBinding resource. + + Returns: + Callable[[~.CreateDataAttributeBindingRequest], + ~.Operation]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'create_data_attribute_binding' not in self._stubs: + self._stubs['create_data_attribute_binding'] = self.grpc_channel.unary_unary( + '/google.cloud.dataplex.v1.DataTaxonomyService/CreateDataAttributeBinding', + request_serializer=data_taxonomy.CreateDataAttributeBindingRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs['create_data_attribute_binding'] + + @property + def update_data_attribute_binding(self) -> Callable[ + [data_taxonomy.UpdateDataAttributeBindingRequest], + operations_pb2.Operation]: + r"""Return a callable for the update data attribute binding method over gRPC. + + Updates a DataAttributeBinding resource. + + Returns: + Callable[[~.UpdateDataAttributeBindingRequest], + ~.Operation]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'update_data_attribute_binding' not in self._stubs: + self._stubs['update_data_attribute_binding'] = self.grpc_channel.unary_unary( + '/google.cloud.dataplex.v1.DataTaxonomyService/UpdateDataAttributeBinding', + request_serializer=data_taxonomy.UpdateDataAttributeBindingRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs['update_data_attribute_binding'] + + @property + def delete_data_attribute_binding(self) -> Callable[ + [data_taxonomy.DeleteDataAttributeBindingRequest], + operations_pb2.Operation]: + r"""Return a callable for the delete data attribute binding method over gRPC. + + Deletes a DataAttributeBinding resource. All + attributes within the DataAttributeBinding must be + deleted before the DataAttributeBinding can be deleted. + + Returns: + Callable[[~.DeleteDataAttributeBindingRequest], + ~.Operation]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if 'delete_data_attribute_binding' not in self._stubs: + self._stubs['delete_data_attribute_binding'] = self.grpc_channel.unary_unary( + '/google.cloud.dataplex.v1.DataTaxonomyService/DeleteDataAttributeBinding', + request_serializer=data_taxonomy.DeleteDataAttributeBindingRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs['delete_data_attribute_binding'] + + @property + def list_data_attribute_bindings(self) -> Callable[ + [data_taxonomy.ListDataAttributeBindingsRequest], + data_taxonomy.ListDataAttributeBindingsResponse]: + r"""Return a callable for the list data attribute bindings method over gRPC. + + Lists DataAttributeBinding resources in a project and + location. + + Returns: + Callable[[~.ListDataAttributeBindingsRequest], + ~.ListDataAttributeBindingsResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'list_data_attribute_bindings' not in self._stubs: + self._stubs['list_data_attribute_bindings'] = self.grpc_channel.unary_unary( + '/google.cloud.dataplex.v1.DataTaxonomyService/ListDataAttributeBindings', + request_serializer=data_taxonomy.ListDataAttributeBindingsRequest.serialize, + response_deserializer=data_taxonomy.ListDataAttributeBindingsResponse.deserialize, + ) + return self._stubs['list_data_attribute_bindings'] + + @property + def get_data_attribute_binding(self) -> Callable[ + [data_taxonomy.GetDataAttributeBindingRequest], + data_taxonomy.DataAttributeBinding]: + r"""Return a callable for the get data attribute binding method over gRPC. + + Retrieves a DataAttributeBinding resource. + + Returns: + Callable[[~.GetDataAttributeBindingRequest], + ~.DataAttributeBinding]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'get_data_attribute_binding' not in self._stubs: + self._stubs['get_data_attribute_binding'] = self.grpc_channel.unary_unary( + '/google.cloud.dataplex.v1.DataTaxonomyService/GetDataAttributeBinding', + request_serializer=data_taxonomy.GetDataAttributeBindingRequest.serialize, + response_deserializer=data_taxonomy.DataAttributeBinding.deserialize, + ) + return self._stubs['get_data_attribute_binding'] + + @property + def create_data_attribute(self) -> Callable[ + [data_taxonomy.CreateDataAttributeRequest], + operations_pb2.Operation]: + r"""Return a callable for the create data attribute method over gRPC. + + Create a DataAttribute resource. + + Returns: + Callable[[~.CreateDataAttributeRequest], + ~.Operation]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if 'create_data_attribute' not in self._stubs: + self._stubs['create_data_attribute'] = self.grpc_channel.unary_unary( + '/google.cloud.dataplex.v1.DataTaxonomyService/CreateDataAttribute', + request_serializer=data_taxonomy.CreateDataAttributeRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs['create_data_attribute'] + + @property + def update_data_attribute(self) -> Callable[ + [data_taxonomy.UpdateDataAttributeRequest], + operations_pb2.Operation]: + r"""Return a callable for the update data attribute method over gRPC. + + Updates a DataAttribute resource. + + Returns: + Callable[[~.UpdateDataAttributeRequest], + ~.Operation]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'update_data_attribute' not in self._stubs: + self._stubs['update_data_attribute'] = self.grpc_channel.unary_unary( + '/google.cloud.dataplex.v1.DataTaxonomyService/UpdateDataAttribute', + request_serializer=data_taxonomy.UpdateDataAttributeRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs['update_data_attribute'] + + @property + def delete_data_attribute(self) -> Callable[ + [data_taxonomy.DeleteDataAttributeRequest], + operations_pb2.Operation]: + r"""Return a callable for the delete data attribute method over gRPC. + + Deletes a Data Attribute resource. + + Returns: + Callable[[~.DeleteDataAttributeRequest], + ~.Operation]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'delete_data_attribute' not in self._stubs: + self._stubs['delete_data_attribute'] = self.grpc_channel.unary_unary( + '/google.cloud.dataplex.v1.DataTaxonomyService/DeleteDataAttribute', + request_serializer=data_taxonomy.DeleteDataAttributeRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs['delete_data_attribute'] + + @property + def list_data_attributes(self) -> Callable[ + [data_taxonomy.ListDataAttributesRequest], + data_taxonomy.ListDataAttributesResponse]: + r"""Return a callable for the list data attributes method over gRPC. + + Lists Data Attribute resources in a DataTaxonomy. + + Returns: + Callable[[~.ListDataAttributesRequest], + ~.ListDataAttributesResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'list_data_attributes' not in self._stubs: + self._stubs['list_data_attributes'] = self.grpc_channel.unary_unary( + '/google.cloud.dataplex.v1.DataTaxonomyService/ListDataAttributes', + request_serializer=data_taxonomy.ListDataAttributesRequest.serialize, + response_deserializer=data_taxonomy.ListDataAttributesResponse.deserialize, + ) + return self._stubs['list_data_attributes'] + + @property + def get_data_attribute(self) -> Callable[ + [data_taxonomy.GetDataAttributeRequest], + data_taxonomy.DataAttribute]: + r"""Return a callable for the get data attribute method over gRPC. 
+ + Retrieves a Data Attribute resource. + + Returns: + Callable[[~.GetDataAttributeRequest], + ~.DataAttribute]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'get_data_attribute' not in self._stubs: + self._stubs['get_data_attribute'] = self.grpc_channel.unary_unary( + '/google.cloud.dataplex.v1.DataTaxonomyService/GetDataAttribute', + request_serializer=data_taxonomy.GetDataAttributeRequest.serialize, + response_deserializer=data_taxonomy.DataAttribute.deserialize, + ) + return self._stubs['get_data_attribute'] + + def close(self): + self.grpc_channel.close() + + @property + def delete_operation( + self, + ) -> Callable[[operations_pb2.DeleteOperationRequest], None]: + r"""Return a callable for the delete_operation method over gRPC. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "delete_operation" not in self._stubs: + self._stubs["delete_operation"] = self.grpc_channel.unary_unary( + "/google.longrunning.Operations/DeleteOperation", + request_serializer=operations_pb2.DeleteOperationRequest.SerializeToString, + response_deserializer=None, + ) + return self._stubs["delete_operation"] + + @property + def cancel_operation( + self, + ) -> Callable[[operations_pb2.CancelOperationRequest], None]: + r"""Return a callable for the cancel_operation method over gRPC. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "cancel_operation" not in self._stubs: + self._stubs["cancel_operation"] = self.grpc_channel.unary_unary( + "/google.longrunning.Operations/CancelOperation", + request_serializer=operations_pb2.CancelOperationRequest.SerializeToString, + response_deserializer=None, + ) + return self._stubs["cancel_operation"] + + @property + def get_operation( + self, + ) -> Callable[[operations_pb2.GetOperationRequest], operations_pb2.Operation]: + r"""Return a callable for the get_operation method over gRPC. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_operation" not in self._stubs: + self._stubs["get_operation"] = self.grpc_channel.unary_unary( + "/google.longrunning.Operations/GetOperation", + request_serializer=operations_pb2.GetOperationRequest.SerializeToString, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["get_operation"] + + @property + def list_operations( + self, + ) -> Callable[[operations_pb2.ListOperationsRequest], operations_pb2.ListOperationsResponse]: + r"""Return a callable for the list_operations method over gRPC. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "list_operations" not in self._stubs: + self._stubs["list_operations"] = self.grpc_channel.unary_unary( + "/google.longrunning.Operations/ListOperations", + request_serializer=operations_pb2.ListOperationsRequest.SerializeToString, + response_deserializer=operations_pb2.ListOperationsResponse.FromString, + ) + return self._stubs["list_operations"] + + @property + def list_locations( + self, + ) -> Callable[[locations_pb2.ListLocationsRequest], locations_pb2.ListLocationsResponse]: + r"""Return a callable for the list locations method over gRPC. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "list_locations" not in self._stubs: + self._stubs["list_locations"] = self.grpc_channel.unary_unary( + "/google.cloud.location.Locations/ListLocations", + request_serializer=locations_pb2.ListLocationsRequest.SerializeToString, + response_deserializer=locations_pb2.ListLocationsResponse.FromString, + ) + return self._stubs["list_locations"] + + @property + def get_location( + self, + ) -> Callable[[locations_pb2.GetLocationRequest], locations_pb2.Location]: + r"""Return a callable for the list locations method over gRPC. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_location" not in self._stubs: + self._stubs["get_location"] = self.grpc_channel.unary_unary( + "/google.cloud.location.Locations/GetLocation", + request_serializer=locations_pb2.GetLocationRequest.SerializeToString, + response_deserializer=locations_pb2.Location.FromString, + ) + return self._stubs["get_location"] + + @property + def kind(self) -> str: + return "grpc" + + +__all__ = ( + 'DataTaxonomyServiceGrpcTransport', +) diff --git a/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/data_taxonomy_service/transports/grpc_asyncio.py b/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/data_taxonomy_service/transports/grpc_asyncio.py new file mode 100644 index 000000000000..a8b233e044c8 --- /dev/null +++ b/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/data_taxonomy_service/transports/grpc_asyncio.py @@ -0,0 +1,894 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
diff --git a/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/data_taxonomy_service/transports/grpc_asyncio.py b/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/data_taxonomy_service/transports/grpc_asyncio.py
new file mode 100644
index 000000000000..a8b233e044c8
--- /dev/null
+++ b/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/data_taxonomy_service/transports/grpc_asyncio.py
@@ -0,0 +1,894 @@
+# -*- coding: utf-8 -*-
+# Copyright 2024 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+import inspect
+import warnings
+from typing import Awaitable, Callable, Dict, Optional, Sequence, Tuple, Union
+
+from google.api_core import gapic_v1
+from google.api_core import grpc_helpers_async
+from google.api_core import exceptions as core_exceptions
+from google.api_core import retry_async as retries
+from google.api_core import operations_v1
+from google.auth import credentials as ga_credentials  # type: ignore
+from google.auth.transport.grpc import SslCredentials  # type: ignore
+
+import grpc  # type: ignore
+from grpc.experimental import aio  # type: ignore
+
+from google.cloud.dataplex_v1.types import data_taxonomy
+from google.cloud.dataplex_v1.types import data_taxonomy as gcd_data_taxonomy
+from google.cloud.location import locations_pb2  # type: ignore
+from google.iam.v1 import iam_policy_pb2  # type: ignore
+from google.iam.v1 import policy_pb2  # type: ignore
+from google.longrunning import operations_pb2  # type: ignore
+from .base import DataTaxonomyServiceTransport, DEFAULT_CLIENT_INFO
+from .grpc import DataTaxonomyServiceGrpcTransport
+
+
+class DataTaxonomyServiceGrpcAsyncIOTransport(DataTaxonomyServiceTransport):
+    """gRPC AsyncIO backend transport for DataTaxonomyService.
+
+    DataTaxonomyService enables attribute-based governance. The
+    resources currently offered include DataTaxonomy and
+    DataAttribute.
+
+    This class defines the same methods as the primary client, so the
+    primary client can load the underlying transport implementation
+    and call it.
+
+    It sends protocol buffers over the wire using gRPC (which is built on
+    top of HTTP/2); the ``grpcio`` package must be installed.
+    """
+
+    _grpc_channel: aio.Channel
+    _stubs: Dict[str, Callable] = {}
+
+    @classmethod
+    def create_channel(cls,
+                       host: str = 'dataplex.googleapis.com',
+                       credentials: Optional[ga_credentials.Credentials] = None,
+                       credentials_file: Optional[str] = None,
+                       scopes: Optional[Sequence[str]] = None,
+                       quota_project_id: Optional[str] = None,
+                       **kwargs) -> aio.Channel:
+        """Create and return a gRPC AsyncIO channel object.
+        Args:
+            host (Optional[str]): The host for the channel to use.
+            credentials (Optional[~.Credentials]): The
+                authorization credentials to attach to requests. These
+                credentials identify this application to the service. If
+                none are specified, the client will attempt to ascertain
+                the credentials from the environment.
+            credentials_file (Optional[str]): A file with credentials that can
+                be loaded with :func:`google.auth.load_credentials_from_file`.
+            scopes (Optional[Sequence[str]]): An optional list of scopes needed for this
+                service. These are only used when credentials are not specified and
+                are passed to :func:`google.auth.default`.
+            quota_project_id (Optional[str]): An optional project to use for billing
+                and quota.
+            kwargs (Optional[dict]): Keyword arguments, which are passed to the
+                channel creation.
+        Returns:
+            aio.Channel: A gRPC AsyncIO channel object.
+        """
+
+        return grpc_helpers_async.create_channel(
+            host,
+            credentials=credentials,
+            credentials_file=credentials_file,
+            quota_project_id=quota_project_id,
+            default_scopes=cls.AUTH_SCOPES,
+            scopes=scopes,
+            default_host=cls.DEFAULT_HOST,
+            **kwargs
+        )
+
+    def __init__(self, *,
+            host: str = 'dataplex.googleapis.com',
+            credentials: Optional[ga_credentials.Credentials] = None,
+            credentials_file: Optional[str] = None,
+            scopes: Optional[Sequence[str]] = None,
+            channel: Optional[Union[aio.Channel, Callable[..., aio.Channel]]] = None,
+            api_mtls_endpoint: Optional[str] = None,
+            client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None,
+            ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None,
+            client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None,
+            quota_project_id: Optional[str] = None,
+            client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO,
+            always_use_jwt_access: Optional[bool] = False,
+            api_audience: Optional[str] = None,
+            ) -> None:
+        """Instantiate the transport.
+
+        Args:
+            host (Optional[str]):
+                 The hostname to connect to (default: 'dataplex.googleapis.com').
+            credentials (Optional[google.auth.credentials.Credentials]): The
+                authorization credentials to attach to requests. These
+                credentials identify the application to the service; if none
+                are specified, the client will attempt to ascertain the
+                credentials from the environment.
+                This argument is ignored if a ``channel`` instance is provided.
+            credentials_file (Optional[str]): A file with credentials that can
+                be loaded with :func:`google.auth.load_credentials_from_file`.
+                This argument is ignored if a ``channel`` instance is provided.
+            scopes (Optional[Sequence[str]]): An optional list of scopes needed for this
+                service. These are only used when credentials are not specified and
+                are passed to :func:`google.auth.default`.
+            channel (Optional[Union[aio.Channel, Callable[..., aio.Channel]]]):
+                A ``Channel`` instance through which to make calls, or a Callable
+                that constructs and returns one. If set to None, ``self.create_channel``
+                is used to create the channel. If a Callable is given, it will be called
+                with the same arguments as used in ``self.create_channel``.
+            api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint.
+                If provided, it overrides the ``host`` argument and tries to create
+                a mutual TLS channel with client SSL credentials from
+                ``client_cert_source`` or application default SSL credentials.
+            client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]):
+                Deprecated. A callback to provide client SSL certificate bytes and
+                private key bytes, both in PEM format. It is ignored if
+                ``api_mtls_endpoint`` is None.
+            ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials
+                for the grpc channel. It is ignored if a ``channel`` instance is provided.
+            client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]):
+                A callback to provide client certificate bytes and private key bytes,
+                both in PEM format. It is used to configure a mutual TLS channel. It is
+                ignored if a ``channel`` instance or ``ssl_channel_credentials`` is provided.
+            quota_project_id (Optional[str]): An optional project to use for billing
+                and quota.
+            client_info (google.api_core.gapic_v1.client_info.ClientInfo):
+                The client info used to send a user-agent string along with
+                API requests. If ``None``, then default info will be used.
+                Generally, you only need to set this if you're developing
+                your own client library.
+            always_use_jwt_access (Optional[bool]): Whether self signed JWT should
+                be used for service account credentials.
+
+        Raises:
+            google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport
+                creation failed for any reason.
+            google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials``
+                and ``credentials_file`` are passed.
+        """
+        self._grpc_channel = None
+        self._ssl_channel_credentials = ssl_channel_credentials
+        self._stubs: Dict[str, Callable] = {}
+        self._operations_client: Optional[operations_v1.OperationsAsyncClient] = None
+
+        if api_mtls_endpoint:
+            warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning)
+        if client_cert_source:
+            warnings.warn("client_cert_source is deprecated", DeprecationWarning)
+
+        if isinstance(channel, aio.Channel):
+            # Ignore credentials if a channel was passed.
+            credentials = None
+            self._ignore_credentials = True
+            # If a channel was explicitly provided, set it.
+            self._grpc_channel = channel
+            self._ssl_channel_credentials = None
+        else:
+            if api_mtls_endpoint:
+                host = api_mtls_endpoint
+
+                # Create SSL credentials with client_cert_source or application
+                # default SSL credentials.
+                if client_cert_source:
+                    cert, key = client_cert_source()
+                    self._ssl_channel_credentials = grpc.ssl_channel_credentials(
+                        certificate_chain=cert, private_key=key
+                    )
+                else:
+                    self._ssl_channel_credentials = SslCredentials().ssl_credentials
+
+            else:
+                if client_cert_source_for_mtls and not ssl_channel_credentials:
+                    cert, key = client_cert_source_for_mtls()
+                    self._ssl_channel_credentials = grpc.ssl_channel_credentials(
+                        certificate_chain=cert, private_key=key
+                    )
+
+        # The base transport sets the host, credentials and scopes
+        super().__init__(
+            host=host,
+            credentials=credentials,
+            credentials_file=credentials_file,
+            scopes=scopes,
+            quota_project_id=quota_project_id,
+            client_info=client_info,
+            always_use_jwt_access=always_use_jwt_access,
+            api_audience=api_audience,
+        )
+
+        if not self._grpc_channel:
+            # initialize with the provided callable or the default channel
+            channel_init = channel or type(self).create_channel
+            self._grpc_channel = channel_init(
+                self._host,
+                # use the credentials which are saved
+                credentials=self._credentials,
+                # Set ``credentials_file`` to ``None`` here as
+                # the credentials that we saved earlier should be used.
+                credentials_file=None,
+                scopes=self._scopes,
+                ssl_credentials=self._ssl_channel_credentials,
+                quota_project_id=quota_project_id,
+                options=[
+                    ("grpc.max_send_message_length", -1),
+                    ("grpc.max_receive_message_length", -1),
+                ],
+            )
+
+        # Wrap messages. This must be done after self._grpc_channel exists
+        self._wrap_with_kind = "kind" in inspect.signature(gapic_v1.method_async.wrap_method).parameters
+        self._prep_wrapped_messages(client_info)
+
+    @property
+    def grpc_channel(self) -> aio.Channel:
+        """Create the channel designed to connect to this service.
+
+        This property caches on the instance; repeated calls return
+        the same channel.
+        """
+        # Return the channel from cache.
+        return self._grpc_channel
+
+    @property
+    def operations_client(self) -> operations_v1.OperationsAsyncClient:
+        """Create the client designed to process long-running operations.
+
+        This property caches on the instance; repeated calls return the same
+        client.
+        """
+        # Quick check: Only create a new client if we do not already have one.
+ if self._operations_client is None: + self._operations_client = operations_v1.OperationsAsyncClient( + self.grpc_channel + ) + + # Return the client from cache. + return self._operations_client + + @property + def create_data_taxonomy(self) -> Callable[ + [gcd_data_taxonomy.CreateDataTaxonomyRequest], + Awaitable[operations_pb2.Operation]]: + r"""Return a callable for the create data taxonomy method over gRPC. + + Create a DataTaxonomy resource. + + Returns: + Callable[[~.CreateDataTaxonomyRequest], + Awaitable[~.Operation]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'create_data_taxonomy' not in self._stubs: + self._stubs['create_data_taxonomy'] = self.grpc_channel.unary_unary( + '/google.cloud.dataplex.v1.DataTaxonomyService/CreateDataTaxonomy', + request_serializer=gcd_data_taxonomy.CreateDataTaxonomyRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs['create_data_taxonomy'] + + @property + def update_data_taxonomy(self) -> Callable[ + [gcd_data_taxonomy.UpdateDataTaxonomyRequest], + Awaitable[operations_pb2.Operation]]: + r"""Return a callable for the update data taxonomy method over gRPC. + + Updates a DataTaxonomy resource. + + Returns: + Callable[[~.UpdateDataTaxonomyRequest], + Awaitable[~.Operation]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'update_data_taxonomy' not in self._stubs: + self._stubs['update_data_taxonomy'] = self.grpc_channel.unary_unary( + '/google.cloud.dataplex.v1.DataTaxonomyService/UpdateDataTaxonomy', + request_serializer=gcd_data_taxonomy.UpdateDataTaxonomyRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs['update_data_taxonomy'] + + @property + def delete_data_taxonomy(self) -> Callable[ + [data_taxonomy.DeleteDataTaxonomyRequest], + Awaitable[operations_pb2.Operation]]: + r"""Return a callable for the delete data taxonomy method over gRPC. + + Deletes a DataTaxonomy resource. All attributes + within the DataTaxonomy must be deleted before the + DataTaxonomy can be deleted. + + Returns: + Callable[[~.DeleteDataTaxonomyRequest], + Awaitable[~.Operation]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'delete_data_taxonomy' not in self._stubs: + self._stubs['delete_data_taxonomy'] = self.grpc_channel.unary_unary( + '/google.cloud.dataplex.v1.DataTaxonomyService/DeleteDataTaxonomy', + request_serializer=data_taxonomy.DeleteDataTaxonomyRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs['delete_data_taxonomy'] + + @property + def list_data_taxonomies(self) -> Callable[ + [data_taxonomy.ListDataTaxonomiesRequest], + Awaitable[data_taxonomy.ListDataTaxonomiesResponse]]: + r"""Return a callable for the list data taxonomies method over gRPC. + + Lists DataTaxonomy resources in a project and + location. 
+ + Returns: + Callable[[~.ListDataTaxonomiesRequest], + Awaitable[~.ListDataTaxonomiesResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'list_data_taxonomies' not in self._stubs: + self._stubs['list_data_taxonomies'] = self.grpc_channel.unary_unary( + '/google.cloud.dataplex.v1.DataTaxonomyService/ListDataTaxonomies', + request_serializer=data_taxonomy.ListDataTaxonomiesRequest.serialize, + response_deserializer=data_taxonomy.ListDataTaxonomiesResponse.deserialize, + ) + return self._stubs['list_data_taxonomies'] + + @property + def get_data_taxonomy(self) -> Callable[ + [data_taxonomy.GetDataTaxonomyRequest], + Awaitable[data_taxonomy.DataTaxonomy]]: + r"""Return a callable for the get data taxonomy method over gRPC. + + Retrieves a DataTaxonomy resource. + + Returns: + Callable[[~.GetDataTaxonomyRequest], + Awaitable[~.DataTaxonomy]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'get_data_taxonomy' not in self._stubs: + self._stubs['get_data_taxonomy'] = self.grpc_channel.unary_unary( + '/google.cloud.dataplex.v1.DataTaxonomyService/GetDataTaxonomy', + request_serializer=data_taxonomy.GetDataTaxonomyRequest.serialize, + response_deserializer=data_taxonomy.DataTaxonomy.deserialize, + ) + return self._stubs['get_data_taxonomy'] + + @property + def create_data_attribute_binding(self) -> Callable[ + [data_taxonomy.CreateDataAttributeBindingRequest], + Awaitable[operations_pb2.Operation]]: + r"""Return a callable for the create data attribute binding method over gRPC. + + Create a DataAttributeBinding resource. + + Returns: + Callable[[~.CreateDataAttributeBindingRequest], + Awaitable[~.Operation]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'create_data_attribute_binding' not in self._stubs: + self._stubs['create_data_attribute_binding'] = self.grpc_channel.unary_unary( + '/google.cloud.dataplex.v1.DataTaxonomyService/CreateDataAttributeBinding', + request_serializer=data_taxonomy.CreateDataAttributeBindingRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs['create_data_attribute_binding'] + + @property + def update_data_attribute_binding(self) -> Callable[ + [data_taxonomy.UpdateDataAttributeBindingRequest], + Awaitable[operations_pb2.Operation]]: + r"""Return a callable for the update data attribute binding method over gRPC. + + Updates a DataAttributeBinding resource. + + Returns: + Callable[[~.UpdateDataAttributeBindingRequest], + Awaitable[~.Operation]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if 'update_data_attribute_binding' not in self._stubs: + self._stubs['update_data_attribute_binding'] = self.grpc_channel.unary_unary( + '/google.cloud.dataplex.v1.DataTaxonomyService/UpdateDataAttributeBinding', + request_serializer=data_taxonomy.UpdateDataAttributeBindingRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs['update_data_attribute_binding'] + + @property + def delete_data_attribute_binding(self) -> Callable[ + [data_taxonomy.DeleteDataAttributeBindingRequest], + Awaitable[operations_pb2.Operation]]: + r"""Return a callable for the delete data attribute binding method over gRPC. + + Deletes a DataAttributeBinding resource. All + attributes within the DataAttributeBinding must be + deleted before the DataAttributeBinding can be deleted. + + Returns: + Callable[[~.DeleteDataAttributeBindingRequest], + Awaitable[~.Operation]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'delete_data_attribute_binding' not in self._stubs: + self._stubs['delete_data_attribute_binding'] = self.grpc_channel.unary_unary( + '/google.cloud.dataplex.v1.DataTaxonomyService/DeleteDataAttributeBinding', + request_serializer=data_taxonomy.DeleteDataAttributeBindingRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs['delete_data_attribute_binding'] + + @property + def list_data_attribute_bindings(self) -> Callable[ + [data_taxonomy.ListDataAttributeBindingsRequest], + Awaitable[data_taxonomy.ListDataAttributeBindingsResponse]]: + r"""Return a callable for the list data attribute bindings method over gRPC. + + Lists DataAttributeBinding resources in a project and + location. + + Returns: + Callable[[~.ListDataAttributeBindingsRequest], + Awaitable[~.ListDataAttributeBindingsResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'list_data_attribute_bindings' not in self._stubs: + self._stubs['list_data_attribute_bindings'] = self.grpc_channel.unary_unary( + '/google.cloud.dataplex.v1.DataTaxonomyService/ListDataAttributeBindings', + request_serializer=data_taxonomy.ListDataAttributeBindingsRequest.serialize, + response_deserializer=data_taxonomy.ListDataAttributeBindingsResponse.deserialize, + ) + return self._stubs['list_data_attribute_bindings'] + + @property + def get_data_attribute_binding(self) -> Callable[ + [data_taxonomy.GetDataAttributeBindingRequest], + Awaitable[data_taxonomy.DataAttributeBinding]]: + r"""Return a callable for the get data attribute binding method over gRPC. + + Retrieves a DataAttributeBinding resource. + + Returns: + Callable[[~.GetDataAttributeBindingRequest], + Awaitable[~.DataAttributeBinding]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if 'get_data_attribute_binding' not in self._stubs: + self._stubs['get_data_attribute_binding'] = self.grpc_channel.unary_unary( + '/google.cloud.dataplex.v1.DataTaxonomyService/GetDataAttributeBinding', + request_serializer=data_taxonomy.GetDataAttributeBindingRequest.serialize, + response_deserializer=data_taxonomy.DataAttributeBinding.deserialize, + ) + return self._stubs['get_data_attribute_binding'] + + @property + def create_data_attribute(self) -> Callable[ + [data_taxonomy.CreateDataAttributeRequest], + Awaitable[operations_pb2.Operation]]: + r"""Return a callable for the create data attribute method over gRPC. + + Create a DataAttribute resource. + + Returns: + Callable[[~.CreateDataAttributeRequest], + Awaitable[~.Operation]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'create_data_attribute' not in self._stubs: + self._stubs['create_data_attribute'] = self.grpc_channel.unary_unary( + '/google.cloud.dataplex.v1.DataTaxonomyService/CreateDataAttribute', + request_serializer=data_taxonomy.CreateDataAttributeRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs['create_data_attribute'] + + @property + def update_data_attribute(self) -> Callable[ + [data_taxonomy.UpdateDataAttributeRequest], + Awaitable[operations_pb2.Operation]]: + r"""Return a callable for the update data attribute method over gRPC. + + Updates a DataAttribute resource. + + Returns: + Callable[[~.UpdateDataAttributeRequest], + Awaitable[~.Operation]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'update_data_attribute' not in self._stubs: + self._stubs['update_data_attribute'] = self.grpc_channel.unary_unary( + '/google.cloud.dataplex.v1.DataTaxonomyService/UpdateDataAttribute', + request_serializer=data_taxonomy.UpdateDataAttributeRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs['update_data_attribute'] + + @property + def delete_data_attribute(self) -> Callable[ + [data_taxonomy.DeleteDataAttributeRequest], + Awaitable[operations_pb2.Operation]]: + r"""Return a callable for the delete data attribute method over gRPC. + + Deletes a Data Attribute resource. + + Returns: + Callable[[~.DeleteDataAttributeRequest], + Awaitable[~.Operation]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if 'delete_data_attribute' not in self._stubs: + self._stubs['delete_data_attribute'] = self.grpc_channel.unary_unary( + '/google.cloud.dataplex.v1.DataTaxonomyService/DeleteDataAttribute', + request_serializer=data_taxonomy.DeleteDataAttributeRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs['delete_data_attribute'] + + @property + def list_data_attributes(self) -> Callable[ + [data_taxonomy.ListDataAttributesRequest], + Awaitable[data_taxonomy.ListDataAttributesResponse]]: + r"""Return a callable for the list data attributes method over gRPC. + + Lists Data Attribute resources in a DataTaxonomy. + + Returns: + Callable[[~.ListDataAttributesRequest], + Awaitable[~.ListDataAttributesResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'list_data_attributes' not in self._stubs: + self._stubs['list_data_attributes'] = self.grpc_channel.unary_unary( + '/google.cloud.dataplex.v1.DataTaxonomyService/ListDataAttributes', + request_serializer=data_taxonomy.ListDataAttributesRequest.serialize, + response_deserializer=data_taxonomy.ListDataAttributesResponse.deserialize, + ) + return self._stubs['list_data_attributes'] + + @property + def get_data_attribute(self) -> Callable[ + [data_taxonomy.GetDataAttributeRequest], + Awaitable[data_taxonomy.DataAttribute]]: + r"""Return a callable for the get data attribute method over gRPC. + + Retrieves a Data Attribute resource. + + Returns: + Callable[[~.GetDataAttributeRequest], + Awaitable[~.DataAttribute]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if 'get_data_attribute' not in self._stubs: + self._stubs['get_data_attribute'] = self.grpc_channel.unary_unary( + '/google.cloud.dataplex.v1.DataTaxonomyService/GetDataAttribute', + request_serializer=data_taxonomy.GetDataAttributeRequest.serialize, + response_deserializer=data_taxonomy.DataAttribute.deserialize, + ) + return self._stubs['get_data_attribute'] + + def _prep_wrapped_messages(self, client_info): + """ Precompute the wrapped methods, overriding the base class method to use async wrappers.""" + self._wrapped_methods = { + self.create_data_taxonomy: self._wrap_method( + self.create_data_taxonomy, + default_timeout=None, + client_info=client_info, + ), + self.update_data_taxonomy: self._wrap_method( + self.update_data_taxonomy, + default_timeout=None, + client_info=client_info, + ), + self.delete_data_taxonomy: self._wrap_method( + self.delete_data_taxonomy, + default_timeout=None, + client_info=client_info, + ), + self.list_data_taxonomies: self._wrap_method( + self.list_data_taxonomies, + default_timeout=None, + client_info=client_info, + ), + self.get_data_taxonomy: self._wrap_method( + self.get_data_taxonomy, + default_timeout=None, + client_info=client_info, + ), + self.create_data_attribute_binding: self._wrap_method( + self.create_data_attribute_binding, + default_timeout=None, + client_info=client_info, + ), + self.update_data_attribute_binding: self._wrap_method( + self.update_data_attribute_binding, + default_timeout=None, + client_info=client_info, + ), + self.delete_data_attribute_binding: self._wrap_method( + self.delete_data_attribute_binding, + default_timeout=None, + client_info=client_info, + ), + self.list_data_attribute_bindings: self._wrap_method( + self.list_data_attribute_bindings, + default_timeout=None, + client_info=client_info, + ), + self.get_data_attribute_binding: self._wrap_method( + self.get_data_attribute_binding, + default_timeout=None, + client_info=client_info, + ), + self.create_data_attribute: self._wrap_method( + self.create_data_attribute, + default_timeout=None, + client_info=client_info, + ), + self.update_data_attribute: self._wrap_method( + self.update_data_attribute, + default_timeout=None, + client_info=client_info, + ), + self.delete_data_attribute: self._wrap_method( + self.delete_data_attribute, + default_timeout=None, + client_info=client_info, + ), + self.list_data_attributes: self._wrap_method( + self.list_data_attributes, + default_timeout=None, + client_info=client_info, + ), + self.get_data_attribute: self._wrap_method( + self.get_data_attribute, + default_timeout=None, + client_info=client_info, + ), + self.get_location: self._wrap_method( + self.get_location, + default_timeout=None, + client_info=client_info, + ), + self.list_locations: self._wrap_method( + self.list_locations, + default_timeout=None, + client_info=client_info, + ), + self.cancel_operation: self._wrap_method( + self.cancel_operation, + default_timeout=None, + client_info=client_info, + ), + self.delete_operation: self._wrap_method( + self.delete_operation, + default_timeout=None, + client_info=client_info, + ), + self.get_operation: self._wrap_method( + self.get_operation, + default_timeout=None, + client_info=client_info, + ), + self.list_operations: self._wrap_method( + self.list_operations, + default_timeout=None, + client_info=client_info, + ), + } + + def _wrap_method(self, func, *args, **kwargs): + if self._wrap_with_kind: # pragma: NO COVER + kwargs["kind"] = self.kind + return gapic_v1.method_async.wrap_method(func, *args, **kwargs) + + def 
close(self): + return self.grpc_channel.close() + + @property + def kind(self) -> str: + return "grpc_asyncio" + + @property + def delete_operation( + self, + ) -> Callable[[operations_pb2.DeleteOperationRequest], None]: + r"""Return a callable for the delete_operation method over gRPC. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "delete_operation" not in self._stubs: + self._stubs["delete_operation"] = self.grpc_channel.unary_unary( + "/google.longrunning.Operations/DeleteOperation", + request_serializer=operations_pb2.DeleteOperationRequest.SerializeToString, + response_deserializer=None, + ) + return self._stubs["delete_operation"] + + @property + def cancel_operation( + self, + ) -> Callable[[operations_pb2.CancelOperationRequest], None]: + r"""Return a callable for the cancel_operation method over gRPC. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "cancel_operation" not in self._stubs: + self._stubs["cancel_operation"] = self.grpc_channel.unary_unary( + "/google.longrunning.Operations/CancelOperation", + request_serializer=operations_pb2.CancelOperationRequest.SerializeToString, + response_deserializer=None, + ) + return self._stubs["cancel_operation"] + + @property + def get_operation( + self, + ) -> Callable[[operations_pb2.GetOperationRequest], operations_pb2.Operation]: + r"""Return a callable for the get_operation method over gRPC. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_operation" not in self._stubs: + self._stubs["get_operation"] = self.grpc_channel.unary_unary( + "/google.longrunning.Operations/GetOperation", + request_serializer=operations_pb2.GetOperationRequest.SerializeToString, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["get_operation"] + + @property + def list_operations( + self, + ) -> Callable[[operations_pb2.ListOperationsRequest], operations_pb2.ListOperationsResponse]: + r"""Return a callable for the list_operations method over gRPC. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "list_operations" not in self._stubs: + self._stubs["list_operations"] = self.grpc_channel.unary_unary( + "/google.longrunning.Operations/ListOperations", + request_serializer=operations_pb2.ListOperationsRequest.SerializeToString, + response_deserializer=operations_pb2.ListOperationsResponse.FromString, + ) + return self._stubs["list_operations"] + + @property + def list_locations( + self, + ) -> Callable[[locations_pb2.ListLocationsRequest], locations_pb2.ListLocationsResponse]: + r"""Return a callable for the list locations method over gRPC. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
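+ # Mixin stubs (Locations, Operations) share the same self._stubs cache as the service stubs.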
+ if "list_locations" not in self._stubs: + self._stubs["list_locations"] = self.grpc_channel.unary_unary( + "/google.cloud.location.Locations/ListLocations", + request_serializer=locations_pb2.ListLocationsRequest.SerializeToString, + response_deserializer=locations_pb2.ListLocationsResponse.FromString, + ) + return self._stubs["list_locations"] + + @property + def get_location( + self, + ) -> Callable[[locations_pb2.GetLocationRequest], locations_pb2.Location]: + r"""Return a callable for the list locations method over gRPC. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_location" not in self._stubs: + self._stubs["get_location"] = self.grpc_channel.unary_unary( + "/google.cloud.location.Locations/GetLocation", + request_serializer=locations_pb2.GetLocationRequest.SerializeToString, + response_deserializer=locations_pb2.Location.FromString, + ) + return self._stubs["get_location"] + + +__all__ = ( + 'DataTaxonomyServiceGrpcAsyncIOTransport', +) diff --git a/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/dataplex_service/__init__.py b/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/dataplex_service/__init__.py new file mode 100644 index 000000000000..26191ecad898 --- /dev/null +++ b/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/dataplex_service/__init__.py @@ -0,0 +1,22 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from .client import DataplexServiceClient +from .async_client import DataplexServiceAsyncClient + +__all__ = ( + 'DataplexServiceClient', + 'DataplexServiceAsyncClient', +) diff --git a/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/dataplex_service/async_client.py b/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/dataplex_service/async_client.py new file mode 100644 index 000000000000..bcfddabeab46 --- /dev/null +++ b/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/dataplex_service/async_client.py @@ -0,0 +1,4580 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from collections import OrderedDict +import re +from typing import Dict, Callable, Mapping, MutableMapping, MutableSequence, Optional, Sequence, Tuple, Type, Union + +from google.cloud.dataplex_v1 import gapic_version as package_version + +from google.api_core.client_options import ClientOptions +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry_async as retries +from google.auth import credentials as ga_credentials # type: ignore +from google.oauth2 import service_account # type: ignore + + +try: + OptionalRetry = Union[retries.AsyncRetry, gapic_v1.method._MethodDefault, None] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.AsyncRetry, object, None] # type: ignore + +from google.api_core import operation # type: ignore +from google.api_core import operation_async # type: ignore +from google.cloud.dataplex_v1.services.dataplex_service import pagers +from google.cloud.dataplex_v1.types import analyze +from google.cloud.dataplex_v1.types import resources +from google.cloud.dataplex_v1.types import service +from google.cloud.dataplex_v1.types import tasks +from google.cloud.location import locations_pb2 # type: ignore +from google.iam.v1 import iam_policy_pb2 # type: ignore +from google.iam.v1 import policy_pb2 # type: ignore +from google.longrunning import operations_pb2 # type: ignore +from google.protobuf import empty_pb2 # type: ignore +from google.protobuf import field_mask_pb2 # type: ignore +from google.protobuf import timestamp_pb2 # type: ignore +from .transports.base import DataplexServiceTransport, DEFAULT_CLIENT_INFO +from .transports.grpc_asyncio import DataplexServiceGrpcAsyncIOTransport +from .client import DataplexServiceClient + + +class DataplexServiceAsyncClient: + """Dataplex service provides data lakes as a service. The + primary resources offered by this service are Lakes, Zones and + Assets which collectively allow a data administrator to + organize, manage, secure and catalog data across their + organization located across cloud projects in a variety of + storage systems including Cloud Storage and BigQuery. + """ + + _client: DataplexServiceClient + + # Copy defaults from the synchronous client for use here. + # Note: DEFAULT_ENDPOINT is deprecated. Use _DEFAULT_ENDPOINT_TEMPLATE instead. 
+ DEFAULT_ENDPOINT = DataplexServiceClient.DEFAULT_ENDPOINT + DEFAULT_MTLS_ENDPOINT = DataplexServiceClient.DEFAULT_MTLS_ENDPOINT + _DEFAULT_ENDPOINT_TEMPLATE = DataplexServiceClient._DEFAULT_ENDPOINT_TEMPLATE + _DEFAULT_UNIVERSE = DataplexServiceClient._DEFAULT_UNIVERSE + + action_path = staticmethod(DataplexServiceClient.action_path) + parse_action_path = staticmethod(DataplexServiceClient.parse_action_path) + asset_path = staticmethod(DataplexServiceClient.asset_path) + parse_asset_path = staticmethod(DataplexServiceClient.parse_asset_path) + environment_path = staticmethod(DataplexServiceClient.environment_path) + parse_environment_path = staticmethod(DataplexServiceClient.parse_environment_path) + job_path = staticmethod(DataplexServiceClient.job_path) + parse_job_path = staticmethod(DataplexServiceClient.parse_job_path) + lake_path = staticmethod(DataplexServiceClient.lake_path) + parse_lake_path = staticmethod(DataplexServiceClient.parse_lake_path) + session_path = staticmethod(DataplexServiceClient.session_path) + parse_session_path = staticmethod(DataplexServiceClient.parse_session_path) + task_path = staticmethod(DataplexServiceClient.task_path) + parse_task_path = staticmethod(DataplexServiceClient.parse_task_path) + zone_path = staticmethod(DataplexServiceClient.zone_path) + parse_zone_path = staticmethod(DataplexServiceClient.parse_zone_path) + common_billing_account_path = staticmethod(DataplexServiceClient.common_billing_account_path) + parse_common_billing_account_path = staticmethod(DataplexServiceClient.parse_common_billing_account_path) + common_folder_path = staticmethod(DataplexServiceClient.common_folder_path) + parse_common_folder_path = staticmethod(DataplexServiceClient.parse_common_folder_path) + common_organization_path = staticmethod(DataplexServiceClient.common_organization_path) + parse_common_organization_path = staticmethod(DataplexServiceClient.parse_common_organization_path) + common_project_path = staticmethod(DataplexServiceClient.common_project_path) + parse_common_project_path = staticmethod(DataplexServiceClient.parse_common_project_path) + common_location_path = staticmethod(DataplexServiceClient.common_location_path) + parse_common_location_path = staticmethod(DataplexServiceClient.parse_common_location_path) + + @classmethod + def from_service_account_info(cls, info: dict, *args, **kwargs): + """Creates an instance of this client using the provided credentials + info. + + Args: + info (dict): The service account private key info. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + DataplexServiceAsyncClient: The constructed client. + """ + return DataplexServiceClient.from_service_account_info.__func__(DataplexServiceAsyncClient, info, *args, **kwargs) # type: ignore + + @classmethod + def from_service_account_file(cls, filename: str, *args, **kwargs): + """Creates an instance of this client using the provided credentials + file. + + Args: + filename (str): The path to the service account private key json + file. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + DataplexServiceAsyncClient: The constructed client. 
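+
+ Example (illustrative sketch; assumes a key file saved locally as
+ ``service_account.json``):
+
+ .. code-block:: python
+
+ client = DataplexServiceAsyncClient.from_service_account_file(
+ "service_account.json",
+ )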
+ """ + return DataplexServiceClient.from_service_account_file.__func__(DataplexServiceAsyncClient, filename, *args, **kwargs) # type: ignore + + from_service_account_json = from_service_account_file + + @classmethod + def get_mtls_endpoint_and_cert_source(cls, client_options: Optional[ClientOptions] = None): + """Return the API endpoint and client cert source for mutual TLS. + + The client cert source is determined in the following order: + (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the + client cert source is None. + (2) if `client_options.client_cert_source` is provided, use the provided one; if the + default client cert source exists, use the default one; otherwise the client cert + source is None. + + The API endpoint is determined in the following order: + (1) if `client_options.api_endpoint` if provided, use the provided one. + (2) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is "always", use the + default mTLS endpoint; if the environment variable is "never", use the default API + endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise + use the default API endpoint. + + More details can be found at https://google.aip.dev/auth/4114. + + Args: + client_options (google.api_core.client_options.ClientOptions): Custom options for the + client. Only the `api_endpoint` and `client_cert_source` properties may be used + in this method. + + Returns: + Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the + client cert source to use. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If any errors happen. + """ + return DataplexServiceClient.get_mtls_endpoint_and_cert_source(client_options) # type: ignore + + @property + def transport(self) -> DataplexServiceTransport: + """Returns the transport used by the client instance. + + Returns: + DataplexServiceTransport: The transport used by the client instance. + """ + return self._client.transport + + @property + def api_endpoint(self): + """Return the API endpoint used by the client instance. + + Returns: + str: The API endpoint used by the client instance. + """ + return self._client._api_endpoint + + @property + def universe_domain(self) -> str: + """Return the universe domain used by the client instance. + + Returns: + str: The universe domain used + by the client instance. + """ + return self._client._universe_domain + + get_transport_class = DataplexServiceClient.get_transport_class + + def __init__(self, *, + credentials: Optional[ga_credentials.Credentials] = None, + transport: Optional[Union[str, DataplexServiceTransport, Callable[..., DataplexServiceTransport]]] = "grpc_asyncio", + client_options: Optional[ClientOptions] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + ) -> None: + """Instantiates the dataplex service async client. + + Args: + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + transport (Optional[Union[str,DataplexServiceTransport,Callable[..., DataplexServiceTransport]]]): + The transport to use, or a Callable that constructs and returns a new transport to use. + If a Callable is given, it will be called with the same set of initialization + arguments as used in the DataplexServiceTransport constructor. 
+ If set to None, a transport is chosen automatically.
+ client_options (Optional[Union[google.api_core.client_options.ClientOptions, dict]]):
+ Custom options for the client.
+
+ 1. The ``api_endpoint`` property can be used to override the
+ default endpoint provided by the client when ``transport`` is
+ not explicitly provided. Only if this property is not set and
+ ``transport`` was not explicitly provided, the endpoint is
+ determined by the GOOGLE_API_USE_MTLS_ENDPOINT environment
+ variable, which can have one of the following values:
+ "always" (always use the default mTLS endpoint), "never" (always
+ use the default regular endpoint) and "auto" (auto-switch to the
+ default mTLS endpoint if client certificate is present; this is
+ the default value).
+
+ 2. If the GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable
+ is "true", then the ``client_cert_source`` property can be used
+ to provide a client certificate for mTLS transport. If
+ not provided, the default SSL client certificate will be used if
+ present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not
+ set, no client certificate will be used.
+
+ 3. The ``universe_domain`` property can be used to override the
+ default "googleapis.com" universe. Note that ``api_endpoint``
+ property still takes precedence; and ``universe_domain`` is
+ currently not supported for mTLS.
+
+ client_info (google.api_core.gapic_v1.client_info.ClientInfo):
+ The client info used to send a user-agent string along with
+ API requests. If ``None``, then default info will be used.
+ Generally, you only need to set this if you're developing
+ your own client library.
+
+ Raises:
+ google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport
+ creation failed for any reason.
+ """
+ self._client = DataplexServiceClient(
+ credentials=credentials,
+ transport=transport,
+ client_options=client_options,
+ client_info=client_info,
+
+ )
+
+ async def create_lake(self,
+ request: Optional[Union[service.CreateLakeRequest, dict]] = None,
+ *,
+ parent: Optional[str] = None,
+ lake: Optional[resources.Lake] = None,
+ lake_id: Optional[str] = None,
+ retry: OptionalRetry = gapic_v1.method.DEFAULT,
+ timeout: Union[float, object] = gapic_v1.method.DEFAULT,
+ metadata: Sequence[Tuple[str, str]] = (),
+ ) -> operation_async.AsyncOperation:
+ r"""Creates a lake resource.
+
+ .. code-block:: python
+
+ # This snippet has been automatically generated and should be regarded as a
+ # code template only.
+ # It will require modifications to work:
+ # - It may require correct/in-range values for request initialization.
+ # - It may require specifying regional endpoints when creating the service
+ # client as shown in:
+ # https://googleapis.dev/python/google-api-core/latest/client_options.html
+ from google.cloud import dataplex_v1
+
+ async def sample_create_lake():
+ # Create a client
+ client = dataplex_v1.DataplexServiceAsyncClient()
+
+ # Initialize request argument(s)
+ request = dataplex_v1.CreateLakeRequest(
+ parent="parent_value",
+ lake_id="lake_id_value",
+ )
+
+ # Make the request
+ operation = client.create_lake(request=request)
+
+ print("Waiting for operation to complete...")
+
+ response = (await operation).result()
+
+ # Handle the response
+ print(response)
+
+ Args:
+ request (Optional[Union[google.cloud.dataplex_v1.types.CreateLakeRequest, dict]]):
+ The request object. Create lake request.
+ parent (:class:`str`):
+ Required.
The resource name of the lake location, of the + form: projects/{project_number}/locations/{location_id} + where ``location_id`` refers to a GCP region. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + lake (:class:`google.cloud.dataplex_v1.types.Lake`): + Required. Lake resource + This corresponds to the ``lake`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + lake_id (:class:`str`): + Required. Lake identifier. This ID will be used to + generate names such as database and dataset names when + publishing metadata to Hive Metastore and BigQuery. + + - Must contain only lowercase letters, numbers and + hyphens. + - Must start with a letter. + - Must end with a number or a letter. + - Must be between 1-63 characters. + - Must be unique within the customer project / + location. + + This corresponds to the ``lake_id`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation_async.AsyncOperation: + An object representing a long-running operation. + + The result type for the operation will be :class:`google.cloud.dataplex_v1.types.Lake` A lake is a centralized repository for managing enterprise data across the + organization distributed across many cloud projects, + and stored in a variety of storage services such as + Google Cloud Storage and BigQuery. The resources + attached to a lake are referred to as managed + resources. Data within these managed resources can be + structured or unstructured. A lake provides data + admins with tools to organize, secure and manage + their data at scale, and provides data scientists and + data engineers an integrated experience to easily + search, discover, analyze and transform data and + associated metadata. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent, lake, lake_id]) + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, service.CreateLakeRequest): + request = service.CreateLakeRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + if lake is not None: + request.lake = lake + if lake_id is not None: + request.lake_id = lake_id + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[self._client._transport.create_lake] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("parent", request.parent), + )), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. 
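+ # The wrapped RPC applies the retry/timeout policy configured in the transport's _prep_wrapped_messages.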
+ response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation_async.from_gapic( + response, + self._client._transport.operations_client, + resources.Lake, + metadata_type=service.OperationMetadata, + ) + + # Done; return the response. + return response + + async def update_lake(self, + request: Optional[Union[service.UpdateLakeRequest, dict]] = None, + *, + lake: Optional[resources.Lake] = None, + update_mask: Optional[field_mask_pb2.FieldMask] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation_async.AsyncOperation: + r"""Updates a lake resource. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dataplex_v1 + + async def sample_update_lake(): + # Create a client + client = dataplex_v1.DataplexServiceAsyncClient() + + # Initialize request argument(s) + request = dataplex_v1.UpdateLakeRequest( + ) + + # Make the request + operation = client.update_lake(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.dataplex_v1.types.UpdateLakeRequest, dict]]): + The request object. Update lake request. + lake (:class:`google.cloud.dataplex_v1.types.Lake`): + Required. Update description. Only fields specified in + ``update_mask`` are updated. + + This corresponds to the ``lake`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + update_mask (:class:`google.protobuf.field_mask_pb2.FieldMask`): + Required. Mask of fields to update. + This corresponds to the ``update_mask`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation_async.AsyncOperation: + An object representing a long-running operation. + + The result type for the operation will be :class:`google.cloud.dataplex_v1.types.Lake` A lake is a centralized repository for managing enterprise data across the + organization distributed across many cloud projects, + and stored in a variety of storage services such as + Google Cloud Storage and BigQuery. The resources + attached to a lake are referred to as managed + resources. Data within these managed resources can be + structured or unstructured. A lake provides data + admins with tools to organize, secure and manage + their data at scale, and provides data scientists and + data engineers an integrated experience to easily + search, discover, analyze and transform data and + associated metadata. + + """ + # Create or coerce a protobuf request object. 
+ # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([lake, update_mask]) + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, service.UpdateLakeRequest): + request = service.UpdateLakeRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if lake is not None: + request.lake = lake + if update_mask is not None: + request.update_mask = update_mask + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[self._client._transport.update_lake] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("lake.name", request.lake.name), + )), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation_async.from_gapic( + response, + self._client._transport.operations_client, + resources.Lake, + metadata_type=service.OperationMetadata, + ) + + # Done; return the response. + return response + + async def delete_lake(self, + request: Optional[Union[service.DeleteLakeRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation_async.AsyncOperation: + r"""Deletes a lake resource. All zones within the lake + must be deleted before the lake can be deleted. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dataplex_v1 + + async def sample_delete_lake(): + # Create a client + client = dataplex_v1.DataplexServiceAsyncClient() + + # Initialize request argument(s) + request = dataplex_v1.DeleteLakeRequest( + name="name_value", + ) + + # Make the request + operation = client.delete_lake(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.dataplex_v1.types.DeleteLakeRequest, dict]]): + The request object. Delete lake request. + name (:class:`str`): + Required. The resource name of the lake: + ``projects/{project_number}/locations/{location_id}/lakes/{lake_id}``. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. 
+ metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation_async.AsyncOperation: + An object representing a long-running operation. + + The result type for the operation will be :class:`google.protobuf.empty_pb2.Empty` A generic empty message that you can re-use to avoid defining duplicated + empty messages in your APIs. A typical example is to + use it as the request or the response type of an API + method. For instance: + + service Foo { + rpc Bar(google.protobuf.Empty) returns + (google.protobuf.Empty); + + } + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, service.DeleteLakeRequest): + request = service.DeleteLakeRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[self._client._transport.delete_lake] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation_async.from_gapic( + response, + self._client._transport.operations_client, + empty_pb2.Empty, + metadata_type=service.OperationMetadata, + ) + + # Done; return the response. + return response + + async def list_lakes(self, + request: Optional[Union[service.ListLakesRequest, dict]] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListLakesAsyncPager: + r"""Lists lake resources in a project and location. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dataplex_v1 + + async def sample_list_lakes(): + # Create a client + client = dataplex_v1.DataplexServiceAsyncClient() + + # Initialize request argument(s) + request = dataplex_v1.ListLakesRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_lakes(request=request) + + # Handle the response + async for response in page_result: + print(response) + + Args: + request (Optional[Union[google.cloud.dataplex_v1.types.ListLakesRequest, dict]]): + The request object. 
List lakes request. + parent (:class:`str`): + Required. The resource name of the lake location, of the + form: + ``projects/{project_number}/locations/{location_id}`` + where ``location_id`` refers to a GCP region. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.dataplex_v1.services.dataplex_service.pagers.ListLakesAsyncPager: + List lakes response. + + Iterating over this object will yield + results and resolve additional pages + automatically. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent]) + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, service.ListLakesRequest): + request = service.ListLakesRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[self._client._transport.list_lakes] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("parent", request.parent), + )), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__aiter__` convenience method. + response = pagers.ListLakesAsyncPager( + method=rpc, + request=request, + response=response, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def get_lake(self, + request: Optional[Union[service.GetLakeRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> resources.Lake: + r"""Retrieves a lake resource. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dataplex_v1 + + async def sample_get_lake(): + # Create a client + client = dataplex_v1.DataplexServiceAsyncClient() + + # Initialize request argument(s) + request = dataplex_v1.GetLakeRequest( + name="name_value", + ) + + # Make the request + response = await client.get_lake(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.dataplex_v1.types.GetLakeRequest, dict]]): + The request object. Get lake request. + name (:class:`str`): + Required. The resource name of the lake: + ``projects/{project_number}/locations/{location_id}/lakes/{lake_id}``. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.dataplex_v1.types.Lake: + A lake is a centralized repository + for managing enterprise data across the + organization distributed across many + cloud projects, and stored in a variety + of storage services such as Google Cloud + Storage and BigQuery. The resources + attached to a lake are referred to as + managed resources. Data within these + managed resources can be structured or + unstructured. A lake provides data + admins with tools to organize, secure + and manage their data at scale, and + provides data scientists and data + engineers an integrated experience to + easily search, discover, analyze and + transform data and associated metadata. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, service.GetLakeRequest): + request = service.GetLakeRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[self._client._transport.get_lake] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
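+ # get_lake is a unary read, so the Lake message comes back directly rather than as a long-running operation.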
+ return response + + async def list_lake_actions(self, + request: Optional[Union[service.ListLakeActionsRequest, dict]] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListLakeActionsAsyncPager: + r"""Lists action resources in a lake. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dataplex_v1 + + async def sample_list_lake_actions(): + # Create a client + client = dataplex_v1.DataplexServiceAsyncClient() + + # Initialize request argument(s) + request = dataplex_v1.ListLakeActionsRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_lake_actions(request=request) + + # Handle the response + async for response in page_result: + print(response) + + Args: + request (Optional[Union[google.cloud.dataplex_v1.types.ListLakeActionsRequest, dict]]): + The request object. List lake actions request. + parent (:class:`str`): + Required. The resource name of the parent lake: + ``projects/{project_number}/locations/{location_id}/lakes/{lake_id}``. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.dataplex_v1.services.dataplex_service.pagers.ListLakeActionsAsyncPager: + List actions response. + + Iterating over this object will yield + results and resolve additional pages + automatically. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent]) + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, service.ListLakeActionsRequest): + request = service.ListLakeActionsRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[self._client._transport.list_lake_actions] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("parent", request.parent), + )), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. 
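+ # Only the first page is fetched here; the pager constructed below resolves later pages lazily during iteration.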
+ response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__aiter__` convenience method. + response = pagers.ListLakeActionsAsyncPager( + method=rpc, + request=request, + response=response, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def create_zone(self, + request: Optional[Union[service.CreateZoneRequest, dict]] = None, + *, + parent: Optional[str] = None, + zone: Optional[resources.Zone] = None, + zone_id: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation_async.AsyncOperation: + r"""Creates a zone resource within a lake. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dataplex_v1 + + async def sample_create_zone(): + # Create a client + client = dataplex_v1.DataplexServiceAsyncClient() + + # Initialize request argument(s) + zone = dataplex_v1.Zone() + zone.type_ = "CURATED" + zone.resource_spec.location_type = "MULTI_REGION" + + request = dataplex_v1.CreateZoneRequest( + parent="parent_value", + zone_id="zone_id_value", + zone=zone, + ) + + # Make the request + operation = client.create_zone(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.dataplex_v1.types.CreateZoneRequest, dict]]): + The request object. Create zone request. + parent (:class:`str`): + Required. The resource name of the parent lake: + ``projects/{project_number}/locations/{location_id}/lakes/{lake_id}``. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + zone (:class:`google.cloud.dataplex_v1.types.Zone`): + Required. Zone resource. + This corresponds to the ``zone`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + zone_id (:class:`str`): + Required. Zone identifier. This ID will be used to + generate names such as database and dataset names when + publishing metadata to Hive Metastore and BigQuery. + + - Must contain only lowercase letters, numbers and + hyphens. + - Must start with a letter. + - Must end with a number or a letter. + - Must be between 1-63 characters. + - Must be unique across all lakes from all locations in + a project. + - Must not be one of the reserved IDs (i.e. "default", + "global-temp") + + This corresponds to the ``zone_id`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation_async.AsyncOperation: + An object representing a long-running operation. 
+ + The result type for the operation will be :class:`google.cloud.dataplex_v1.types.Zone` A zone represents a logical group of related assets within a lake. A zone can + be used to map to organizational structure or + represent stages of data readiness from raw to + curated. It provides managing behavior that is shared + or inherited by all contained assets. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent, zone, zone_id]) + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, service.CreateZoneRequest): + request = service.CreateZoneRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + if zone is not None: + request.zone = zone + if zone_id is not None: + request.zone_id = zone_id + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[self._client._transport.create_zone] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("parent", request.parent), + )), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation_async.from_gapic( + response, + self._client._transport.operations_client, + resources.Zone, + metadata_type=service.OperationMetadata, + ) + + # Done; return the response. + return response + + async def update_zone(self, + request: Optional[Union[service.UpdateZoneRequest, dict]] = None, + *, + zone: Optional[resources.Zone] = None, + update_mask: Optional[field_mask_pb2.FieldMask] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation_async.AsyncOperation: + r"""Updates a zone resource. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dataplex_v1 + + async def sample_update_zone(): + # Create a client + client = dataplex_v1.DataplexServiceAsyncClient() + + # Initialize request argument(s) + zone = dataplex_v1.Zone() + zone.type_ = "CURATED" + zone.resource_spec.location_type = "MULTI_REGION" + + request = dataplex_v1.UpdateZoneRequest( + zone=zone, + ) + + # Make the request + operation = client.update_zone(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.dataplex_v1.types.UpdateZoneRequest, dict]]): + The request object. Update zone request. + zone (:class:`google.cloud.dataplex_v1.types.Zone`): + Required. Update description. Only fields specified in + ``update_mask`` are updated. + + This corresponds to the ``zone`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + update_mask (:class:`google.protobuf.field_mask_pb2.FieldMask`): + Required. Mask of fields to update. + This corresponds to the ``update_mask`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation_async.AsyncOperation: + An object representing a long-running operation. + + The result type for the operation will be :class:`google.cloud.dataplex_v1.types.Zone` A zone represents a logical group of related assets within a lake. A zone can + be used to map to organizational structure or + represent stages of data readiness from raw to + curated. It provides managing behavior that is shared + or inherited by all contained assets. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([zone, update_mask]) + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, service.UpdateZoneRequest): + request = service.UpdateZoneRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if zone is not None: + request.zone = zone + if update_mask is not None: + request.update_mask = update_mask + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[self._client._transport.update_zone] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("zone.name", request.zone.name), + )), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. 
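+ # The zone.name routing parameter was attached above as x-goog-request-params metadata so the backend can route the call.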
+ response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation_async.from_gapic( + response, + self._client._transport.operations_client, + resources.Zone, + metadata_type=service.OperationMetadata, + ) + + # Done; return the response. + return response + + async def delete_zone(self, + request: Optional[Union[service.DeleteZoneRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation_async.AsyncOperation: + r"""Deletes a zone resource. All assets within a zone + must be deleted before the zone can be deleted. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dataplex_v1 + + async def sample_delete_zone(): + # Create a client + client = dataplex_v1.DataplexServiceAsyncClient() + + # Initialize request argument(s) + request = dataplex_v1.DeleteZoneRequest( + name="name_value", + ) + + # Make the request + operation = client.delete_zone(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.dataplex_v1.types.DeleteZoneRequest, dict]]): + The request object. Delete zone request. + name (:class:`str`): + Required. The resource name of the zone: + ``projects/{project_number}/locations/{location_id}/lakes/{lake_id}/zones/{zone_id}``. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation_async.AsyncOperation: + An object representing a long-running operation. + + The result type for the operation will be :class:`google.protobuf.empty_pb2.Empty` A generic empty message that you can re-use to avoid defining duplicated + empty messages in your APIs. A typical example is to + use it as the request or the response type of an API + method. For instance: + + service Foo { + rpc Bar(google.protobuf.Empty) returns + (google.protobuf.Empty); + + } + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. 
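+ # A dict request is coerced into a typed DeleteZoneRequest here; proto-plus message constructors accept mappings.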
+ if not isinstance(request, service.DeleteZoneRequest): + request = service.DeleteZoneRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[self._client._transport.delete_zone] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation_async.from_gapic( + response, + self._client._transport.operations_client, + empty_pb2.Empty, + metadata_type=service.OperationMetadata, + ) + + # Done; return the response. + return response + + async def list_zones(self, + request: Optional[Union[service.ListZonesRequest, dict]] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListZonesAsyncPager: + r"""Lists zone resources in a lake. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dataplex_v1 + + async def sample_list_zones(): + # Create a client + client = dataplex_v1.DataplexServiceAsyncClient() + + # Initialize request argument(s) + request = dataplex_v1.ListZonesRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_zones(request=request) + + # Handle the response + async for response in page_result: + print(response) + + Args: + request (Optional[Union[google.cloud.dataplex_v1.types.ListZonesRequest, dict]]): + The request object. List zones request. + parent (:class:`str`): + Required. The resource name of the parent lake: + ``projects/{project_number}/locations/{location_id}/lakes/{lake_id}``. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.dataplex_v1.services.dataplex_service.pagers.ListZonesAsyncPager: + List zones response. + + Iterating over this object will yield + results and resolve additional pages + automatically. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
+ has_flattened_params = any([parent]) + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, service.ListZonesRequest): + request = service.ListZonesRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[self._client._transport.list_zones] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("parent", request.parent), + )), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__aiter__` convenience method. + response = pagers.ListZonesAsyncPager( + method=rpc, + request=request, + response=response, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def get_zone(self, + request: Optional[Union[service.GetZoneRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> resources.Zone: + r"""Retrieves a zone resource. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dataplex_v1 + + async def sample_get_zone(): + # Create a client + client = dataplex_v1.DataplexServiceAsyncClient() + + # Initialize request argument(s) + request = dataplex_v1.GetZoneRequest( + name="name_value", + ) + + # Make the request + response = await client.get_zone(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.dataplex_v1.types.GetZoneRequest, dict]]): + The request object. Get zone request. + name (:class:`str`): + Required. The resource name of the zone: + ``projects/{project_number}/locations/{location_id}/lakes/{lake_id}/zones/{zone_id}``. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.dataplex_v1.types.Zone: + A zone represents a logical group of + related assets within a lake. A zone can + be used to map to organizational + structure or represent stages of data + readiness from raw to curated. 
It + provides managing behavior that is + shared or inherited by all contained + assets. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, service.GetZoneRequest): + request = service.GetZoneRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[self._client._transport.get_zone] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def list_zone_actions(self, + request: Optional[Union[service.ListZoneActionsRequest, dict]] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListZoneActionsAsyncPager: + r"""Lists action resources in a zone. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dataplex_v1 + + async def sample_list_zone_actions(): + # Create a client + client = dataplex_v1.DataplexServiceAsyncClient() + + # Initialize request argument(s) + request = dataplex_v1.ListZoneActionsRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_zone_actions(request=request) + + # Handle the response + async for response in page_result: + print(response) + + Args: + request (Optional[Union[google.cloud.dataplex_v1.types.ListZoneActionsRequest, dict]]): + The request object. List zone actions request. + parent (:class:`str`): + Required. The resource name of the parent zone: + ``projects/{project_number}/locations/{location_id}/lakes/{lake_id}/zones/{zone_id}``. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.dataplex_v1.services.dataplex_service.pagers.ListZoneActionsAsyncPager: + List actions response. 
+ + Iterating over this object will yield + results and resolve additional pages + automatically. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent]) + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, service.ListZoneActionsRequest): + request = service.ListZoneActionsRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[self._client._transport.list_zone_actions] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("parent", request.parent), + )), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__aiter__` convenience method. + response = pagers.ListZoneActionsAsyncPager( + method=rpc, + request=request, + response=response, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def create_asset(self, + request: Optional[Union[service.CreateAssetRequest, dict]] = None, + *, + parent: Optional[str] = None, + asset: Optional[resources.Asset] = None, + asset_id: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation_async.AsyncOperation: + r"""Creates an asset resource. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dataplex_v1 + + async def sample_create_asset(): + # Create a client + client = dataplex_v1.DataplexServiceAsyncClient() + + # Initialize request argument(s) + asset = dataplex_v1.Asset() + asset.resource_spec.type_ = "BIGQUERY_DATASET" + + request = dataplex_v1.CreateAssetRequest( + parent="parent_value", + asset_id="asset_id_value", + asset=asset, + ) + + # Make the request + operation = client.create_asset(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.dataplex_v1.types.CreateAssetRequest, dict]]): + The request object. Create asset request. + parent (:class:`str`): + Required. 
The resource name of the parent zone: + ``projects/{project_number}/locations/{location_id}/lakes/{lake_id}/zones/{zone_id}``. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + asset (:class:`google.cloud.dataplex_v1.types.Asset`): + Required. Asset resource. + This corresponds to the ``asset`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + asset_id (:class:`str`): + Required. Asset identifier. This ID will be used to + generate names such as table names when publishing + metadata to Hive Metastore and BigQuery. + + - Must contain only lowercase letters, numbers and + hyphens. + - Must start with a letter. + - Must end with a number or a letter. + - Must be between 1-63 characters. + - Must be unique within the zone. + + This corresponds to the ``asset_id`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation_async.AsyncOperation: + An object representing a long-running operation. + + The result type for the operation will be :class:`google.cloud.dataplex_v1.types.Asset` An asset represents a cloud resource that is being managed within a lake as a + member of a zone. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent, asset, asset_id]) + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, service.CreateAssetRequest): + request = service.CreateAssetRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + if asset is not None: + request.asset = asset + if asset_id is not None: + request.asset_id = asset_id + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[self._client._transport.create_asset] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("parent", request.parent), + )), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation_async.from_gapic( + response, + self._client._transport.operations_client, + resources.Asset, + metadata_type=service.OperationMetadata, + ) + + # Done; return the response. 
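+        # The AsyncOperation built above resolves to a resources.Asset on
+        # success, with in-progress state surfaced as
+        # service.OperationMetadata; callers follow the pattern in the
+        # sample above, e.g.
+        #   response = (await operation).result()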
+ return response + + async def update_asset(self, + request: Optional[Union[service.UpdateAssetRequest, dict]] = None, + *, + asset: Optional[resources.Asset] = None, + update_mask: Optional[field_mask_pb2.FieldMask] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation_async.AsyncOperation: + r"""Updates an asset resource. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dataplex_v1 + + async def sample_update_asset(): + # Create a client + client = dataplex_v1.DataplexServiceAsyncClient() + + # Initialize request argument(s) + asset = dataplex_v1.Asset() + asset.resource_spec.type_ = "BIGQUERY_DATASET" + + request = dataplex_v1.UpdateAssetRequest( + asset=asset, + ) + + # Make the request + operation = client.update_asset(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.dataplex_v1.types.UpdateAssetRequest, dict]]): + The request object. Update asset request. + asset (:class:`google.cloud.dataplex_v1.types.Asset`): + Required. Update description. Only fields specified in + ``update_mask`` are updated. + + This corresponds to the ``asset`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + update_mask (:class:`google.protobuf.field_mask_pb2.FieldMask`): + Required. Mask of fields to update. + This corresponds to the ``update_mask`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation_async.AsyncOperation: + An object representing a long-running operation. + + The result type for the operation will be :class:`google.cloud.dataplex_v1.types.Asset` An asset represents a cloud resource that is being managed within a lake as a + member of a zone. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([asset, update_mask]) + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, service.UpdateAssetRequest): + request = service.UpdateAssetRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. 
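+        # Only the fields named in `update_mask` are written on the server.
+        # A hedged sketch of the flattened form (the field path is
+        # illustrative):
+        #   from google.protobuf import field_mask_pb2
+        #   mask = field_mask_pb2.FieldMask(paths=["display_name"])
+        #   operation = await client.update_asset(asset=asset, update_mask=mask)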
+ if asset is not None: + request.asset = asset + if update_mask is not None: + request.update_mask = update_mask + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[self._client._transport.update_asset] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("asset.name", request.asset.name), + )), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation_async.from_gapic( + response, + self._client._transport.operations_client, + resources.Asset, + metadata_type=service.OperationMetadata, + ) + + # Done; return the response. + return response + + async def delete_asset(self, + request: Optional[Union[service.DeleteAssetRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation_async.AsyncOperation: + r"""Deletes an asset resource. The referenced storage + resource is detached (default) or deleted based on the + associated Lifecycle policy. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dataplex_v1 + + async def sample_delete_asset(): + # Create a client + client = dataplex_v1.DataplexServiceAsyncClient() + + # Initialize request argument(s) + request = dataplex_v1.DeleteAssetRequest( + name="name_value", + ) + + # Make the request + operation = client.delete_asset(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.dataplex_v1.types.DeleteAssetRequest, dict]]): + The request object. Delete asset request. + name (:class:`str`): + Required. The resource name of the asset: + ``projects/{project_number}/locations/{location_id}/lakes/{lake_id}/zones/{zone_id}/assets/{asset_id}``. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation_async.AsyncOperation: + An object representing a long-running operation. + + The result type for the operation will be :class:`google.protobuf.empty_pb2.Empty` A generic empty message that you can re-use to avoid defining duplicated + empty messages in your APIs. A typical example is to + use it as the request or the response type of an API + method. 
For instance: + + service Foo { + rpc Bar(google.protobuf.Empty) returns + (google.protobuf.Empty); + + } + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, service.DeleteAssetRequest): + request = service.DeleteAssetRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[self._client._transport.delete_asset] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation_async.from_gapic( + response, + self._client._transport.operations_client, + empty_pb2.Empty, + metadata_type=service.OperationMetadata, + ) + + # Done; return the response. + return response + + async def list_assets(self, + request: Optional[Union[service.ListAssetsRequest, dict]] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListAssetsAsyncPager: + r"""Lists asset resources in a zone. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dataplex_v1 + + async def sample_list_assets(): + # Create a client + client = dataplex_v1.DataplexServiceAsyncClient() + + # Initialize request argument(s) + request = dataplex_v1.ListAssetsRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_assets(request=request) + + # Handle the response + async for response in page_result: + print(response) + + Args: + request (Optional[Union[google.cloud.dataplex_v1.types.ListAssetsRequest, dict]]): + The request object. List assets request. + parent (:class:`str`): + Required. The resource name of the parent zone: + ``projects/{project_number}/locations/{location_id}/lakes/{lake_id}/zones/{zone_id}``. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. 
+ metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.dataplex_v1.services.dataplex_service.pagers.ListAssetsAsyncPager: + List assets response. + + Iterating over this object will yield + results and resolve additional pages + automatically. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent]) + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, service.ListAssetsRequest): + request = service.ListAssetsRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[self._client._transport.list_assets] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("parent", request.parent), + )), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__aiter__` convenience method. + response = pagers.ListAssetsAsyncPager( + method=rpc, + request=request, + response=response, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def get_asset(self, + request: Optional[Union[service.GetAssetRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> resources.Asset: + r"""Retrieves an asset resource. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dataplex_v1 + + async def sample_get_asset(): + # Create a client + client = dataplex_v1.DataplexServiceAsyncClient() + + # Initialize request argument(s) + request = dataplex_v1.GetAssetRequest( + name="name_value", + ) + + # Make the request + response = await client.get_asset(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.dataplex_v1.types.GetAssetRequest, dict]]): + The request object. Get asset request. + name (:class:`str`): + Required. The resource name of the asset: + ``projects/{project_number}/locations/{location_id}/lakes/{lake_id}/zones/{zone_id}/assets/{asset_id}``. 
+ + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.dataplex_v1.types.Asset: + An asset represents a cloud resource + that is being managed within a lake as a + member of a zone. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, service.GetAssetRequest): + request = service.GetAssetRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[self._client._transport.get_asset] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def list_asset_actions(self, + request: Optional[Union[service.ListAssetActionsRequest, dict]] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListAssetActionsAsyncPager: + r"""Lists action resources in an asset. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dataplex_v1 + + async def sample_list_asset_actions(): + # Create a client + client = dataplex_v1.DataplexServiceAsyncClient() + + # Initialize request argument(s) + request = dataplex_v1.ListAssetActionsRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_asset_actions(request=request) + + # Handle the response + async for response in page_result: + print(response) + + Args: + request (Optional[Union[google.cloud.dataplex_v1.types.ListAssetActionsRequest, dict]]): + The request object. List asset actions request. + parent (:class:`str`): + Required. The resource name of the parent asset: + ``projects/{project_number}/locations/{location_id}/lakes/{lake_id}/zones/{zone_id}/assets/{asset_id}``. 
+ + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.dataplex_v1.services.dataplex_service.pagers.ListAssetActionsAsyncPager: + List actions response. + + Iterating over this object will yield + results and resolve additional pages + automatically. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent]) + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, service.ListAssetActionsRequest): + request = service.ListAssetActionsRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[self._client._transport.list_asset_actions] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("parent", request.parent), + )), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__aiter__` convenience method. + response = pagers.ListAssetActionsAsyncPager( + method=rpc, + request=request, + response=response, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def create_task(self, + request: Optional[Union[service.CreateTaskRequest, dict]] = None, + *, + parent: Optional[str] = None, + task: Optional[tasks.Task] = None, + task_id: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation_async.AsyncOperation: + r"""Creates a task resource within a lake. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dataplex_v1 + + async def sample_create_task(): + # Create a client + client = dataplex_v1.DataplexServiceAsyncClient() + + # Initialize request argument(s) + task = dataplex_v1.Task() + task.spark.main_jar_file_uri = "main_jar_file_uri_value" + task.trigger_spec.schedule = "schedule_value" + task.trigger_spec.type_ = "RECURRING" + task.execution_spec.service_account = "service_account_value" + + request = dataplex_v1.CreateTaskRequest( + parent="parent_value", + task_id="task_id_value", + task=task, + ) + + # Make the request + operation = client.create_task(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.dataplex_v1.types.CreateTaskRequest, dict]]): + The request object. Create task request. + parent (:class:`str`): + Required. The resource name of the parent lake: + ``projects/{project_number}/locations/{location_id}/lakes/{lake_id}``. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + task (:class:`google.cloud.dataplex_v1.types.Task`): + Required. Task resource. + This corresponds to the ``task`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + task_id (:class:`str`): + Required. Task identifier. + This corresponds to the ``task_id`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation_async.AsyncOperation: + An object representing a long-running operation. + + The result type for the operation will be + :class:`google.cloud.dataplex_v1.types.Task` A task + represents a user-visible job. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent, task, task_id]) + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, service.CreateTaskRequest): + request = service.CreateTaskRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + if task is not None: + request.task = task + if task_id is not None: + request.task_id = task_id + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[self._client._transport.create_task] + + # Certain fields should be provided within the metadata header; + # add these here. 
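+        # `to_grpc_metadata` below emits the "x-goog-request-params" header,
+        # e.g. ("x-goog-request-params", "parent=projects/p/locations/l/lakes/l")
+        # (an illustrative value), which the service uses to route the call.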
+ metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("parent", request.parent), + )), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation_async.from_gapic( + response, + self._client._transport.operations_client, + tasks.Task, + metadata_type=service.OperationMetadata, + ) + + # Done; return the response. + return response + + async def update_task(self, + request: Optional[Union[service.UpdateTaskRequest, dict]] = None, + *, + task: Optional[tasks.Task] = None, + update_mask: Optional[field_mask_pb2.FieldMask] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation_async.AsyncOperation: + r"""Update the task resource. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dataplex_v1 + + async def sample_update_task(): + # Create a client + client = dataplex_v1.DataplexServiceAsyncClient() + + # Initialize request argument(s) + task = dataplex_v1.Task() + task.spark.main_jar_file_uri = "main_jar_file_uri_value" + task.trigger_spec.schedule = "schedule_value" + task.trigger_spec.type_ = "RECURRING" + task.execution_spec.service_account = "service_account_value" + + request = dataplex_v1.UpdateTaskRequest( + task=task, + ) + + # Make the request + operation = client.update_task(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.dataplex_v1.types.UpdateTaskRequest, dict]]): + The request object. Update task request. + task (:class:`google.cloud.dataplex_v1.types.Task`): + Required. Update description. Only fields specified in + ``update_mask`` are updated. + + This corresponds to the ``task`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + update_mask (:class:`google.protobuf.field_mask_pb2.FieldMask`): + Required. Mask of fields to update. + This corresponds to the ``update_mask`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation_async.AsyncOperation: + An object representing a long-running operation. + + The result type for the operation will be + :class:`google.cloud.dataplex_v1.types.Task` A task + represents a user-visible job. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
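+        # As in update_asset, exactly one calling style may be used; supplying
+        # both, e.g. `update_task(request=dataplex_v1.UpdateTaskRequest(), task=task)`
+        # (illustrative), raises ValueError: "If the `request` argument is
+        # set, then none of the individual field arguments should be set."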
+ has_flattened_params = any([task, update_mask]) + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, service.UpdateTaskRequest): + request = service.UpdateTaskRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if task is not None: + request.task = task + if update_mask is not None: + request.update_mask = update_mask + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[self._client._transport.update_task] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("task.name", request.task.name), + )), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation_async.from_gapic( + response, + self._client._transport.operations_client, + tasks.Task, + metadata_type=service.OperationMetadata, + ) + + # Done; return the response. + return response + + async def delete_task(self, + request: Optional[Union[service.DeleteTaskRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation_async.AsyncOperation: + r"""Delete the task resource. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dataplex_v1 + + async def sample_delete_task(): + # Create a client + client = dataplex_v1.DataplexServiceAsyncClient() + + # Initialize request argument(s) + request = dataplex_v1.DeleteTaskRequest( + name="name_value", + ) + + # Make the request + operation = client.delete_task(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.dataplex_v1.types.DeleteTaskRequest, dict]]): + The request object. Delete task request. + name (:class:`str`): + Required. The resource name of the task: + ``projects/{project_number}/locations/{location_id}/lakes/{lake_id}/task/{task_id}``. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ + Returns: + google.api_core.operation_async.AsyncOperation: + An object representing a long-running operation. + + The result type for the operation will be :class:`google.protobuf.empty_pb2.Empty` A generic empty message that you can re-use to avoid defining duplicated + empty messages in your APIs. A typical example is to + use it as the request or the response type of an API + method. For instance: + + service Foo { + rpc Bar(google.protobuf.Empty) returns + (google.protobuf.Empty); + + } + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, service.DeleteTaskRequest): + request = service.DeleteTaskRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[self._client._transport.delete_task] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation_async.from_gapic( + response, + self._client._transport.operations_client, + empty_pb2.Empty, + metadata_type=service.OperationMetadata, + ) + + # Done; return the response. + return response + + async def list_tasks(self, + request: Optional[Union[service.ListTasksRequest, dict]] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListTasksAsyncPager: + r"""Lists tasks under the given lake. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dataplex_v1 + + async def sample_list_tasks(): + # Create a client + client = dataplex_v1.DataplexServiceAsyncClient() + + # Initialize request argument(s) + request = dataplex_v1.ListTasksRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_tasks(request=request) + + # Handle the response + async for response in page_result: + print(response) + + Args: + request (Optional[Union[google.cloud.dataplex_v1.types.ListTasksRequest, dict]]): + The request object. List tasks request. + parent (:class:`str`): + Required. 
The resource name of the parent lake: + ``projects/{project_number}/locations/{location_id}/lakes/{lake_id}``. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.dataplex_v1.services.dataplex_service.pagers.ListTasksAsyncPager: + List tasks response. + + Iterating over this object will yield + results and resolve additional pages + automatically. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent]) + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, service.ListTasksRequest): + request = service.ListTasksRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[self._client._transport.list_tasks] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("parent", request.parent), + )), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__aiter__` convenience method. + response = pagers.ListTasksAsyncPager( + method=rpc, + request=request, + response=response, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def get_task(self, + request: Optional[Union[service.GetTaskRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> tasks.Task: + r"""Get task resource. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dataplex_v1 + + async def sample_get_task(): + # Create a client + client = dataplex_v1.DataplexServiceAsyncClient() + + # Initialize request argument(s) + request = dataplex_v1.GetTaskRequest( + name="name_value", + ) + + # Make the request + response = await client.get_task(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.dataplex_v1.types.GetTaskRequest, dict]]): + The request object. Get task request. + name (:class:`str`): + Required. The resource name of the task: + ``projects/{project_number}/locations/{location_id}/lakes/{lake_id}/tasks/{tasks_id}``. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.dataplex_v1.types.Task: + A task represents a user-visible job. + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, service.GetTaskRequest): + request = service.GetTaskRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[self._client._transport.get_task] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def list_jobs(self, + request: Optional[Union[service.ListJobsRequest, dict]] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListJobsAsyncPager: + r"""Lists Jobs under the given task. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dataplex_v1 + + async def sample_list_jobs(): + # Create a client + client = dataplex_v1.DataplexServiceAsyncClient() + + # Initialize request argument(s) + request = dataplex_v1.ListJobsRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_jobs(request=request) + + # Handle the response + async for response in page_result: + print(response) + + Args: + request (Optional[Union[google.cloud.dataplex_v1.types.ListJobsRequest, dict]]): + The request object. List jobs request. + parent (:class:`str`): + Required. The resource name of the parent environment: + ``projects/{project_number}/locations/{location_id}/lakes/{lake_id}/tasks/{task_id}``. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.dataplex_v1.services.dataplex_service.pagers.ListJobsAsyncPager: + List jobs response. + + Iterating over this object will yield + results and resolve additional pages + automatically. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent]) + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, service.ListJobsRequest): + request = service.ListJobsRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[self._client._transport.list_jobs] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("parent", request.parent), + )), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__aiter__` convenience method. + response = pagers.ListJobsAsyncPager( + method=rpc, + request=request, + response=response, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def run_task(self, + request: Optional[Union[service.RunTaskRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> service.RunTaskResponse: + r"""Run an on demand execution of a Task. + + .. 
+ .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dataplex_v1 + + async def sample_run_task(): + # Create a client + client = dataplex_v1.DataplexServiceAsyncClient() + + # Initialize request argument(s) + request = dataplex_v1.RunTaskRequest( + name="name_value", + ) + + # Make the request + response = await client.run_task(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.dataplex_v1.types.RunTaskRequest, dict]]): + The request object. Run task request. + name (:class:`str`): + Required. The resource name of the task: + ``projects/{project_number}/locations/{location_id}/lakes/{lake_id}/tasks/{task_id}``. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.dataplex_v1.types.RunTaskResponse: + Run task response. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, service.RunTaskRequest): + request = service.RunTaskRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[self._client._transport.run_task] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def get_job(self, + request: Optional[Union[service.GetJobRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> tasks.Job: + r"""Get job resource. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization.
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dataplex_v1 + + async def sample_get_job(): + # Create a client + client = dataplex_v1.DataplexServiceAsyncClient() + + # Initialize request argument(s) + request = dataplex_v1.GetJobRequest( + name="name_value", + ) + + # Make the request + response = await client.get_job(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.dataplex_v1.types.GetJobRequest, dict]]): + The request object. Get job request. + name (:class:`str`): + Required. The resource name of the job: + ``projects/{project_number}/locations/{location_id}/lakes/{lake_id}/tasks/{task_id}/jobs/{job_id}``. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.dataplex_v1.types.Job: + A job represents an instance of a + task. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, service.GetJobRequest): + request = service.GetJobRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[self._client._transport.get_job] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def cancel_job(self, + request: Optional[Union[service.CancelJobRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Cancel jobs running for the task resource. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dataplex_v1 + + async def sample_cancel_job(): + # Create a client + client = dataplex_v1.DataplexServiceAsyncClient() + + # Initialize request argument(s) + request = dataplex_v1.CancelJobRequest( + name="name_value", + ) + + # Make the request + await client.cancel_job(request=request) + + Args: + request (Optional[Union[google.cloud.dataplex_v1.types.CancelJobRequest, dict]]): + The request object. Cancel task jobs. + name (:class:`str`): + Required. The resource name of the job: + ``projects/{project_number}/locations/{location_id}/lakes/{lake_id}/tasks/{task_id}/jobs/{job_id}``. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, service.CancelJobRequest): + request = service.CancelJobRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[self._client._transport.cancel_job] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + async def create_environment(self, + request: Optional[Union[service.CreateEnvironmentRequest, dict]] = None, + *, + parent: Optional[str] = None, + environment: Optional[analyze.Environment] = None, + environment_id: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation_async.AsyncOperation: + r"""Create an environment resource. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization.
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dataplex_v1 + + async def sample_create_environment(): + # Create a client + client = dataplex_v1.DataplexServiceAsyncClient() + + # Initialize request argument(s) + environment = dataplex_v1.Environment() + environment.infrastructure_spec.os_image.image_version = "image_version_value" + + request = dataplex_v1.CreateEnvironmentRequest( + parent="parent_value", + environment_id="environment_id_value", + environment=environment, + ) + + # Make the request + operation = client.create_environment(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.dataplex_v1.types.CreateEnvironmentRequest, dict]]): + The request object. Create environment request. + parent (:class:`str`): + Required. The resource name of the parent lake: + ``projects/{project_id}/locations/{location_id}/lakes/{lake_id}``. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + environment (:class:`google.cloud.dataplex_v1.types.Environment`): + Required. Environment resource. + This corresponds to the ``environment`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + environment_id (:class:`str`): + Required. Environment identifier. + + - Must contain only lowercase letters, numbers and + hyphens. + - Must start with a letter. + - Must be between 1-63 characters. + - Must end with a number or a letter. + - Must be unique within the lake. + + This corresponds to the ``environment_id`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation_async.AsyncOperation: + An object representing a long-running operation. + + The result type for the operation will be :class:`google.cloud.dataplex_v1.types.Environment` Environment represents a user-visible compute infrastructure for analytics + within a lake. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent, environment, environment_id]) + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, service.CreateEnvironmentRequest): + request = service.CreateEnvironmentRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. 
+ if parent is not None: + request.parent = parent + if environment is not None: + request.environment = environment + if environment_id is not None: + request.environment_id = environment_id + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[self._client._transport.create_environment] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("parent", request.parent), + )), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation_async.from_gapic( + response, + self._client._transport.operations_client, + analyze.Environment, + metadata_type=service.OperationMetadata, + ) + + # Done; return the response. + return response + + async def update_environment(self, + request: Optional[Union[service.UpdateEnvironmentRequest, dict]] = None, + *, + environment: Optional[analyze.Environment] = None, + update_mask: Optional[field_mask_pb2.FieldMask] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation_async.AsyncOperation: + r"""Update the environment resource. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dataplex_v1 + + async def sample_update_environment(): + # Create a client + client = dataplex_v1.DataplexServiceAsyncClient() + + # Initialize request argument(s) + environment = dataplex_v1.Environment() + environment.infrastructure_spec.os_image.image_version = "image_version_value" + + request = dataplex_v1.UpdateEnvironmentRequest( + environment=environment, + ) + + # Make the request + operation = client.update_environment(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.dataplex_v1.types.UpdateEnvironmentRequest, dict]]): + The request object. Update environment request. + environment (:class:`google.cloud.dataplex_v1.types.Environment`): + Required. Update description. Only fields specified in + ``update_mask`` are updated. + + This corresponds to the ``environment`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + update_mask (:class:`google.protobuf.field_mask_pb2.FieldMask`): + Required. Mask of fields to update. + This corresponds to the ``update_mask`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ + Returns: + google.api_core.operation_async.AsyncOperation: + An object representing a long-running operation. + + The result type for the operation will be :class:`google.cloud.dataplex_v1.types.Environment` Environment represents a user-visible compute infrastructure for analytics + within a lake. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([environment, update_mask]) + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, service.UpdateEnvironmentRequest): + request = service.UpdateEnvironmentRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if environment is not None: + request.environment = environment + if update_mask is not None: + request.update_mask = update_mask + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[self._client._transport.update_environment] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("environment.name", request.environment.name), + )), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation_async.from_gapic( + response, + self._client._transport.operations_client, + analyze.Environment, + metadata_type=service.OperationMetadata, + ) + + # Done; return the response. + return response + + async def delete_environment(self, + request: Optional[Union[service.DeleteEnvironmentRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation_async.AsyncOperation: + r"""Delete the environment resource. All the child + resources must have been deleted before environment + deletion can be initiated. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dataplex_v1 + + async def sample_delete_environment(): + # Create a client + client = dataplex_v1.DataplexServiceAsyncClient() + + # Initialize request argument(s) + request = dataplex_v1.DeleteEnvironmentRequest( + name="name_value", + ) + + # Make the request + operation = client.delete_environment(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.dataplex_v1.types.DeleteEnvironmentRequest, dict]]): + The request object. Delete environment request. + name (:class:`str`): + Required. The resource name of the environment: + ``projects/{project_id}/locations/{location_id}/lakes/{lake_id}/environments/{environment_id}``. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation_async.AsyncOperation: + An object representing a long-running operation. + + The result type for the operation will be :class:`google.protobuf.empty_pb2.Empty` A generic empty message that you can re-use to avoid defining duplicated + empty messages in your APIs. A typical example is to + use it as the request or the response type of an API + method. For instance: + + service Foo { + rpc Bar(google.protobuf.Empty) returns + (google.protobuf.Empty); + + } + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, service.DeleteEnvironmentRequest): + request = service.DeleteEnvironmentRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[self._client._transport.delete_environment] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation_async.from_gapic( + response, + self._client._transport.operations_client, + empty_pb2.Empty, + metadata_type=service.OperationMetadata, + ) + + # Done; return the response. 
+ return response + + async def list_environments(self, + request: Optional[Union[service.ListEnvironmentsRequest, dict]] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListEnvironmentsAsyncPager: + r"""Lists environments under the given lake. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dataplex_v1 + + async def sample_list_environments(): + # Create a client + client = dataplex_v1.DataplexServiceAsyncClient() + + # Initialize request argument(s) + request = dataplex_v1.ListEnvironmentsRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_environments(request=request) + + # Handle the response + async for response in page_result: + print(response) + + Args: + request (Optional[Union[google.cloud.dataplex_v1.types.ListEnvironmentsRequest, dict]]): + The request object. List environments request. + parent (:class:`str`): + Required. The resource name of the parent lake: + ``projects/{project_id}/locations/{location_id}/lakes/{lake_id}``. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.dataplex_v1.services.dataplex_service.pagers.ListEnvironmentsAsyncPager: + List environments response. + + Iterating over this object will yield + results and resolve additional pages + automatically. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent]) + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, service.ListEnvironmentsRequest): + request = service.ListEnvironmentsRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[self._client._transport.list_environments] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("parent", request.parent), + )), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. 
+ response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__aiter__` convenience method. + response = pagers.ListEnvironmentsAsyncPager( + method=rpc, + request=request, + response=response, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def get_environment(self, + request: Optional[Union[service.GetEnvironmentRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> analyze.Environment: + r"""Get environment resource. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dataplex_v1 + + async def sample_get_environment(): + # Create a client + client = dataplex_v1.DataplexServiceAsyncClient() + + # Initialize request argument(s) + request = dataplex_v1.GetEnvironmentRequest( + name="name_value", + ) + + # Make the request + response = await client.get_environment(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.dataplex_v1.types.GetEnvironmentRequest, dict]]): + The request object. Get environment request. + name (:class:`str`): + Required. The resource name of the environment: + ``projects/{project_id}/locations/{location_id}/lakes/{lake_id}/environments/{environment_id}``. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.dataplex_v1.types.Environment: + Environment represents a user-visible + compute infrastructure for analytics + within a lake. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, service.GetEnvironmentRequest): + request = service.GetEnvironmentRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[self._client._transport.get_environment] + + # Certain fields should be provided within the metadata header; + # add these here. 
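+ # (The routing header is sent as the "x-goog-request-params" metadata entry so the backend can route the call by resource name.)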
+ metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def list_sessions(self, + request: Optional[Union[service.ListSessionsRequest, dict]] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListSessionsAsyncPager: + r"""Lists session resources in an environment. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dataplex_v1 + + async def sample_list_sessions(): + # Create a client + client = dataplex_v1.DataplexServiceAsyncClient() + + # Initialize request argument(s) + request = dataplex_v1.ListSessionsRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_sessions(request=request) + + # Handle the response + async for response in page_result: + print(response) + + Args: + request (Optional[Union[google.cloud.dataplex_v1.types.ListSessionsRequest, dict]]): + The request object. List sessions request. + parent (:class:`str`): + Required. The resource name of the parent environment: + ``projects/{project_number}/locations/{location_id}/lakes/{lake_id}/environments/{environment_id}``. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.dataplex_v1.services.dataplex_service.pagers.ListSessionsAsyncPager: + List sessions response. + + Iterating over this object will yield + results and resolve additional pages + automatically. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent]) + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, service.ListSessionsRequest): + request = service.ListSessionsRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling.
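+ # (The wrapped method already carries the default retry and timeout settings configured for this RPC.)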
+ rpc = self._client._transport._wrapped_methods[self._client._transport.list_sessions] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("parent", request.parent), + )), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__aiter__` convenience method. + response = pagers.ListSessionsAsyncPager( + method=rpc, + request=request, + response=response, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def list_operations( + self, + request: Optional[operations_pb2.ListOperationsRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.ListOperationsResponse: + r"""Lists operations that match the specified filter in the request. + + Args: + request (:class:`~.operations_pb2.ListOperationsRequest`): + The request object. Request message for + `ListOperations` method. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + ~.operations_pb2.ListOperationsResponse: + Response message for ``ListOperations`` method. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.ListOperationsRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self.transport._wrapped_methods[self._client._transport.list_operations] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("name", request.name),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, retry=retry, timeout=timeout, metadata=metadata,) + + # Done; return the response. + return response + + async def get_operation( + self, + request: Optional[operations_pb2.GetOperationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.Operation: + r"""Gets the latest state of a long-running operation. + + Args: + request (:class:`~.operations_pb2.GetOperationRequest`): + The request object. Request message for + `GetOperation` method. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + ~.operations_pb2.Operation: + An ``Operation`` object. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. 
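+ # For example, a dict such as {"name": "operations/sample-operation"} (a hypothetical operation name) becomes operations_pb2.GetOperationRequest(name="operations/sample-operation").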
+ if isinstance(request, dict): + request = operations_pb2.GetOperationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self.transport._wrapped_methods[self._client._transport.get_operation] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("name", request.name),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, retry=retry, timeout=timeout, metadata=metadata,) + + # Done; return the response. + return response + + async def delete_operation( + self, + request: Optional[operations_pb2.DeleteOperationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Deletes a long-running operation. + + This method indicates that the client is no longer interested + in the operation result. It does not cancel the operation. + If the server doesn't support this method, it returns + `google.rpc.Code.UNIMPLEMENTED`. + + Args: + request (:class:`~.operations_pb2.DeleteOperationRequest`): + The request object. Request message for + `DeleteOperation` method. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + None + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.DeleteOperationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self.transport._wrapped_methods[self._client._transport.delete_operation] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("name", request.name),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + await rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + + async def cancel_operation( + self, + request: Optional[operations_pb2.CancelOperationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Starts asynchronous cancellation on a long-running operation. + + The server makes a best effort to cancel the operation, but success + is not guaranteed. If the server doesn't support this method, it returns + `google.rpc.Code.UNIMPLEMENTED`. + + Args: + request (:class:`~.operations_pb2.CancelOperationRequest`): + The request object. Request message for + `CancelOperation` method. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + None + """ + # Create or coerce a protobuf request object. 
+ # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.CancelOperationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self.transport._wrapped_methods[self._client._transport.cancel_operation] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("name", request.name),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + await rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + + async def get_location( + self, + request: Optional[locations_pb2.GetLocationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> locations_pb2.Location: + r"""Gets information about a location. + + Args: + request (:class:`~.location_pb2.GetLocationRequest`): + The request object. Request message for + `GetLocation` method. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + ~.location_pb2.Location: + Location object. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = locations_pb2.GetLocationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self.transport._wrapped_methods[self._client._transport.get_location] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("name", request.name),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, retry=retry, timeout=timeout, metadata=metadata,) + + # Done; return the response. + return response + + async def list_locations( + self, + request: Optional[locations_pb2.ListLocationsRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> locations_pb2.ListLocationsResponse: + r"""Lists information about the supported locations for this service. + + Args: + request (:class:`~.location_pb2.ListLocationsRequest`): + The request object. Request message for + `ListLocations` method. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + ~.location_pb2.ListLocationsResponse: + Response message for ``ListLocations`` method. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. 
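+ # For example, a dict such as {"name": "projects/sample-project"} (a hypothetical project name) becomes locations_pb2.ListLocationsRequest(name="projects/sample-project").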
+ if isinstance(request, dict): + request = locations_pb2.ListLocationsRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self.transport._wrapped_methods[self._client._transport.list_locations] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("name", request.name),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, retry=retry, timeout=timeout, metadata=metadata,) + + # Done; return the response. + return response + + async def __aenter__(self) -> "DataplexServiceAsyncClient": + return self + + async def __aexit__(self, exc_type, exc, tb): + await self.transport.close() + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(gapic_version=package_version.__version__) + + +__all__ = ( + "DataplexServiceAsyncClient", +) diff --git a/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/dataplex_service/client.py b/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/dataplex_service/client.py new file mode 100644 index 000000000000..81c523bf4d5d --- /dev/null +++ b/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/dataplex_service/client.py @@ -0,0 +1,4933 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from collections import OrderedDict +import os +import re +from typing import Dict, Callable, Mapping, MutableMapping, MutableSequence, Optional, Sequence, Tuple, Type, Union, cast +import warnings + +from google.cloud.dataplex_v1 import gapic_version as package_version + +from google.api_core import client_options as client_options_lib +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry as retries +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport import mtls # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.auth.exceptions import MutualTLSChannelError # type: ignore +from google.oauth2 import service_account # type: ignore + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault, None] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object, None] # type: ignore + +from google.api_core import operation # type: ignore +from google.api_core import operation_async # type: ignore +from google.cloud.dataplex_v1.services.dataplex_service import pagers +from google.cloud.dataplex_v1.types import analyze +from google.cloud.dataplex_v1.types import resources +from google.cloud.dataplex_v1.types import service +from google.cloud.dataplex_v1.types import tasks +from google.cloud.location import locations_pb2 # type: ignore +from google.iam.v1 import iam_policy_pb2 # type: ignore +from google.iam.v1 import policy_pb2 # type: ignore +from google.longrunning import operations_pb2 # type: ignore +from google.protobuf import empty_pb2 # type: ignore +from google.protobuf import field_mask_pb2 # type: ignore +from google.protobuf import timestamp_pb2 # type: ignore +from .transports.base import DataplexServiceTransport, DEFAULT_CLIENT_INFO +from .transports.grpc import DataplexServiceGrpcTransport +from .transports.grpc_asyncio import DataplexServiceGrpcAsyncIOTransport + + +class DataplexServiceClientMeta(type): + """Metaclass for the DataplexService client. + + This provides class-level methods for building and retrieving + support objects (e.g. transport) without polluting the client instance + objects. + """ + _transport_registry = OrderedDict() # type: Dict[str, Type[DataplexServiceTransport]] + _transport_registry["grpc"] = DataplexServiceGrpcTransport + _transport_registry["grpc_asyncio"] = DataplexServiceGrpcAsyncIOTransport + + def get_transport_class(cls, + label: Optional[str] = None, + ) -> Type[DataplexServiceTransport]: + """Returns an appropriate transport class. + + Args: + label: The name of the desired transport. If none is + provided, then the first transport in the registry is used. + + Returns: + The transport class to use. + """ + # If a specific transport is requested, return that one. + if label: + return cls._transport_registry[label] + + # No transport is requested; return the default (that is, the first one + # in the dictionary). + return next(iter(cls._transport_registry.values())) + + +class DataplexServiceClient(metaclass=DataplexServiceClientMeta): + """Dataplex service provides data lakes as a service. The + primary resources offered by this service are Lakes, Zones and + Assets which collectively allow a data administrator to + organize, manage, secure and catalog data across their + organization located across cloud projects in a variety of + storage systems including Cloud Storage and BigQuery. 
+ """ + + @staticmethod + def _get_default_mtls_endpoint(api_endpoint): + """Converts api endpoint to mTLS endpoint. + + Convert "*.sandbox.googleapis.com" and "*.googleapis.com" to + "*.mtls.sandbox.googleapis.com" and "*.mtls.googleapis.com" respectively. + Args: + api_endpoint (Optional[str]): the api endpoint to convert. + Returns: + str: converted mTLS api endpoint. + """ + if not api_endpoint: + return api_endpoint + + mtls_endpoint_re = re.compile( + r"(?P[^.]+)(?P\.mtls)?(?P\.sandbox)?(?P\.googleapis\.com)?" + ) + + m = mtls_endpoint_re.match(api_endpoint) + name, mtls, sandbox, googledomain = m.groups() + if mtls or not googledomain: + return api_endpoint + + if sandbox: + return api_endpoint.replace( + "sandbox.googleapis.com", "mtls.sandbox.googleapis.com" + ) + + return api_endpoint.replace(".googleapis.com", ".mtls.googleapis.com") + + # Note: DEFAULT_ENDPOINT is deprecated. Use _DEFAULT_ENDPOINT_TEMPLATE instead. + DEFAULT_ENDPOINT = "dataplex.googleapis.com" + DEFAULT_MTLS_ENDPOINT = _get_default_mtls_endpoint.__func__( # type: ignore + DEFAULT_ENDPOINT + ) + + _DEFAULT_ENDPOINT_TEMPLATE = "dataplex.{UNIVERSE_DOMAIN}" + _DEFAULT_UNIVERSE = "googleapis.com" + + @classmethod + def from_service_account_info(cls, info: dict, *args, **kwargs): + """Creates an instance of this client using the provided credentials + info. + + Args: + info (dict): The service account private key info. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + DataplexServiceClient: The constructed client. + """ + credentials = service_account.Credentials.from_service_account_info(info) + kwargs["credentials"] = credentials + return cls(*args, **kwargs) + + @classmethod + def from_service_account_file(cls, filename: str, *args, **kwargs): + """Creates an instance of this client using the provided credentials + file. + + Args: + filename (str): The path to the service account private key json + file. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + DataplexServiceClient: The constructed client. + """ + credentials = service_account.Credentials.from_service_account_file( + filename) + kwargs["credentials"] = credentials + return cls(*args, **kwargs) + + from_service_account_json = from_service_account_file + + @property + def transport(self) -> DataplexServiceTransport: + """Returns the transport used by the client instance. + + Returns: + DataplexServiceTransport: The transport used by the client + instance. 
+ """ + return self._transport + + @staticmethod + def action_path(project: str,location: str,lake: str,action: str,) -> str: + """Returns a fully-qualified action string.""" + return "projects/{project}/locations/{location}/lakes/{lake}/actions/{action}".format(project=project, location=location, lake=lake, action=action, ) + + @staticmethod + def parse_action_path(path: str) -> Dict[str,str]: + """Parses a action path into its component segments.""" + m = re.match(r"^projects/(?P.+?)/locations/(?P.+?)/lakes/(?P.+?)/actions/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def asset_path(project: str,location: str,lake: str,zone: str,asset: str,) -> str: + """Returns a fully-qualified asset string.""" + return "projects/{project}/locations/{location}/lakes/{lake}/zones/{zone}/assets/{asset}".format(project=project, location=location, lake=lake, zone=zone, asset=asset, ) + + @staticmethod + def parse_asset_path(path: str) -> Dict[str,str]: + """Parses a asset path into its component segments.""" + m = re.match(r"^projects/(?P.+?)/locations/(?P.+?)/lakes/(?P.+?)/zones/(?P.+?)/assets/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def environment_path(project: str,location: str,lake: str,environment: str,) -> str: + """Returns a fully-qualified environment string.""" + return "projects/{project}/locations/{location}/lakes/{lake}/environments/{environment}".format(project=project, location=location, lake=lake, environment=environment, ) + + @staticmethod + def parse_environment_path(path: str) -> Dict[str,str]: + """Parses a environment path into its component segments.""" + m = re.match(r"^projects/(?P.+?)/locations/(?P.+?)/lakes/(?P.+?)/environments/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def job_path(project: str,location: str,lake: str,task: str,job: str,) -> str: + """Returns a fully-qualified job string.""" + return "projects/{project}/locations/{location}/lakes/{lake}/tasks/{task}/jobs/{job}".format(project=project, location=location, lake=lake, task=task, job=job, ) + + @staticmethod + def parse_job_path(path: str) -> Dict[str,str]: + """Parses a job path into its component segments.""" + m = re.match(r"^projects/(?P.+?)/locations/(?P.+?)/lakes/(?P.+?)/tasks/(?P.+?)/jobs/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def lake_path(project: str,location: str,lake: str,) -> str: + """Returns a fully-qualified lake string.""" + return "projects/{project}/locations/{location}/lakes/{lake}".format(project=project, location=location, lake=lake, ) + + @staticmethod + def parse_lake_path(path: str) -> Dict[str,str]: + """Parses a lake path into its component segments.""" + m = re.match(r"^projects/(?P.+?)/locations/(?P.+?)/lakes/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def session_path(project: str,location: str,lake: str,environment: str,session: str,) -> str: + """Returns a fully-qualified session string.""" + return "projects/{project}/locations/{location}/lakes/{lake}/environments/{environment}/sessions/{session}".format(project=project, location=location, lake=lake, environment=environment, session=session, ) + + @staticmethod + def parse_session_path(path: str) -> Dict[str,str]: + """Parses a session path into its component segments.""" + m = re.match(r"^projects/(?P.+?)/locations/(?P.+?)/lakes/(?P.+?)/environments/(?P.+?)/sessions/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def task_path(project: str,location: str,lake: 
+ """Returns a fully-qualified task string.""" + return "projects/{project}/locations/{location}/lakes/{lake}/tasks/{task}".format(project=project, location=location, lake=lake, task=task, ) + + @staticmethod + def parse_task_path(path: str) -> Dict[str,str]: + """Parses a task path into its component segments.""" + m = re.match(r"^projects/(?P<project>.+?)/locations/(?P<location>.+?)/lakes/(?P<lake>.+?)/tasks/(?P<task>.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def zone_path(project: str,location: str,lake: str,zone: str,) -> str: + """Returns a fully-qualified zone string.""" + return "projects/{project}/locations/{location}/lakes/{lake}/zones/{zone}".format(project=project, location=location, lake=lake, zone=zone, ) + + @staticmethod + def parse_zone_path(path: str) -> Dict[str,str]: + """Parses a zone path into its component segments.""" + m = re.match(r"^projects/(?P<project>.+?)/locations/(?P<location>.+?)/lakes/(?P<lake>.+?)/zones/(?P<zone>.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_billing_account_path(billing_account: str, ) -> str: + """Returns a fully-qualified billing_account string.""" + return "billingAccounts/{billing_account}".format(billing_account=billing_account, ) + + @staticmethod + def parse_common_billing_account_path(path: str) -> Dict[str,str]: + """Parses a billing_account path into its component segments.""" + m = re.match(r"^billingAccounts/(?P<billing_account>.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_folder_path(folder: str, ) -> str: + """Returns a fully-qualified folder string.""" + return "folders/{folder}".format(folder=folder, ) + + @staticmethod + def parse_common_folder_path(path: str) -> Dict[str,str]: + """Parses a folder path into its component segments.""" + m = re.match(r"^folders/(?P<folder>.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_organization_path(organization: str, ) -> str: + """Returns a fully-qualified organization string.""" + return "organizations/{organization}".format(organization=organization, ) + + @staticmethod + def parse_common_organization_path(path: str) -> Dict[str,str]: + """Parses an organization path into its component segments.""" + m = re.match(r"^organizations/(?P<organization>.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_project_path(project: str, ) -> str: + """Returns a fully-qualified project string.""" + return "projects/{project}".format(project=project, ) + + @staticmethod + def parse_common_project_path(path: str) -> Dict[str,str]: + """Parses a project path into its component segments.""" + m = re.match(r"^projects/(?P<project>.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_location_path(project: str, location: str, ) -> str: + """Returns a fully-qualified location string.""" + return "projects/{project}/locations/{location}".format(project=project, location=location, ) + + @staticmethod + def parse_common_location_path(path: str) -> Dict[str,str]: + """Parses a location path into its component segments.""" + m = re.match(r"^projects/(?P<project>.+?)/locations/(?P<location>.+?)$", path) + return m.groupdict() if m else {} + + @classmethod + def get_mtls_endpoint_and_cert_source(cls, client_options: Optional[client_options_lib.ClientOptions] = None): + """Deprecated. Return the API endpoint and client cert source for mutual TLS. + + The client cert source is determined in the following order: + (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the + client cert source is None.
+ (2) if `client_options.client_cert_source` is provided, use the provided one; if the
+ default client cert source exists, use the default one; otherwise the client cert
+ source is None.
+
+ The API endpoint is determined in the following order:
+ (1) if `client_options.api_endpoint` is provided, use the provided one.
+ (2) if the `GOOGLE_API_USE_MTLS_ENDPOINT` environment variable is "always", use the
+ default mTLS endpoint; if the environment variable is "never", use the default API
+ endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise
+ use the default API endpoint.
+
+ More details can be found at https://google.aip.dev/auth/4114.
+
+ Args:
+ client_options (google.api_core.client_options.ClientOptions): Custom options for the
+ client. Only the `api_endpoint` and `client_cert_source` properties may be used
+ in this method.
+
+ Returns:
+ Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the
+ client cert source to use.
+
+ Raises:
+ google.auth.exceptions.MutualTLSChannelError: If any errors happen.
+ """
+
+ warnings.warn("get_mtls_endpoint_and_cert_source is deprecated. Use the api_endpoint property instead.",
+ DeprecationWarning)
+ if client_options is None:
+ client_options = client_options_lib.ClientOptions()
+ use_client_cert = os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false")
+ use_mtls_endpoint = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto")
+ if use_client_cert not in ("true", "false"):
+ raise ValueError("Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`")
+ if use_mtls_endpoint not in ("auto", "never", "always"):
+ raise MutualTLSChannelError("Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`")
+
+ # Figure out the client cert source to use.
+ client_cert_source = None
+ if use_client_cert == "true":
+ if client_options.client_cert_source:
+ client_cert_source = client_options.client_cert_source
+ elif mtls.has_default_client_cert_source():
+ client_cert_source = mtls.default_client_cert_source()
+
+ # Figure out which api endpoint to use.
+ if client_options.api_endpoint is not None:
+ api_endpoint = client_options.api_endpoint
+ elif use_mtls_endpoint == "always" or (use_mtls_endpoint == "auto" and client_cert_source):
+ api_endpoint = cls.DEFAULT_MTLS_ENDPOINT
+ else:
+ api_endpoint = cls.DEFAULT_ENDPOINT
+
+ return api_endpoint, client_cert_source
+
+ @staticmethod
+ def _read_environment_variables():
+ """Returns the environment variables used by the client.
+
+ Returns:
+ Tuple[bool, str, str]: returns the GOOGLE_API_USE_CLIENT_CERTIFICATE,
+ GOOGLE_API_USE_MTLS_ENDPOINT, and GOOGLE_CLOUD_UNIVERSE_DOMAIN environment variables.
+
+ Raises:
+ ValueError: If GOOGLE_API_USE_CLIENT_CERTIFICATE is not
+ any of ["true", "false"].
+ google.auth.exceptions.MutualTLSChannelError: If GOOGLE_API_USE_MTLS_ENDPOINT
+ is not any of ["auto", "never", "always"].
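+
+ For instance (an illustrative sketch; the tuple shown is simply what the
+ defaults in the implementation below evaluate to when none of the
+ variables are set):
+
+ .. code-block:: python
+
+ import os
+ for var in ("GOOGLE_API_USE_CLIENT_CERTIFICATE",
+ "GOOGLE_API_USE_MTLS_ENDPOINT",
+ "GOOGLE_CLOUD_UNIVERSE_DOMAIN"):
+ os.environ.pop(var, None)
+ # _read_environment_variables() would then return
+ # (False, "auto", None).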
+ """ + use_client_cert = os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false").lower() + use_mtls_endpoint = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto").lower() + universe_domain_env = os.getenv("GOOGLE_CLOUD_UNIVERSE_DOMAIN") + if use_client_cert not in ("true", "false"): + raise ValueError("Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`") + if use_mtls_endpoint not in ("auto", "never", "always"): + raise MutualTLSChannelError("Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`") + return use_client_cert == "true", use_mtls_endpoint, universe_domain_env + + @staticmethod + def _get_client_cert_source(provided_cert_source, use_cert_flag): + """Return the client cert source to be used by the client. + + Args: + provided_cert_source (bytes): The client certificate source provided. + use_cert_flag (bool): A flag indicating whether to use the client certificate. + + Returns: + bytes or None: The client cert source to be used by the client. + """ + client_cert_source = None + if use_cert_flag: + if provided_cert_source: + client_cert_source = provided_cert_source + elif mtls.has_default_client_cert_source(): + client_cert_source = mtls.default_client_cert_source() + return client_cert_source + + @staticmethod + def _get_api_endpoint(api_override, client_cert_source, universe_domain, use_mtls_endpoint): + """Return the API endpoint used by the client. + + Args: + api_override (str): The API endpoint override. If specified, this is always + the return value of this function and the other arguments are not used. + client_cert_source (bytes): The client certificate source used by the client. + universe_domain (str): The universe domain used by the client. + use_mtls_endpoint (str): How to use the mTLS endpoint, which depends also on the other parameters. + Possible values are "always", "auto", or "never". + + Returns: + str: The API endpoint to be used by the client. + """ + if api_override is not None: + api_endpoint = api_override + elif use_mtls_endpoint == "always" or (use_mtls_endpoint == "auto" and client_cert_source): + _default_universe = DataplexServiceClient._DEFAULT_UNIVERSE + if universe_domain != _default_universe: + raise MutualTLSChannelError(f"mTLS is not supported in any universe other than {_default_universe}.") + api_endpoint = DataplexServiceClient.DEFAULT_MTLS_ENDPOINT + else: + api_endpoint = DataplexServiceClient._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=universe_domain) + return api_endpoint + + @staticmethod + def _get_universe_domain(client_universe_domain: Optional[str], universe_domain_env: Optional[str]) -> str: + """Return the universe domain used by the client. + + Args: + client_universe_domain (Optional[str]): The universe domain configured via the client options. + universe_domain_env (Optional[str]): The universe domain configured via the "GOOGLE_CLOUD_UNIVERSE_DOMAIN" environment variable. + + Returns: + str: The universe domain to be used by the client. + + Raises: + ValueError: If the universe domain is an empty string. 
+ """ + universe_domain = DataplexServiceClient._DEFAULT_UNIVERSE + if client_universe_domain is not None: + universe_domain = client_universe_domain + elif universe_domain_env is not None: + universe_domain = universe_domain_env + if len(universe_domain.strip()) == 0: + raise ValueError("Universe Domain cannot be an empty string.") + return universe_domain + + def _validate_universe_domain(self): + """Validates client's and credentials' universe domains are consistent. + + Returns: + bool: True iff the configured universe domain is valid. + + Raises: + ValueError: If the configured universe domain is not valid. + """ + + # NOTE (b/349488459): universe validation is disabled until further notice. + return True + + @property + def api_endpoint(self): + """Return the API endpoint used by the client instance. + + Returns: + str: The API endpoint used by the client instance. + """ + return self._api_endpoint + + @property + def universe_domain(self) -> str: + """Return the universe domain used by the client instance. + + Returns: + str: The universe domain used by the client instance. + """ + return self._universe_domain + + def __init__(self, *, + credentials: Optional[ga_credentials.Credentials] = None, + transport: Optional[Union[str, DataplexServiceTransport, Callable[..., DataplexServiceTransport]]] = None, + client_options: Optional[Union[client_options_lib.ClientOptions, dict]] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + ) -> None: + """Instantiates the dataplex service client. + + Args: + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + transport (Optional[Union[str,DataplexServiceTransport,Callable[..., DataplexServiceTransport]]]): + The transport to use, or a Callable that constructs and returns a new transport. + If a Callable is given, it will be called with the same set of initialization + arguments as used in the DataplexServiceTransport constructor. + If set to None, a transport is chosen automatically. + client_options (Optional[Union[google.api_core.client_options.ClientOptions, dict]]): + Custom options for the client. + + 1. The ``api_endpoint`` property can be used to override the + default endpoint provided by the client when ``transport`` is + not explicitly provided. Only if this property is not set and + ``transport`` was not explicitly provided, the endpoint is + determined by the GOOGLE_API_USE_MTLS_ENDPOINT environment + variable, which have one of the following values: + "always" (always use the default mTLS endpoint), "never" (always + use the default regular endpoint) and "auto" (auto-switch to the + default mTLS endpoint if client certificate is present; this is + the default value). + + 2. If the GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable + is "true", then the ``client_cert_source`` property can be used + to provide a client certificate for mTLS transport. If + not provided, the default SSL client certificate will be used if + present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not + set, no client certificate will be used. + + 3. The ``universe_domain`` property can be used to override the + default "googleapis.com" universe. Note that the ``api_endpoint`` + property still takes precedence; and ``universe_domain`` is + currently not supported for mTLS. 
+ + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport + creation failed for any reason. + """ + self._client_options = client_options + if isinstance(self._client_options, dict): + self._client_options = client_options_lib.from_dict(self._client_options) + if self._client_options is None: + self._client_options = client_options_lib.ClientOptions() + self._client_options = cast(client_options_lib.ClientOptions, self._client_options) + + universe_domain_opt = getattr(self._client_options, 'universe_domain', None) + + self._use_client_cert, self._use_mtls_endpoint, self._universe_domain_env = DataplexServiceClient._read_environment_variables() + self._client_cert_source = DataplexServiceClient._get_client_cert_source(self._client_options.client_cert_source, self._use_client_cert) + self._universe_domain = DataplexServiceClient._get_universe_domain(universe_domain_opt, self._universe_domain_env) + self._api_endpoint = None # updated below, depending on `transport` + + # Initialize the universe domain validation. + self._is_universe_domain_valid = False + + api_key_value = getattr(self._client_options, "api_key", None) + if api_key_value and credentials: + raise ValueError("client_options.api_key and credentials are mutually exclusive") + + # Save or instantiate the transport. + # Ordinarily, we provide the transport, but allowing a custom transport + # instance provides an extensibility point for unusual situations. + transport_provided = isinstance(transport, DataplexServiceTransport) + if transport_provided: + # transport is a DataplexServiceTransport instance. + if credentials or self._client_options.credentials_file or api_key_value: + raise ValueError("When providing a transport instance, " + "provide its credentials directly.") + if self._client_options.scopes: + raise ValueError( + "When providing a transport instance, provide its scopes " + "directly." 
+ ) + self._transport = cast(DataplexServiceTransport, transport) + self._api_endpoint = self._transport.host + + self._api_endpoint = (self._api_endpoint or + DataplexServiceClient._get_api_endpoint( + self._client_options.api_endpoint, + self._client_cert_source, + self._universe_domain, + self._use_mtls_endpoint)) + + if not transport_provided: + import google.auth._default # type: ignore + + if api_key_value and hasattr(google.auth._default, "get_api_key_credentials"): + credentials = google.auth._default.get_api_key_credentials(api_key_value) + + transport_init: Union[Type[DataplexServiceTransport], Callable[..., DataplexServiceTransport]] = ( + DataplexServiceClient.get_transport_class(transport) + if isinstance(transport, str) or transport is None + else cast(Callable[..., DataplexServiceTransport], transport) + ) + # initialize with the provided callable or the passed in class + self._transport = transport_init( + credentials=credentials, + credentials_file=self._client_options.credentials_file, + host=self._api_endpoint, + scopes=self._client_options.scopes, + client_cert_source_for_mtls=self._client_cert_source, + quota_project_id=self._client_options.quota_project_id, + client_info=client_info, + always_use_jwt_access=True, + api_audience=self._client_options.api_audience, + ) + + def create_lake(self, + request: Optional[Union[service.CreateLakeRequest, dict]] = None, + *, + parent: Optional[str] = None, + lake: Optional[resources.Lake] = None, + lake_id: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation.Operation: + r"""Creates a lake resource. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dataplex_v1 + + def sample_create_lake(): + # Create a client + client = dataplex_v1.DataplexServiceClient() + + # Initialize request argument(s) + request = dataplex_v1.CreateLakeRequest( + parent="parent_value", + lake_id="lake_id_value", + ) + + # Make the request + operation = client.create_lake(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.dataplex_v1.types.CreateLakeRequest, dict]): + The request object. Create lake request. + parent (str): + Required. The resource name of the lake location, of the + form: projects/{project_number}/locations/{location_id} + where ``location_id`` refers to a GCP region. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + lake (google.cloud.dataplex_v1.types.Lake): + Required. Lake resource + This corresponds to the ``lake`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + lake_id (str): + Required. Lake identifier. This ID will be used to + generate names such as database and dataset names when + publishing metadata to Hive Metastore and BigQuery. + + - Must contain only lowercase letters, numbers and + hyphens. + - Must start with a letter. 
+ - Must end with a number or a letter. + - Must be between 1-63 characters. + - Must be unique within the customer project / + location. + + This corresponds to the ``lake_id`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation.Operation: + An object representing a long-running operation. + + The result type for the operation will be :class:`google.cloud.dataplex_v1.types.Lake` A lake is a centralized repository for managing enterprise data across the + organization distributed across many cloud projects, + and stored in a variety of storage services such as + Google Cloud Storage and BigQuery. The resources + attached to a lake are referred to as managed + resources. Data within these managed resources can be + structured or unstructured. A lake provides data + admins with tools to organize, secure and manage + their data at scale, and provides data scientists and + data engineers an integrated experience to easily + search, discover, analyze and transform data and + associated metadata. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent, lake, lake_id]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, service.CreateLakeRequest): + request = service.CreateLakeRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + if lake is not None: + request.lake = lake + if lake_id is not None: + request.lake_id = lake_id + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.create_lake] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("parent", request.parent), + )), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation.from_gapic( + response, + self._transport.operations_client, + resources.Lake, + metadata_type=service.OperationMetadata, + ) + + # Done; return the response. + return response + + def update_lake(self, + request: Optional[Union[service.UpdateLakeRequest, dict]] = None, + *, + lake: Optional[resources.Lake] = None, + update_mask: Optional[field_mask_pb2.FieldMask] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation.Operation: + r"""Updates a lake resource. + + .. 
code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dataplex_v1 + + def sample_update_lake(): + # Create a client + client = dataplex_v1.DataplexServiceClient() + + # Initialize request argument(s) + request = dataplex_v1.UpdateLakeRequest( + ) + + # Make the request + operation = client.update_lake(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.dataplex_v1.types.UpdateLakeRequest, dict]): + The request object. Update lake request. + lake (google.cloud.dataplex_v1.types.Lake): + Required. Update description. Only fields specified in + ``update_mask`` are updated. + + This corresponds to the ``lake`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + update_mask (google.protobuf.field_mask_pb2.FieldMask): + Required. Mask of fields to update. + This corresponds to the ``update_mask`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation.Operation: + An object representing a long-running operation. + + The result type for the operation will be :class:`google.cloud.dataplex_v1.types.Lake` A lake is a centralized repository for managing enterprise data across the + organization distributed across many cloud projects, + and stored in a variety of storage services such as + Google Cloud Storage and BigQuery. The resources + attached to a lake are referred to as managed + resources. Data within these managed resources can be + structured or unstructured. A lake provides data + admins with tools to organize, secure and manage + their data at scale, and provides data scientists and + data engineers an integrated experience to easily + search, discover, analyze and transform data and + associated metadata. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([lake, update_mask]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, service.UpdateLakeRequest): + request = service.UpdateLakeRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if lake is not None: + request.lake = lake + if update_mask is not None: + request.update_mask = update_mask + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. 
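+ # Editor's note: a minimal usage sketch of the flattened-argument
+ # pattern handled above (the lake name and field path are illustrative
+ # assumptions, not defaults):
+ #
+ #     from google.protobuf import field_mask_pb2
+ #
+ #     lake = dataplex_v1.Lake(
+ #         name="projects/my-project/locations/us-central1/lakes/my-lake",
+ #         description="Updated description",
+ #     )
+ #     mask = field_mask_pb2.FieldMask(paths=["description"])
+ #     operation = client.update_lake(lake=lake, update_mask=mask)
+ #     lake = operation.result()  # blocks until the LRO finishes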
+ rpc = self._transport._wrapped_methods[self._transport.update_lake] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("lake.name", request.lake.name), + )), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation.from_gapic( + response, + self._transport.operations_client, + resources.Lake, + metadata_type=service.OperationMetadata, + ) + + # Done; return the response. + return response + + def delete_lake(self, + request: Optional[Union[service.DeleteLakeRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation.Operation: + r"""Deletes a lake resource. All zones within the lake + must be deleted before the lake can be deleted. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dataplex_v1 + + def sample_delete_lake(): + # Create a client + client = dataplex_v1.DataplexServiceClient() + + # Initialize request argument(s) + request = dataplex_v1.DeleteLakeRequest( + name="name_value", + ) + + # Make the request + operation = client.delete_lake(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.dataplex_v1.types.DeleteLakeRequest, dict]): + The request object. Delete lake request. + name (str): + Required. The resource name of the lake: + ``projects/{project_number}/locations/{location_id}/lakes/{lake_id}``. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation.Operation: + An object representing a long-running operation. + + The result type for the operation will be :class:`google.protobuf.empty_pb2.Empty` A generic empty message that you can re-use to avoid defining duplicated + empty messages in your APIs. A typical example is to + use it as the request or the response type of an API + method. For instance: + + service Foo { + rpc Bar(google.protobuf.Empty) returns + (google.protobuf.Empty); + + } + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
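+ # Editor's note: a sketch of the two accepted calling styles for this
+ # method (the resource name is an illustrative assumption); combining
+ # them triggers the ValueError below:
+ #
+ #     # Either pass a request object...
+ #     request = dataplex_v1.DeleteLakeRequest(
+ #         name="projects/my-project/locations/us-central1/lakes/my-lake")
+ #     operation = client.delete_lake(request=request)
+ #
+ #     # ...or the flattened field, but never both at once.
+ #     operation = client.delete_lake(
+ #         name="projects/my-project/locations/us-central1/lakes/my-lake")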
+ has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, service.DeleteLakeRequest): + request = service.DeleteLakeRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.delete_lake] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation.from_gapic( + response, + self._transport.operations_client, + empty_pb2.Empty, + metadata_type=service.OperationMetadata, + ) + + # Done; return the response. + return response + + def list_lakes(self, + request: Optional[Union[service.ListLakesRequest, dict]] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListLakesPager: + r"""Lists lake resources in a project and location. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dataplex_v1 + + def sample_list_lakes(): + # Create a client + client = dataplex_v1.DataplexServiceClient() + + # Initialize request argument(s) + request = dataplex_v1.ListLakesRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_lakes(request=request) + + # Handle the response + for response in page_result: + print(response) + + Args: + request (Union[google.cloud.dataplex_v1.types.ListLakesRequest, dict]): + The request object. List lakes request. + parent (str): + Required. The resource name of the lake location, of the + form: + ``projects/{project_number}/locations/{location_id}`` + where ``location_id`` refers to a GCP region. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.dataplex_v1.services.dataplex_service.pagers.ListLakesPager: + List lakes response. + + Iterating over this object will yield + results and resolve additional pages + automatically. + + """ + # Create or coerce a protobuf request object. 
+ # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, service.ListLakesRequest): + request = service.ListLakesRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.list_lakes] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("parent", request.parent), + )), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__iter__` convenience method. + response = pagers.ListLakesPager( + method=rpc, + request=request, + response=response, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def get_lake(self, + request: Optional[Union[service.GetLakeRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> resources.Lake: + r"""Retrieves a lake resource. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dataplex_v1 + + def sample_get_lake(): + # Create a client + client = dataplex_v1.DataplexServiceClient() + + # Initialize request argument(s) + request = dataplex_v1.GetLakeRequest( + name="name_value", + ) + + # Make the request + response = client.get_lake(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.dataplex_v1.types.GetLakeRequest, dict]): + The request object. Get lake request. + name (str): + Required. The resource name of the lake: + ``projects/{project_number}/locations/{location_id}/lakes/{lake_id}``. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ + Returns: + google.cloud.dataplex_v1.types.Lake: + A lake is a centralized repository + for managing enterprise data across the + organization distributed across many + cloud projects, and stored in a variety + of storage services such as Google Cloud + Storage and BigQuery. The resources + attached to a lake are referred to as + managed resources. Data within these + managed resources can be structured or + unstructured. A lake provides data + admins with tools to organize, secure + and manage their data at scale, and + provides data scientists and data + engineers an integrated experience to + easily search, discover, analyze and + transform data and associated metadata. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, service.GetLakeRequest): + request = service.GetLakeRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.get_lake] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def list_lake_actions(self, + request: Optional[Union[service.ListLakeActionsRequest, dict]] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListLakeActionsPager: + r"""Lists action resources in a lake. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dataplex_v1 + + def sample_list_lake_actions(): + # Create a client + client = dataplex_v1.DataplexServiceClient() + + # Initialize request argument(s) + request = dataplex_v1.ListLakeActionsRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_lake_actions(request=request) + + # Handle the response + for response in page_result: + print(response) + + Args: + request (Union[google.cloud.dataplex_v1.types.ListLakeActionsRequest, dict]): + The request object. List lake actions request. + parent (str): + Required. The resource name of the parent lake: + ``projects/{project_number}/locations/{location_id}/lakes/{lake_id}``. 
+ + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.dataplex_v1.services.dataplex_service.pagers.ListLakeActionsPager: + List actions response. + + Iterating over this object will yield + results and resolve additional pages + automatically. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, service.ListLakeActionsRequest): + request = service.ListLakeActionsRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.list_lake_actions] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("parent", request.parent), + )), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__iter__` convenience method. + response = pagers.ListLakeActionsPager( + method=rpc, + request=request, + response=response, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def create_zone(self, + request: Optional[Union[service.CreateZoneRequest, dict]] = None, + *, + parent: Optional[str] = None, + zone: Optional[resources.Zone] = None, + zone_id: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation.Operation: + r"""Creates a zone resource within a lake. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dataplex_v1 + + def sample_create_zone(): + # Create a client + client = dataplex_v1.DataplexServiceClient() + + # Initialize request argument(s) + zone = dataplex_v1.Zone() + zone.type_ = "CURATED" + zone.resource_spec.location_type = "MULTI_REGION" + + request = dataplex_v1.CreateZoneRequest( + parent="parent_value", + zone_id="zone_id_value", + zone=zone, + ) + + # Make the request + operation = client.create_zone(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.dataplex_v1.types.CreateZoneRequest, dict]): + The request object. Create zone request. + parent (str): + Required. The resource name of the parent lake: + ``projects/{project_number}/locations/{location_id}/lakes/{lake_id}``. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + zone (google.cloud.dataplex_v1.types.Zone): + Required. Zone resource. + This corresponds to the ``zone`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + zone_id (str): + Required. Zone identifier. This ID will be used to + generate names such as database and dataset names when + publishing metadata to Hive Metastore and BigQuery. + + - Must contain only lowercase letters, numbers and + hyphens. + - Must start with a letter. + - Must end with a number or a letter. + - Must be between 1-63 characters. + - Must be unique across all lakes from all locations in + a project. + - Must not be one of the reserved IDs (i.e. "default", + "global-temp") + + This corresponds to the ``zone_id`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation.Operation: + An object representing a long-running operation. + + The result type for the operation will be :class:`google.cloud.dataplex_v1.types.Zone` A zone represents a logical group of related assets within a lake. A zone can + be used to map to organizational structure or + represent stages of data readiness from raw to + curated. It provides managing behavior that is shared + or inherited by all contained assets. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent, zone, zone_id]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, service.CreateZoneRequest): + request = service.CreateZoneRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. 
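+ # Editor's note: the zone_id rules listed in the docstring above can be
+ # pre-checked client-side; a heuristic sketch only, since the service
+ # remains the source of truth for validation:
+ #
+ #     import re
+ #     _ZONE_ID = re.compile(r"^[a-z]([a-z0-9-]{0,61}[a-z0-9])?$")
+ #
+ #     def looks_like_valid_zone_id(zone_id: str) -> bool:
+ #         return (bool(_ZONE_ID.match(zone_id))
+ #                 and zone_id not in ("default", "global-temp"))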
+ if parent is not None: + request.parent = parent + if zone is not None: + request.zone = zone + if zone_id is not None: + request.zone_id = zone_id + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.create_zone] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("parent", request.parent), + )), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation.from_gapic( + response, + self._transport.operations_client, + resources.Zone, + metadata_type=service.OperationMetadata, + ) + + # Done; return the response. + return response + + def update_zone(self, + request: Optional[Union[service.UpdateZoneRequest, dict]] = None, + *, + zone: Optional[resources.Zone] = None, + update_mask: Optional[field_mask_pb2.FieldMask] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation.Operation: + r"""Updates a zone resource. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dataplex_v1 + + def sample_update_zone(): + # Create a client + client = dataplex_v1.DataplexServiceClient() + + # Initialize request argument(s) + zone = dataplex_v1.Zone() + zone.type_ = "CURATED" + zone.resource_spec.location_type = "MULTI_REGION" + + request = dataplex_v1.UpdateZoneRequest( + zone=zone, + ) + + # Make the request + operation = client.update_zone(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.dataplex_v1.types.UpdateZoneRequest, dict]): + The request object. Update zone request. + zone (google.cloud.dataplex_v1.types.Zone): + Required. Update description. Only fields specified in + ``update_mask`` are updated. + + This corresponds to the ``zone`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + update_mask (google.protobuf.field_mask_pb2.FieldMask): + Required. Mask of fields to update. + This corresponds to the ``update_mask`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation.Operation: + An object representing a long-running operation. + + The result type for the operation will be :class:`google.cloud.dataplex_v1.types.Zone` A zone represents a logical group of related assets within a lake. 
A zone can + be used to map to organizational structure or + represent stages of data readiness from raw to + curated. It provides managing behavior that is shared + or inherited by all contained assets. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([zone, update_mask]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, service.UpdateZoneRequest): + request = service.UpdateZoneRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if zone is not None: + request.zone = zone + if update_mask is not None: + request.update_mask = update_mask + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.update_zone] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("zone.name", request.zone.name), + )), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation.from_gapic( + response, + self._transport.operations_client, + resources.Zone, + metadata_type=service.OperationMetadata, + ) + + # Done; return the response. + return response + + def delete_zone(self, + request: Optional[Union[service.DeleteZoneRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation.Operation: + r"""Deletes a zone resource. All assets within a zone + must be deleted before the zone can be deleted. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dataplex_v1 + + def sample_delete_zone(): + # Create a client + client = dataplex_v1.DataplexServiceClient() + + # Initialize request argument(s) + request = dataplex_v1.DeleteZoneRequest( + name="name_value", + ) + + # Make the request + operation = client.delete_zone(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.dataplex_v1.types.DeleteZoneRequest, dict]): + The request object. Delete zone request. + name (str): + Required. The resource name of the zone: + ``projects/{project_number}/locations/{location_id}/lakes/{lake_id}/zones/{zone_id}``. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. 
+ retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation.Operation: + An object representing a long-running operation. + + The result type for the operation will be :class:`google.protobuf.empty_pb2.Empty` A generic empty message that you can re-use to avoid defining duplicated + empty messages in your APIs. A typical example is to + use it as the request or the response type of an API + method. For instance: + + service Foo { + rpc Bar(google.protobuf.Empty) returns + (google.protobuf.Empty); + + } + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, service.DeleteZoneRequest): + request = service.DeleteZoneRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.delete_zone] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation.from_gapic( + response, + self._transport.operations_client, + empty_pb2.Empty, + metadata_type=service.OperationMetadata, + ) + + # Done; return the response. + return response + + def list_zones(self, + request: Optional[Union[service.ListZonesRequest, dict]] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListZonesPager: + r"""Lists zone resources in a lake. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dataplex_v1 + + def sample_list_zones(): + # Create a client + client = dataplex_v1.DataplexServiceClient() + + # Initialize request argument(s) + request = dataplex_v1.ListZonesRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_zones(request=request) + + # Handle the response + for response in page_result: + print(response) + + Args: + request (Union[google.cloud.dataplex_v1.types.ListZonesRequest, dict]): + The request object. List zones request. + parent (str): + Required. The resource name of the parent lake: + ``projects/{project_number}/locations/{location_id}/lakes/{lake_id}``. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.dataplex_v1.services.dataplex_service.pagers.ListZonesPager: + List zones response. + + Iterating over this object will yield + results and resolve additional pages + automatically. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, service.ListZonesRequest): + request = service.ListZonesRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.list_zones] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("parent", request.parent), + )), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__iter__` convenience method. + response = pagers.ListZonesPager( + method=rpc, + request=request, + response=response, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def get_zone(self, + request: Optional[Union[service.GetZoneRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> resources.Zone: + r"""Retrieves a zone resource. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. 
+ # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dataplex_v1 + + def sample_get_zone(): + # Create a client + client = dataplex_v1.DataplexServiceClient() + + # Initialize request argument(s) + request = dataplex_v1.GetZoneRequest( + name="name_value", + ) + + # Make the request + response = client.get_zone(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.dataplex_v1.types.GetZoneRequest, dict]): + The request object. Get zone request. + name (str): + Required. The resource name of the zone: + ``projects/{project_number}/locations/{location_id}/lakes/{lake_id}/zones/{zone_id}``. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.dataplex_v1.types.Zone: + A zone represents a logical group of + related assets within a lake. A zone can + be used to map to organizational + structure or represent stages of data + readiness from raw to curated. It + provides managing behavior that is + shared or inherited by all contained + assets. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, service.GetZoneRequest): + request = service.GetZoneRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.get_zone] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def list_zone_actions(self, + request: Optional[Union[service.ListZoneActionsRequest, dict]] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListZoneActionsPager: + r"""Lists action resources in a zone. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. 
+ # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dataplex_v1 + + def sample_list_zone_actions(): + # Create a client + client = dataplex_v1.DataplexServiceClient() + + # Initialize request argument(s) + request = dataplex_v1.ListZoneActionsRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_zone_actions(request=request) + + # Handle the response + for response in page_result: + print(response) + + Args: + request (Union[google.cloud.dataplex_v1.types.ListZoneActionsRequest, dict]): + The request object. List zone actions request. + parent (str): + Required. The resource name of the parent zone: + ``projects/{project_number}/locations/{location_id}/lakes/{lake_id}/zones/{zone_id}``. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.dataplex_v1.services.dataplex_service.pagers.ListZoneActionsPager: + List actions response. + + Iterating over this object will yield + results and resolve additional pages + automatically. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, service.ListZoneActionsRequest): + request = service.ListZoneActionsRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.list_zone_actions] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("parent", request.parent), + )), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__iter__` convenience method. + response = pagers.ListZoneActionsPager( + method=rpc, + request=request, + response=response, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
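+        # NOTE (editorial, illustrative only; not part of the generated client):
+        # the pager returned below can be consumed item-by-item or page-by-page.
+        # A minimal, hedged sketch; the parent path is a placeholder, not a real
+        # resource:
+        #
+        #     pager = client.list_zone_actions(
+        #         parent="projects/my-project/locations/us-central1/lakes/my-lake/zones/my-zone")
+        #     for page in pager.pages:         # one list-actions response per page
+        #         for action in page.actions:  # Action messages on this page
+        #             print(action.category)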
+ return response + + def create_asset(self, + request: Optional[Union[service.CreateAssetRequest, dict]] = None, + *, + parent: Optional[str] = None, + asset: Optional[resources.Asset] = None, + asset_id: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation.Operation: + r"""Creates an asset resource. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dataplex_v1 + + def sample_create_asset(): + # Create a client + client = dataplex_v1.DataplexServiceClient() + + # Initialize request argument(s) + asset = dataplex_v1.Asset() + asset.resource_spec.type_ = "BIGQUERY_DATASET" + + request = dataplex_v1.CreateAssetRequest( + parent="parent_value", + asset_id="asset_id_value", + asset=asset, + ) + + # Make the request + operation = client.create_asset(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.dataplex_v1.types.CreateAssetRequest, dict]): + The request object. Create asset request. + parent (str): + Required. The resource name of the parent zone: + ``projects/{project_number}/locations/{location_id}/lakes/{lake_id}/zones/{zone_id}``. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + asset (google.cloud.dataplex_v1.types.Asset): + Required. Asset resource. + This corresponds to the ``asset`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + asset_id (str): + Required. Asset identifier. This ID will be used to + generate names such as table names when publishing + metadata to Hive Metastore and BigQuery. + + - Must contain only lowercase letters, numbers and + hyphens. + - Must start with a letter. + - Must end with a number or a letter. + - Must be between 1-63 characters. + - Must be unique within the zone. + + This corresponds to the ``asset_id`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation.Operation: + An object representing a long-running operation. + + The result type for the operation will be :class:`google.cloud.dataplex_v1.types.Asset` An asset represents a cloud resource that is being managed within a lake as a + member of a zone. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
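+        # NOTE (editorial, illustrative only; not part of the generated client):
+        # a hedged sketch of the flattened calling form validated below; all
+        # resource names and IDs are placeholders:
+        #
+        #     asset = dataplex_v1.Asset()
+        #     asset.resource_spec.type_ = "STORAGE_BUCKET"
+        #     op = client.create_asset(
+        #         parent="projects/my-project/locations/us-central1/lakes/my-lake/zones/my-zone",
+        #         asset=asset,
+        #         asset_id="my-asset",
+        #     )
+        #     created = op.result()  # blocks until the long-running operation finishes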
+ has_flattened_params = any([parent, asset, asset_id]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, service.CreateAssetRequest): + request = service.CreateAssetRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + if asset is not None: + request.asset = asset + if asset_id is not None: + request.asset_id = asset_id + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.create_asset] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("parent", request.parent), + )), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation.from_gapic( + response, + self._transport.operations_client, + resources.Asset, + metadata_type=service.OperationMetadata, + ) + + # Done; return the response. + return response + + def update_asset(self, + request: Optional[Union[service.UpdateAssetRequest, dict]] = None, + *, + asset: Optional[resources.Asset] = None, + update_mask: Optional[field_mask_pb2.FieldMask] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation.Operation: + r"""Updates an asset resource. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dataplex_v1 + + def sample_update_asset(): + # Create a client + client = dataplex_v1.DataplexServiceClient() + + # Initialize request argument(s) + asset = dataplex_v1.Asset() + asset.resource_spec.type_ = "BIGQUERY_DATASET" + + request = dataplex_v1.UpdateAssetRequest( + asset=asset, + ) + + # Make the request + operation = client.update_asset(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.dataplex_v1.types.UpdateAssetRequest, dict]): + The request object. Update asset request. + asset (google.cloud.dataplex_v1.types.Asset): + Required. Update description. Only fields specified in + ``update_mask`` are updated. + + This corresponds to the ``asset`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + update_mask (google.protobuf.field_mask_pb2.FieldMask): + Required. Mask of fields to update. + This corresponds to the ``update_mask`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. 
+ retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation.Operation: + An object representing a long-running operation. + + The result type for the operation will be :class:`google.cloud.dataplex_v1.types.Asset` An asset represents a cloud resource that is being managed within a lake as a + member of a zone. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([asset, update_mask]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, service.UpdateAssetRequest): + request = service.UpdateAssetRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if asset is not None: + request.asset = asset + if update_mask is not None: + request.update_mask = update_mask + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.update_asset] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("asset.name", request.asset.name), + )), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation.from_gapic( + response, + self._transport.operations_client, + resources.Asset, + metadata_type=service.OperationMetadata, + ) + + # Done; return the response. + return response + + def delete_asset(self, + request: Optional[Union[service.DeleteAssetRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation.Operation: + r"""Deletes an asset resource. The referenced storage + resource is detached (default) or deleted based on the + associated Lifecycle policy. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dataplex_v1 + + def sample_delete_asset(): + # Create a client + client = dataplex_v1.DataplexServiceClient() + + # Initialize request argument(s) + request = dataplex_v1.DeleteAssetRequest( + name="name_value", + ) + + # Make the request + operation = client.delete_asset(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.dataplex_v1.types.DeleteAssetRequest, dict]): + The request object. Delete asset request. + name (str): + Required. The resource name of the asset: + ``projects/{project_number}/locations/{location_id}/lakes/{lake_id}/zones/{zone_id}/assets/{asset_id}``. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation.Operation: + An object representing a long-running operation. + + The result type for the operation will be :class:`google.protobuf.empty_pb2.Empty` A generic empty message that you can re-use to avoid defining duplicated + empty messages in your APIs. A typical example is to + use it as the request or the response type of an API + method. For instance: + + service Foo { + rpc Bar(google.protobuf.Empty) returns + (google.protobuf.Empty); + + } + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, service.DeleteAssetRequest): + request = service.DeleteAssetRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.delete_asset] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation.from_gapic( + response, + self._transport.operations_client, + empty_pb2.Empty, + metadata_type=service.OperationMetadata, + ) + + # Done; return the response. 
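+        # NOTE (editorial, illustrative only): the future returned here exposes
+        # the usual google.api_core.operation.Operation surface. A hedged sketch
+        # with placeholder values:
+        #
+        #     op = client.delete_asset(name=".../zones/my-zone/assets/my-asset")
+        #     if not op.done():
+        #         op.result(timeout=300)   # waits; raises on operation failure
+        #     print(op.metadata)           # OperationMetadata with progress details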
+ return response + + def list_assets(self, + request: Optional[Union[service.ListAssetsRequest, dict]] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListAssetsPager: + r"""Lists asset resources in a zone. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dataplex_v1 + + def sample_list_assets(): + # Create a client + client = dataplex_v1.DataplexServiceClient() + + # Initialize request argument(s) + request = dataplex_v1.ListAssetsRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_assets(request=request) + + # Handle the response + for response in page_result: + print(response) + + Args: + request (Union[google.cloud.dataplex_v1.types.ListAssetsRequest, dict]): + The request object. List assets request. + parent (str): + Required. The resource name of the parent zone: + ``projects/{project_number}/locations/{location_id}/lakes/{lake_id}/zones/{zone_id}``. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.dataplex_v1.services.dataplex_service.pagers.ListAssetsPager: + List assets response. + + Iterating over this object will yield + results and resolve additional pages + automatically. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, service.ListAssetsRequest): + request = service.ListAssetsRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.list_assets] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("parent", request.parent), + )), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__iter__` convenience method. 
+ response = pagers.ListAssetsPager( + method=rpc, + request=request, + response=response, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def get_asset(self, + request: Optional[Union[service.GetAssetRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> resources.Asset: + r"""Retrieves an asset resource. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dataplex_v1 + + def sample_get_asset(): + # Create a client + client = dataplex_v1.DataplexServiceClient() + + # Initialize request argument(s) + request = dataplex_v1.GetAssetRequest( + name="name_value", + ) + + # Make the request + response = client.get_asset(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.dataplex_v1.types.GetAssetRequest, dict]): + The request object. Get asset request. + name (str): + Required. The resource name of the asset: + ``projects/{project_number}/locations/{location_id}/lakes/{lake_id}/zones/{zone_id}/assets/{asset_id}``. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.dataplex_v1.types.Asset: + An asset represents a cloud resource + that is being managed within a lake as a + member of a zone. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, service.GetAssetRequest): + request = service.GetAssetRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.get_asset] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
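+        # NOTE (editorial, illustrative only): as validated above, the flattened
+        # `name` argument and a full request object are mutually exclusive. Both
+        # hedged, placeholder-valued calls below are equivalent:
+        #
+        #     asset = client.get_asset(name=".../zones/my-zone/assets/my-asset")
+        #     asset = client.get_asset(
+        #         request={"name": ".../zones/my-zone/assets/my-asset"})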
+ return response + + def list_asset_actions(self, + request: Optional[Union[service.ListAssetActionsRequest, dict]] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListAssetActionsPager: + r"""Lists action resources in an asset. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dataplex_v1 + + def sample_list_asset_actions(): + # Create a client + client = dataplex_v1.DataplexServiceClient() + + # Initialize request argument(s) + request = dataplex_v1.ListAssetActionsRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_asset_actions(request=request) + + # Handle the response + for response in page_result: + print(response) + + Args: + request (Union[google.cloud.dataplex_v1.types.ListAssetActionsRequest, dict]): + The request object. List asset actions request. + parent (str): + Required. The resource name of the parent asset: + ``projects/{project_number}/locations/{location_id}/lakes/{lake_id}/zones/{zone_id}/assets/{asset_id}``. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.dataplex_v1.services.dataplex_service.pagers.ListAssetActionsPager: + List actions response. + + Iterating over this object will yield + results and resolve additional pages + automatically. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, service.ListAssetActionsRequest): + request = service.ListAssetActionsRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.list_asset_actions] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("parent", request.parent), + )), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. 
+ response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__iter__` convenience method. + response = pagers.ListAssetActionsPager( + method=rpc, + request=request, + response=response, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def create_task(self, + request: Optional[Union[service.CreateTaskRequest, dict]] = None, + *, + parent: Optional[str] = None, + task: Optional[tasks.Task] = None, + task_id: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation.Operation: + r"""Creates a task resource within a lake. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dataplex_v1 + + def sample_create_task(): + # Create a client + client = dataplex_v1.DataplexServiceClient() + + # Initialize request argument(s) + task = dataplex_v1.Task() + task.spark.main_jar_file_uri = "main_jar_file_uri_value" + task.trigger_spec.schedule = "schedule_value" + task.trigger_spec.type_ = "RECURRING" + task.execution_spec.service_account = "service_account_value" + + request = dataplex_v1.CreateTaskRequest( + parent="parent_value", + task_id="task_id_value", + task=task, + ) + + # Make the request + operation = client.create_task(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.dataplex_v1.types.CreateTaskRequest, dict]): + The request object. Create task request. + parent (str): + Required. The resource name of the parent lake: + ``projects/{project_number}/locations/{location_id}/lakes/{lake_id}``. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + task (google.cloud.dataplex_v1.types.Task): + Required. Task resource. + This corresponds to the ``task`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + task_id (str): + Required. Task identifier. + This corresponds to the ``task_id`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation.Operation: + An object representing a long-running operation. + + The result type for the operation will be + :class:`google.cloud.dataplex_v1.types.Task` A task + represents a user-visible job. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
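+        # NOTE (editorial, illustrative only): a hedged sketch of the flattened
+        # form checked below; URIs, account names and IDs are placeholders:
+        #
+        #     task = dataplex_v1.Task()
+        #     task.spark.main_jar_file_uri = "gs://my-bucket/my-job.jar"
+        #     task.trigger_spec.type_ = "ON_DEMAND"
+        #     task.execution_spec.service_account = "sa@my-project.iam.gserviceaccount.com"
+        #     op = client.create_task(
+        #         parent="projects/my-project/locations/us-central1/lakes/my-lake",
+        #         task=task,
+        #         task_id="my-task",
+        #     )
+        #     created = op.result()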
+ has_flattened_params = any([parent, task, task_id]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, service.CreateTaskRequest): + request = service.CreateTaskRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + if task is not None: + request.task = task + if task_id is not None: + request.task_id = task_id + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.create_task] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("parent", request.parent), + )), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation.from_gapic( + response, + self._transport.operations_client, + tasks.Task, + metadata_type=service.OperationMetadata, + ) + + # Done; return the response. + return response + + def update_task(self, + request: Optional[Union[service.UpdateTaskRequest, dict]] = None, + *, + task: Optional[tasks.Task] = None, + update_mask: Optional[field_mask_pb2.FieldMask] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation.Operation: + r"""Update the task resource. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dataplex_v1 + + def sample_update_task(): + # Create a client + client = dataplex_v1.DataplexServiceClient() + + # Initialize request argument(s) + task = dataplex_v1.Task() + task.spark.main_jar_file_uri = "main_jar_file_uri_value" + task.trigger_spec.schedule = "schedule_value" + task.trigger_spec.type_ = "RECURRING" + task.execution_spec.service_account = "service_account_value" + + request = dataplex_v1.UpdateTaskRequest( + task=task, + ) + + # Make the request + operation = client.update_task(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.dataplex_v1.types.UpdateTaskRequest, dict]): + The request object. Update task request. + task (google.cloud.dataplex_v1.types.Task): + Required. Update description. Only fields specified in + ``update_mask`` are updated. + + This corresponds to the ``task`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + update_mask (google.protobuf.field_mask_pb2.FieldMask): + Required. Mask of fields to update. 
+ This corresponds to the ``update_mask`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation.Operation: + An object representing a long-running operation. + + The result type for the operation will be + :class:`google.cloud.dataplex_v1.types.Task` A task + represents a user-visible job. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([task, update_mask]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, service.UpdateTaskRequest): + request = service.UpdateTaskRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if task is not None: + request.task = task + if update_mask is not None: + request.update_mask = update_mask + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.update_task] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("task.name", request.task.name), + )), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation.from_gapic( + response, + self._transport.operations_client, + tasks.Task, + metadata_type=service.OperationMetadata, + ) + + # Done; return the response. + return response + + def delete_task(self, + request: Optional[Union[service.DeleteTaskRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation.Operation: + r"""Delete the task resource. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dataplex_v1 + + def sample_delete_task(): + # Create a client + client = dataplex_v1.DataplexServiceClient() + + # Initialize request argument(s) + request = dataplex_v1.DeleteTaskRequest( + name="name_value", + ) + + # Make the request + operation = client.delete_task(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.dataplex_v1.types.DeleteTaskRequest, dict]): + The request object. Delete task request. + name (str): + Required. The resource name of the task: + ``projects/{project_number}/locations/{location_id}/lakes/{lake_id}/tasks/{task_id}``. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation.Operation: + An object representing a long-running operation. + + The result type for the operation will be :class:`google.protobuf.empty_pb2.Empty` A generic empty message that you can re-use to avoid defining duplicated + empty messages in your APIs. A typical example is to + use it as the request or the response type of an API + method. For instance: + + service Foo { + rpc Bar(google.protobuf.Empty) returns + (google.protobuf.Empty); + + } + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, service.DeleteTaskRequest): + request = service.DeleteTaskRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.delete_task] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation.from_gapic( + response, + self._transport.operations_client, + empty_pb2.Empty, + metadata_type=service.OperationMetadata, + ) + + # Done; return the response.
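+        # NOTE (editorial, illustrative only): per-call retry/timeout overrides
+        # use google.api_core primitives; a hedged sketch with placeholder
+        # values (tune the numbers to your own latency budget):
+        #
+        #     from google.api_core import retry as retries
+        #     op = client.delete_task(
+        #         name=".../lakes/my-lake/tasks/my-task",
+        #         retry=retries.Retry(initial=1.0, maximum=10.0, multiplier=2.0),
+        #         timeout=60.0,
+        #     )
+        #     op.result()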
+ return response + + def list_tasks(self, + request: Optional[Union[service.ListTasksRequest, dict]] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListTasksPager: + r"""Lists tasks under the given lake. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dataplex_v1 + + def sample_list_tasks(): + # Create a client + client = dataplex_v1.DataplexServiceClient() + + # Initialize request argument(s) + request = dataplex_v1.ListTasksRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_tasks(request=request) + + # Handle the response + for response in page_result: + print(response) + + Args: + request (Union[google.cloud.dataplex_v1.types.ListTasksRequest, dict]): + The request object. List tasks request. + parent (str): + Required. The resource name of the parent lake: + ``projects/{project_number}/locations/{location_id}/lakes/{lake_id}``. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.dataplex_v1.services.dataplex_service.pagers.ListTasksPager: + List tasks response. + + Iterating over this object will yield + results and resolve additional pages + automatically. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, service.ListTasksRequest): + request = service.ListTasksRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.list_tasks] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("parent", request.parent), + )), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__iter__` convenience method. 
+ response = pagers.ListTasksPager( + method=rpc, + request=request, + response=response, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def get_task(self, + request: Optional[Union[service.GetTaskRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> tasks.Task: + r"""Get task resource. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dataplex_v1 + + def sample_get_task(): + # Create a client + client = dataplex_v1.DataplexServiceClient() + + # Initialize request argument(s) + request = dataplex_v1.GetTaskRequest( + name="name_value", + ) + + # Make the request + response = client.get_task(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.dataplex_v1.types.GetTaskRequest, dict]): + The request object. Get task request. + name (str): + Required. The resource name of the task: + ``projects/{project_number}/locations/{location_id}/lakes/{lake_id}/tasks/{task_id}``. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.dataplex_v1.types.Task: + A task represents a user-visible job. + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, service.GetTaskRequest): + request = service.GetTaskRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.get_task] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response.
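+        # NOTE (editorial, illustrative only): caller-supplied metadata is
+        # merged with the routing header computed above. A hedged sketch with a
+        # hypothetical header key and placeholder name:
+        #
+        #     task = client.get_task(
+        #         name=".../lakes/my-lake/tasks/my-task",
+        #         metadata=[("x-example-request-tag", "audit")],  # hypothetical header
+        #     )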
+ return response + + def list_jobs(self, + request: Optional[Union[service.ListJobsRequest, dict]] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListJobsPager: + r"""Lists jobs under the given task. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dataplex_v1 + + def sample_list_jobs(): + # Create a client + client = dataplex_v1.DataplexServiceClient() + + # Initialize request argument(s) + request = dataplex_v1.ListJobsRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_jobs(request=request) + + # Handle the response + for response in page_result: + print(response) + + Args: + request (Union[google.cloud.dataplex_v1.types.ListJobsRequest, dict]): + The request object. List jobs request. + parent (str): + Required. The resource name of the parent task: + ``projects/{project_number}/locations/{location_id}/lakes/{lake_id}/tasks/{task_id}``. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.dataplex_v1.services.dataplex_service.pagers.ListJobsPager: + List jobs response. + + Iterating over this object will yield + results and resolve additional pages + automatically. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, service.ListJobsRequest): + request = service.ListJobsRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.list_jobs] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("parent", request.parent), + )), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__iter__` convenience method.
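+        # NOTE (editorial, illustrative only): a hedged usage sketch for the
+        # pager built below; the parent path is a placeholder:
+        #
+        #     for job in client.list_jobs(
+        #             parent=".../lakes/my-lake/tasks/my-task"):
+        #         print(job.name, job.state)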
+ response = pagers.ListJobsPager( + method=rpc, + request=request, + response=response, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def run_task(self, + request: Optional[Union[service.RunTaskRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> service.RunTaskResponse: + r"""Run an on demand execution of a Task. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dataplex_v1 + + def sample_run_task(): + # Create a client + client = dataplex_v1.DataplexServiceClient() + + # Initialize request argument(s) + request = dataplex_v1.RunTaskRequest( + name="name_value", + ) + + # Make the request + response = client.run_task(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.dataplex_v1.types.RunTaskRequest, dict]): + The request object. + name (str): + Required. The resource name of the task: + ``projects/{project_number}/locations/{location_id}/lakes/{lake_id}/tasks/{task_id}``. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.dataplex_v1.types.RunTaskResponse: + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, service.RunTaskRequest): + request = service.RunTaskRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.run_task] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
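+        # NOTE (editorial, illustrative only): RunTaskResponse carries the job
+        # created by the on-demand run. A hedged sketch with placeholder names:
+        #
+        #     resp = client.run_task(name=".../lakes/my-lake/tasks/my-task")
+        #     print(resp.job.name, resp.job.state)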
+ return response + + def get_job(self, + request: Optional[Union[service.GetJobRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> tasks.Job: + r"""Get job resource. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dataplex_v1 + + def sample_get_job(): + # Create a client + client = dataplex_v1.DataplexServiceClient() + + # Initialize request argument(s) + request = dataplex_v1.GetJobRequest( + name="name_value", + ) + + # Make the request + response = client.get_job(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.dataplex_v1.types.GetJobRequest, dict]): + The request object. Get job request. + name (str): + Required. The resource name of the job: + ``projects/{project_number}/locations/{location_id}/lakes/{lake_id}/tasks/{task_id}/jobs/{job_id}``. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.dataplex_v1.types.Job: + A job represents an instance of a + task. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, service.GetJobRequest): + request = service.GetJobRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.get_job] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
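+        # NOTE (editorial, illustrative only): a hedged polling sketch; Job.state
+        # eventually reaches a terminal value such as SUCCEEDED, FAILED,
+        # CANCELLED or ABORTED. Names and the poll interval are placeholders:
+        #
+        #     import time
+        #     terminal = {
+        #         dataplex_v1.Job.State.SUCCEEDED,
+        #         dataplex_v1.Job.State.FAILED,
+        #         dataplex_v1.Job.State.CANCELLED,
+        #         dataplex_v1.Job.State.ABORTED,
+        #     }
+        #     job = client.get_job(name=".../tasks/my-task/jobs/my-job")
+        #     while job.state not in terminal:
+        #         time.sleep(30)
+        #         job = client.get_job(name=job.name)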
+ return response + + def cancel_job(self, + request: Optional[Union[service.CancelJobRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Cancel jobs running for the task resource. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dataplex_v1 + + def sample_cancel_job(): + # Create a client + client = dataplex_v1.DataplexServiceClient() + + # Initialize request argument(s) + request = dataplex_v1.CancelJobRequest( + name="name_value", + ) + + # Make the request + client.cancel_job(request=request) + + Args: + request (Union[google.cloud.dataplex_v1.types.CancelJobRequest, dict]): + The request object. Cancel task jobs. + name (str): + Required. The resource name of the job: + ``projects/{project_number}/locations/{location_id}/lakes/{lake_id}/tasks/{task_id}/jobs/{job_id}``. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, service.CancelJobRequest): + request = service.CancelJobRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.cancel_job] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + def create_environment(self, + request: Optional[Union[service.CreateEnvironmentRequest, dict]] = None, + *, + parent: Optional[str] = None, + environment: Optional[analyze.Environment] = None, + environment_id: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation.Operation: + r"""Create an environment resource. + + ..
code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dataplex_v1 + + def sample_create_environment(): + # Create a client + client = dataplex_v1.DataplexServiceClient() + + # Initialize request argument(s) + environment = dataplex_v1.Environment() + environment.infrastructure_spec.os_image.image_version = "image_version_value" + + request = dataplex_v1.CreateEnvironmentRequest( + parent="parent_value", + environment_id="environment_id_value", + environment=environment, + ) + + # Make the request + operation = client.create_environment(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.dataplex_v1.types.CreateEnvironmentRequest, dict]): + The request object. Create environment request. + parent (str): + Required. The resource name of the parent lake: + ``projects/{project_id}/locations/{location_id}/lakes/{lake_id}``. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + environment (google.cloud.dataplex_v1.types.Environment): + Required. Environment resource. + This corresponds to the ``environment`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + environment_id (str): + Required. Environment identifier. + + - Must contain only lowercase letters, numbers and + hyphens. + - Must start with a letter. + - Must be between 1-63 characters. + - Must end with a number or a letter. + - Must be unique within the lake. + + This corresponds to the ``environment_id`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation.Operation: + An object representing a long-running operation. + + The result type for the operation will be :class:`google.cloud.dataplex_v1.types.Environment` Environment represents a user-visible compute infrastructure for analytics + within a lake. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent, environment, environment_id]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, service.CreateEnvironmentRequest): + request = service.CreateEnvironmentRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. 
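+        # Only flattened arguments that were explicitly passed are copied onto
+        # the request; arguments left as None keep the request's proto defaults.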
+ if parent is not None: + request.parent = parent + if environment is not None: + request.environment = environment + if environment_id is not None: + request.environment_id = environment_id + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.create_environment] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("parent", request.parent), + )), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation.from_gapic( + response, + self._transport.operations_client, + analyze.Environment, + metadata_type=service.OperationMetadata, + ) + + # Done; return the response. + return response + + def update_environment(self, + request: Optional[Union[service.UpdateEnvironmentRequest, dict]] = None, + *, + environment: Optional[analyze.Environment] = None, + update_mask: Optional[field_mask_pb2.FieldMask] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation.Operation: + r"""Update the environment resource. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dataplex_v1 + + def sample_update_environment(): + # Create a client + client = dataplex_v1.DataplexServiceClient() + + # Initialize request argument(s) + environment = dataplex_v1.Environment() + environment.infrastructure_spec.os_image.image_version = "image_version_value" + + request = dataplex_v1.UpdateEnvironmentRequest( + environment=environment, + ) + + # Make the request + operation = client.update_environment(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.dataplex_v1.types.UpdateEnvironmentRequest, dict]): + The request object. Update environment request. + environment (google.cloud.dataplex_v1.types.Environment): + Required. Update description. Only fields specified in + ``update_mask`` are updated. + + This corresponds to the ``environment`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + update_mask (google.protobuf.field_mask_pb2.FieldMask): + Required. Mask of fields to update. + This corresponds to the ``update_mask`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation.Operation: + An object representing a long-running operation. 
+ + The result type for the operation will be :class:`google.cloud.dataplex_v1.types.Environment` Environment represents a user-visible compute infrastructure for analytics + within a lake. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([environment, update_mask]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, service.UpdateEnvironmentRequest): + request = service.UpdateEnvironmentRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if environment is not None: + request.environment = environment + if update_mask is not None: + request.update_mask = update_mask + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.update_environment] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("environment.name", request.environment.name), + )), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation.from_gapic( + response, + self._transport.operations_client, + analyze.Environment, + metadata_type=service.OperationMetadata, + ) + + # Done; return the response. + return response + + def delete_environment(self, + request: Optional[Union[service.DeleteEnvironmentRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation.Operation: + r"""Delete the environment resource. All the child + resources must have been deleted before environment + deletion can be initiated. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dataplex_v1 + + def sample_delete_environment(): + # Create a client + client = dataplex_v1.DataplexServiceClient() + + # Initialize request argument(s) + request = dataplex_v1.DeleteEnvironmentRequest( + name="name_value", + ) + + # Make the request + operation = client.delete_environment(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.dataplex_v1.types.DeleteEnvironmentRequest, dict]): + The request object. Delete environment request. + name (str): + Required. 
The resource name of the environment: + ``projects/{project_id}/locations/{location_id}/lakes/{lake_id}/environments/{environment_id}``. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation.Operation: + An object representing a long-running operation. + + The result type for the operation will be :class:`google.protobuf.empty_pb2.Empty` A generic empty message that you can re-use to avoid defining duplicated + empty messages in your APIs. A typical example is to + use it as the request or the response type of an API + method. For instance: + + service Foo { + rpc Bar(google.protobuf.Empty) returns + (google.protobuf.Empty); + + } + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, service.DeleteEnvironmentRequest): + request = service.DeleteEnvironmentRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.delete_environment] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation.from_gapic( + response, + self._transport.operations_client, + empty_pb2.Empty, + metadata_type=service.OperationMetadata, + ) + + # Done; return the response. + return response + + def list_environments(self, + request: Optional[Union[service.ListEnvironmentsRequest, dict]] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListEnvironmentsPager: + r"""Lists environments under the given lake. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dataplex_v1 + + def sample_list_environments(): + # Create a client + client = dataplex_v1.DataplexServiceClient() + + # Initialize request argument(s) + request = dataplex_v1.ListEnvironmentsRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_environments(request=request) + + # Handle the response + for response in page_result: + print(response) + + Args: + request (Union[google.cloud.dataplex_v1.types.ListEnvironmentsRequest, dict]): + The request object. List environments request. + parent (str): + Required. The resource name of the parent lake: + ``projects/{project_id}/locations/{location_id}/lakes/{lake_id}``. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.dataplex_v1.services.dataplex_service.pagers.ListEnvironmentsPager: + List environments response. + + Iterating over this object will yield + results and resolve additional pages + automatically. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, service.ListEnvironmentsRequest): + request = service.ListEnvironmentsRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.list_environments] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("parent", request.parent), + )), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__iter__` convenience method. + response = pagers.ListEnvironmentsPager( + method=rpc, + request=request, + response=response, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def get_environment(self, + request: Optional[Union[service.GetEnvironmentRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> analyze.Environment: + r"""Get environment resource. + + .. 
code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dataplex_v1 + + def sample_get_environment(): + # Create a client + client = dataplex_v1.DataplexServiceClient() + + # Initialize request argument(s) + request = dataplex_v1.GetEnvironmentRequest( + name="name_value", + ) + + # Make the request + response = client.get_environment(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.dataplex_v1.types.GetEnvironmentRequest, dict]): + The request object. Get environment request. + name (str): + Required. The resource name of the environment: + ``projects/{project_id}/locations/{location_id}/lakes/{lake_id}/environments/{environment_id}``. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.dataplex_v1.types.Environment: + Environment represents a user-visible + compute infrastructure for analytics + within a lake. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, service.GetEnvironmentRequest): + request = service.GetEnvironmentRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.get_environment] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def list_sessions(self, + request: Optional[Union[service.ListSessionsRequest, dict]] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListSessionsPager: + r"""Lists session resources in an environment. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. 
+            # It will require modifications to work:
+            # - It may require correct/in-range values for request initialization.
+            # - It may require specifying regional endpoints when creating the service
+            #   client as shown in:
+            #   https://googleapis.dev/python/google-api-core/latest/client_options.html
+            from google.cloud import dataplex_v1
+
+            def sample_list_sessions():
+                # Create a client
+                client = dataplex_v1.DataplexServiceClient()
+
+                # Initialize request argument(s)
+                request = dataplex_v1.ListSessionsRequest(
+                    parent="parent_value",
+                )
+
+                # Make the request
+                page_result = client.list_sessions(request=request)
+
+                # Handle the response
+                for response in page_result:
+                    print(response)
+
+        Args:
+            request (Union[google.cloud.dataplex_v1.types.ListSessionsRequest, dict]):
+                The request object. List sessions request.
+            parent (str):
+                Required. The resource name of the parent environment:
+                ``projects/{project_number}/locations/{location_id}/lakes/{lake_id}/environments/{environment_id}``.
+
+                This corresponds to the ``parent`` field
+                on the ``request`` instance; if ``request`` is provided, this
+                should not be set.
+            retry (google.api_core.retry.Retry): Designation of what errors, if any,
+                should be retried.
+            timeout (float): The timeout for this request.
+            metadata (Sequence[Tuple[str, str]]): Strings which should be
+                sent along with the request as metadata.
+
+        Returns:
+            google.cloud.dataplex_v1.services.dataplex_service.pagers.ListSessionsPager:
+                List sessions response.
+
+                Iterating over this object will yield
+                results and resolve additional pages
+                automatically.
+
+        """
+        # Create or coerce a protobuf request object.
+        # - Quick check: If we got a request object, we should *not* have
+        #   gotten any keyword arguments that map to the request.
+        has_flattened_params = any([parent])
+        if request is not None and has_flattened_params:
+            raise ValueError('If the `request` argument is set, then none of '
+                             'the individual field arguments should be set.')
+
+        # - Use the request object if provided (there's no risk of modifying the input as
+        #   there are no flattened fields), or create one.
+        if not isinstance(request, service.ListSessionsRequest):
+            request = service.ListSessionsRequest(request)
+        # If we have keyword arguments corresponding to fields on the
+        # request, apply these.
+        if parent is not None:
+            request.parent = parent
+
+        # Wrap the RPC method; this adds retry and timeout information,
+        # and friendly error handling.
+        rpc = self._transport._wrapped_methods[self._transport.list_sessions]
+
+        # Certain fields should be provided within the metadata header;
+        # add these here.
+        metadata = tuple(metadata) + (
+            gapic_v1.routing_header.to_grpc_metadata((
+                ("parent", request.parent),
+            )),
+        )
+
+        # Validate the universe domain.
+        self._validate_universe_domain()
+
+        # Send the request.
+        response = rpc(
+            request,
+            retry=retry,
+            timeout=timeout,
+            metadata=metadata,
+        )
+
+        # This method is paged; wrap the response in a pager, which provides
+        # an `__iter__` convenience method.
+        response = pagers.ListSessionsPager(
+            method=rpc,
+            request=request,
+            response=response,
+            retry=retry,
+            timeout=timeout,
+            metadata=metadata,
+        )
+
+        # Done; return the response.
+        return response
+
+    def __enter__(self) -> "DataplexServiceClient":
+        return self
+
+    def __exit__(self, type, value, traceback):
+        """Releases underlying transport's resources.
+
+        .. warning::
+            ONLY use as a context manager if the transport is NOT shared
+            with other clients!
Exiting the with block will CLOSE the transport + and may cause errors in other clients! + """ + self.transport.close() + + def list_operations( + self, + request: Optional[operations_pb2.ListOperationsRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.ListOperationsResponse: + r"""Lists operations that match the specified filter in the request. + + Args: + request (:class:`~.operations_pb2.ListOperationsRequest`): + The request object. Request message for + `ListOperations` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + ~.operations_pb2.ListOperationsResponse: + Response message for ``ListOperations`` method. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.ListOperationsRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.list_operations] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("name", request.name),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, retry=retry, timeout=timeout, metadata=metadata,) + + # Done; return the response. + return response + + def get_operation( + self, + request: Optional[operations_pb2.GetOperationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.Operation: + r"""Gets the latest state of a long-running operation. + + Args: + request (:class:`~.operations_pb2.GetOperationRequest`): + The request object. Request message for + `GetOperation` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + ~.operations_pb2.Operation: + An ``Operation`` object. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.GetOperationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.get_operation] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("name", request.name),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, retry=retry, timeout=timeout, metadata=metadata,) + + # Done; return the response. 
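+        # Note: `response` is a raw operations_pb2.Operation protobuf rather than
+        # a proto-plus message, so fields such as `done` and `name` are read directly.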
+ return response + + def delete_operation( + self, + request: Optional[operations_pb2.DeleteOperationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Deletes a long-running operation. + + This method indicates that the client is no longer interested + in the operation result. It does not cancel the operation. + If the server doesn't support this method, it returns + `google.rpc.Code.UNIMPLEMENTED`. + + Args: + request (:class:`~.operations_pb2.DeleteOperationRequest`): + The request object. Request message for + `DeleteOperation` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + None + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.DeleteOperationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.delete_operation] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("name", request.name),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + + def cancel_operation( + self, + request: Optional[operations_pb2.CancelOperationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Starts asynchronous cancellation on a long-running operation. + + The server makes a best effort to cancel the operation, but success + is not guaranteed. If the server doesn't support this method, it returns + `google.rpc.Code.UNIMPLEMENTED`. + + Args: + request (:class:`~.operations_pb2.CancelOperationRequest`): + The request object. Request message for + `CancelOperation` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + None + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.CancelOperationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.cancel_operation] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("name", request.name),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. 
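+        # CancelOperation responds with google.protobuf.Empty; cancellation is
+        # best-effort, so nothing is surfaced to the caller here.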
+        rpc(request, retry=retry, timeout=timeout, metadata=metadata,)
+
+    def get_location(
+        self,
+        request: Optional[locations_pb2.GetLocationRequest] = None,
+        *,
+        retry: OptionalRetry = gapic_v1.method.DEFAULT,
+        timeout: Union[float, object] = gapic_v1.method.DEFAULT,
+        metadata: Sequence[Tuple[str, str]] = (),
+    ) -> locations_pb2.Location:
+        r"""Gets information about a location.
+
+        Args:
+            request (:class:`~.locations_pb2.GetLocationRequest`):
+                The request object. Request message for
+                `GetLocation` method.
+            retry (google.api_core.retry.Retry): Designation of what errors,
+                if any, should be retried.
+            timeout (float): The timeout for this request.
+            metadata (Sequence[Tuple[str, str]]): Strings which should be
+                sent along with the request as metadata.
+        Returns:
+            ~.locations_pb2.Location:
+                Location object.
+        """
+        # Create or coerce a protobuf request object.
+        # The request isn't a proto-plus wrapped type,
+        # so it must be constructed via keyword expansion.
+        if isinstance(request, dict):
+            request = locations_pb2.GetLocationRequest(**request)
+
+        # Wrap the RPC method; this adds retry and timeout information,
+        # and friendly error handling.
+        rpc = self._transport._wrapped_methods[self._transport.get_location]
+
+        # Certain fields should be provided within the metadata header;
+        # add these here.
+        metadata = tuple(metadata) + (
+            gapic_v1.routing_header.to_grpc_metadata(
+                (("name", request.name),)),
+        )
+
+        # Validate the universe domain.
+        self._validate_universe_domain()
+
+        # Send the request.
+        response = rpc(
+            request, retry=retry, timeout=timeout, metadata=metadata,)
+
+        # Done; return the response.
+        return response
+
+    def list_locations(
+        self,
+        request: Optional[locations_pb2.ListLocationsRequest] = None,
+        *,
+        retry: OptionalRetry = gapic_v1.method.DEFAULT,
+        timeout: Union[float, object] = gapic_v1.method.DEFAULT,
+        metadata: Sequence[Tuple[str, str]] = (),
+    ) -> locations_pb2.ListLocationsResponse:
+        r"""Lists information about the supported locations for this service.
+
+        Args:
+            request (:class:`~.locations_pb2.ListLocationsRequest`):
+                The request object. Request message for
+                `ListLocations` method.
+            retry (google.api_core.retry.Retry): Designation of what errors,
+                if any, should be retried.
+            timeout (float): The timeout for this request.
+            metadata (Sequence[Tuple[str, str]]): Strings which should be
+                sent along with the request as metadata.
+        Returns:
+            ~.locations_pb2.ListLocationsResponse:
+                Response message for ``ListLocations`` method.
+        """
+        # Create or coerce a protobuf request object.
+        # The request isn't a proto-plus wrapped type,
+        # so it must be constructed via keyword expansion.
+        if isinstance(request, dict):
+            request = locations_pb2.ListLocationsRequest(**request)
+
+        # Wrap the RPC method; this adds retry and timeout information,
+        # and friendly error handling.
+        rpc = self._transport._wrapped_methods[self._transport.list_locations]
+
+        # Certain fields should be provided within the metadata header;
+        # add these here.
+        metadata = tuple(metadata) + (
+            gapic_v1.routing_header.to_grpc_metadata(
+                (("name", request.name),)),
+        )
+
+        # Validate the universe domain.
+        self._validate_universe_domain()
+
+        # Send the request.
+        response = rpc(
+            request, retry=retry, timeout=timeout, metadata=metadata,)
+
+        # Done; return the response.
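+        # Unlike the paged List* RPCs above, this mixin returns the raw
+        # ListLocationsResponse without wrapping it in a pager.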
+ return response + + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(gapic_version=package_version.__version__) + + +__all__ = ( + "DataplexServiceClient", +) diff --git a/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/dataplex_service/pagers.py b/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/dataplex_service/pagers.py new file mode 100644 index 000000000000..394c731b72c9 --- /dev/null +++ b/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/dataplex_service/pagers.py @@ -0,0 +1,1380 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from google.api_core import gapic_v1 +from google.api_core import retry as retries +from google.api_core import retry_async as retries_async +from typing import Any, AsyncIterator, Awaitable, Callable, Sequence, Tuple, Optional, Iterator, Union +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault, None] + OptionalAsyncRetry = Union[retries_async.AsyncRetry, gapic_v1.method._MethodDefault, None] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object, None] # type: ignore + OptionalAsyncRetry = Union[retries_async.AsyncRetry, object, None] # type: ignore + +from google.cloud.dataplex_v1.types import analyze +from google.cloud.dataplex_v1.types import resources +from google.cloud.dataplex_v1.types import service +from google.cloud.dataplex_v1.types import tasks + + +class ListLakesPager: + """A pager for iterating through ``list_lakes`` requests. + + This class thinly wraps an initial + :class:`google.cloud.dataplex_v1.types.ListLakesResponse` object, and + provides an ``__iter__`` method to iterate through its + ``lakes`` field. + + If there are more pages, the ``__iter__`` method will make additional + ``ListLakes`` requests and continue to iterate + through the ``lakes`` field on the + corresponding responses. + + All the usual :class:`google.cloud.dataplex_v1.types.ListLakesResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + def __init__(self, + method: Callable[..., service.ListLakesResponse], + request: service.ListLakesRequest, + response: service.ListLakesResponse, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = ()): + """Instantiate the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.dataplex_v1.types.ListLakesRequest): + The initial request object. + response (google.cloud.dataplex_v1.types.ListLakesResponse): + The initial response object. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. 
+ metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + self._method = method + self._request = service.ListLakesRequest(request) + self._response = response + self._retry = retry + self._timeout = timeout + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + def pages(self) -> Iterator[service.ListLakesResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = self._method(self._request, retry=self._retry, timeout=self._timeout, metadata=self._metadata) + yield self._response + + def __iter__(self) -> Iterator[resources.Lake]: + for page in self.pages: + yield from page.lakes + + def __repr__(self) -> str: + return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) + + +class ListLakesAsyncPager: + """A pager for iterating through ``list_lakes`` requests. + + This class thinly wraps an initial + :class:`google.cloud.dataplex_v1.types.ListLakesResponse` object, and + provides an ``__aiter__`` method to iterate through its + ``lakes`` field. + + If there are more pages, the ``__aiter__`` method will make additional + ``ListLakes`` requests and continue to iterate + through the ``lakes`` field on the + corresponding responses. + + All the usual :class:`google.cloud.dataplex_v1.types.ListLakesResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + def __init__(self, + method: Callable[..., Awaitable[service.ListLakesResponse]], + request: service.ListLakesRequest, + response: service.ListLakesResponse, + *, + retry: OptionalAsyncRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = ()): + """Instantiates the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.dataplex_v1.types.ListLakesRequest): + The initial request object. + response (google.cloud.dataplex_v1.types.ListLakesResponse): + The initial response object. + retry (google.api_core.retry.AsyncRetry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + self._method = method + self._request = service.ListLakesRequest(request) + self._response = response + self._retry = retry + self._timeout = timeout + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + async def pages(self) -> AsyncIterator[service.ListLakesResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = await self._method(self._request, retry=self._retry, timeout=self._timeout, metadata=self._metadata) + yield self._response + def __aiter__(self) -> AsyncIterator[resources.Lake]: + async def async_generator(): + async for page in self.pages: + for response in page.lakes: + yield response + + return async_generator() + + def __repr__(self) -> str: + return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) + + +class ListLakeActionsPager: + """A pager for iterating through ``list_lake_actions`` requests. 
+ + This class thinly wraps an initial + :class:`google.cloud.dataplex_v1.types.ListActionsResponse` object, and + provides an ``__iter__`` method to iterate through its + ``actions`` field. + + If there are more pages, the ``__iter__`` method will make additional + ``ListLakeActions`` requests and continue to iterate + through the ``actions`` field on the + corresponding responses. + + All the usual :class:`google.cloud.dataplex_v1.types.ListActionsResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + def __init__(self, + method: Callable[..., service.ListActionsResponse], + request: service.ListLakeActionsRequest, + response: service.ListActionsResponse, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = ()): + """Instantiate the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.dataplex_v1.types.ListLakeActionsRequest): + The initial request object. + response (google.cloud.dataplex_v1.types.ListActionsResponse): + The initial response object. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + self._method = method + self._request = service.ListLakeActionsRequest(request) + self._response = response + self._retry = retry + self._timeout = timeout + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + def pages(self) -> Iterator[service.ListActionsResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = self._method(self._request, retry=self._retry, timeout=self._timeout, metadata=self._metadata) + yield self._response + + def __iter__(self) -> Iterator[resources.Action]: + for page in self.pages: + yield from page.actions + + def __repr__(self) -> str: + return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) + + +class ListLakeActionsAsyncPager: + """A pager for iterating through ``list_lake_actions`` requests. + + This class thinly wraps an initial + :class:`google.cloud.dataplex_v1.types.ListActionsResponse` object, and + provides an ``__aiter__`` method to iterate through its + ``actions`` field. + + If there are more pages, the ``__aiter__`` method will make additional + ``ListLakeActions`` requests and continue to iterate + through the ``actions`` field on the + corresponding responses. + + All the usual :class:`google.cloud.dataplex_v1.types.ListActionsResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + def __init__(self, + method: Callable[..., Awaitable[service.ListActionsResponse]], + request: service.ListLakeActionsRequest, + response: service.ListActionsResponse, + *, + retry: OptionalAsyncRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = ()): + """Instantiates the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. 
+ request (google.cloud.dataplex_v1.types.ListLakeActionsRequest): + The initial request object. + response (google.cloud.dataplex_v1.types.ListActionsResponse): + The initial response object. + retry (google.api_core.retry.AsyncRetry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + self._method = method + self._request = service.ListLakeActionsRequest(request) + self._response = response + self._retry = retry + self._timeout = timeout + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + async def pages(self) -> AsyncIterator[service.ListActionsResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = await self._method(self._request, retry=self._retry, timeout=self._timeout, metadata=self._metadata) + yield self._response + def __aiter__(self) -> AsyncIterator[resources.Action]: + async def async_generator(): + async for page in self.pages: + for response in page.actions: + yield response + + return async_generator() + + def __repr__(self) -> str: + return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) + + +class ListZonesPager: + """A pager for iterating through ``list_zones`` requests. + + This class thinly wraps an initial + :class:`google.cloud.dataplex_v1.types.ListZonesResponse` object, and + provides an ``__iter__`` method to iterate through its + ``zones`` field. + + If there are more pages, the ``__iter__`` method will make additional + ``ListZones`` requests and continue to iterate + through the ``zones`` field on the + corresponding responses. + + All the usual :class:`google.cloud.dataplex_v1.types.ListZonesResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + def __init__(self, + method: Callable[..., service.ListZonesResponse], + request: service.ListZonesRequest, + response: service.ListZonesResponse, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = ()): + """Instantiate the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.dataplex_v1.types.ListZonesRequest): + The initial request object. + response (google.cloud.dataplex_v1.types.ListZonesResponse): + The initial response object. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ """ + self._method = method + self._request = service.ListZonesRequest(request) + self._response = response + self._retry = retry + self._timeout = timeout + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + def pages(self) -> Iterator[service.ListZonesResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = self._method(self._request, retry=self._retry, timeout=self._timeout, metadata=self._metadata) + yield self._response + + def __iter__(self) -> Iterator[resources.Zone]: + for page in self.pages: + yield from page.zones + + def __repr__(self) -> str: + return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) + + +class ListZonesAsyncPager: + """A pager for iterating through ``list_zones`` requests. + + This class thinly wraps an initial + :class:`google.cloud.dataplex_v1.types.ListZonesResponse` object, and + provides an ``__aiter__`` method to iterate through its + ``zones`` field. + + If there are more pages, the ``__aiter__`` method will make additional + ``ListZones`` requests and continue to iterate + through the ``zones`` field on the + corresponding responses. + + All the usual :class:`google.cloud.dataplex_v1.types.ListZonesResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + def __init__(self, + method: Callable[..., Awaitable[service.ListZonesResponse]], + request: service.ListZonesRequest, + response: service.ListZonesResponse, + *, + retry: OptionalAsyncRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = ()): + """Instantiates the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.dataplex_v1.types.ListZonesRequest): + The initial request object. + response (google.cloud.dataplex_v1.types.ListZonesResponse): + The initial response object. + retry (google.api_core.retry.AsyncRetry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + self._method = method + self._request = service.ListZonesRequest(request) + self._response = response + self._retry = retry + self._timeout = timeout + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + async def pages(self) -> AsyncIterator[service.ListZonesResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = await self._method(self._request, retry=self._retry, timeout=self._timeout, metadata=self._metadata) + yield self._response + def __aiter__(self) -> AsyncIterator[resources.Zone]: + async def async_generator(): + async for page in self.pages: + for response in page.zones: + yield response + + return async_generator() + + def __repr__(self) -> str: + return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) + + +class ListZoneActionsPager: + """A pager for iterating through ``list_zone_actions`` requests. 
+ + This class thinly wraps an initial + :class:`google.cloud.dataplex_v1.types.ListActionsResponse` object, and + provides an ``__iter__`` method to iterate through its + ``actions`` field. + + If there are more pages, the ``__iter__`` method will make additional + ``ListZoneActions`` requests and continue to iterate + through the ``actions`` field on the + corresponding responses. + + All the usual :class:`google.cloud.dataplex_v1.types.ListActionsResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + def __init__(self, + method: Callable[..., service.ListActionsResponse], + request: service.ListZoneActionsRequest, + response: service.ListActionsResponse, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = ()): + """Instantiate the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.dataplex_v1.types.ListZoneActionsRequest): + The initial request object. + response (google.cloud.dataplex_v1.types.ListActionsResponse): + The initial response object. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + self._method = method + self._request = service.ListZoneActionsRequest(request) + self._response = response + self._retry = retry + self._timeout = timeout + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + def pages(self) -> Iterator[service.ListActionsResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = self._method(self._request, retry=self._retry, timeout=self._timeout, metadata=self._metadata) + yield self._response + + def __iter__(self) -> Iterator[resources.Action]: + for page in self.pages: + yield from page.actions + + def __repr__(self) -> str: + return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) + + +class ListZoneActionsAsyncPager: + """A pager for iterating through ``list_zone_actions`` requests. + + This class thinly wraps an initial + :class:`google.cloud.dataplex_v1.types.ListActionsResponse` object, and + provides an ``__aiter__`` method to iterate through its + ``actions`` field. + + If there are more pages, the ``__aiter__`` method will make additional + ``ListZoneActions`` requests and continue to iterate + through the ``actions`` field on the + corresponding responses. + + All the usual :class:`google.cloud.dataplex_v1.types.ListActionsResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + def __init__(self, + method: Callable[..., Awaitable[service.ListActionsResponse]], + request: service.ListZoneActionsRequest, + response: service.ListActionsResponse, + *, + retry: OptionalAsyncRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = ()): + """Instantiates the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. 
+ request (google.cloud.dataplex_v1.types.ListZoneActionsRequest): + The initial request object. + response (google.cloud.dataplex_v1.types.ListActionsResponse): + The initial response object. + retry (google.api_core.retry.AsyncRetry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + self._method = method + self._request = service.ListZoneActionsRequest(request) + self._response = response + self._retry = retry + self._timeout = timeout + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + async def pages(self) -> AsyncIterator[service.ListActionsResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = await self._method(self._request, retry=self._retry, timeout=self._timeout, metadata=self._metadata) + yield self._response + def __aiter__(self) -> AsyncIterator[resources.Action]: + async def async_generator(): + async for page in self.pages: + for response in page.actions: + yield response + + return async_generator() + + def __repr__(self) -> str: + return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) + + +class ListAssetsPager: + """A pager for iterating through ``list_assets`` requests. + + This class thinly wraps an initial + :class:`google.cloud.dataplex_v1.types.ListAssetsResponse` object, and + provides an ``__iter__`` method to iterate through its + ``assets`` field. + + If there are more pages, the ``__iter__`` method will make additional + ``ListAssets`` requests and continue to iterate + through the ``assets`` field on the + corresponding responses. + + All the usual :class:`google.cloud.dataplex_v1.types.ListAssetsResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + def __init__(self, + method: Callable[..., service.ListAssetsResponse], + request: service.ListAssetsRequest, + response: service.ListAssetsResponse, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = ()): + """Instantiate the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.dataplex_v1.types.ListAssetsRequest): + The initial request object. + response (google.cloud.dataplex_v1.types.ListAssetsResponse): + The initial response object. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+        """
+        self._method = method
+        self._request = service.ListAssetsRequest(request)
+        self._response = response
+        self._retry = retry
+        self._timeout = timeout
+        self._metadata = metadata
+
+    def __getattr__(self, name: str) -> Any:
+        return getattr(self._response, name)
+
+    @property
+    def pages(self) -> Iterator[service.ListAssetsResponse]:
+        yield self._response
+        while self._response.next_page_token:
+            self._request.page_token = self._response.next_page_token
+            self._response = self._method(self._request, retry=self._retry, timeout=self._timeout, metadata=self._metadata)
+            yield self._response
+
+    def __iter__(self) -> Iterator[resources.Asset]:
+        for page in self.pages:
+            yield from page.assets
+
+    def __repr__(self) -> str:
+        return '{0}<{1!r}>'.format(self.__class__.__name__, self._response)
+
+
+class ListAssetsAsyncPager:
+    """A pager for iterating through ``list_assets`` requests.
+
+    This class thinly wraps an initial
+    :class:`google.cloud.dataplex_v1.types.ListAssetsResponse` object, and
+    provides an ``__aiter__`` method to iterate through its
+    ``assets`` field.
+
+    If there are more pages, the ``__aiter__`` method will make additional
+    ``ListAssets`` requests and continue to iterate
+    through the ``assets`` field on the
+    corresponding responses.
+
+    All the usual :class:`google.cloud.dataplex_v1.types.ListAssetsResponse`
+    attributes are available on the pager. If multiple requests are made, only
+    the most recent response is retained, and thus used for attribute lookup.
+    """
+    def __init__(self,
+            method: Callable[..., Awaitable[service.ListAssetsResponse]],
+            request: service.ListAssetsRequest,
+            response: service.ListAssetsResponse,
+            *,
+            retry: OptionalAsyncRetry = gapic_v1.method.DEFAULT,
+            timeout: Union[float, object] = gapic_v1.method.DEFAULT,
+            metadata: Sequence[Tuple[str, str]] = ()):
+        """Instantiates the pager.
+
+        Args:
+            method (Callable): The method that was originally called, and
+                which instantiated this pager.
+            request (google.cloud.dataplex_v1.types.ListAssetsRequest):
+                The initial request object.
+            response (google.cloud.dataplex_v1.types.ListAssetsResponse):
+                The initial response object.
+            retry (google.api_core.retry.AsyncRetry): Designation of what errors,
+                if any, should be retried.
+            timeout (float): The timeout for this request.
+            metadata (Sequence[Tuple[str, str]]): Strings which should be
+                sent along with the request as metadata.
+        """
+        self._method = method
+        self._request = service.ListAssetsRequest(request)
+        self._response = response
+        self._retry = retry
+        self._timeout = timeout
+        self._metadata = metadata
+
+    def __getattr__(self, name: str) -> Any:
+        return getattr(self._response, name)
+
+    @property
+    async def pages(self) -> AsyncIterator[service.ListAssetsResponse]:
+        yield self._response
+        while self._response.next_page_token:
+            self._request.page_token = self._response.next_page_token
+            self._response = await self._method(self._request, retry=self._retry, timeout=self._timeout, metadata=self._metadata)
+            yield self._response
+
+    def __aiter__(self) -> AsyncIterator[resources.Asset]:
+        async def async_generator():
+            async for page in self.pages:
+                for response in page.assets:
+                    yield response
+
+        return async_generator()
+
+    def __repr__(self) -> str:
+        return '{0}<{1!r}>'.format(self.__class__.__name__, self._response)
+
+
+class ListAssetActionsPager:
+    """A pager for iterating through ``list_asset_actions`` requests.
+ + This class thinly wraps an initial + :class:`google.cloud.dataplex_v1.types.ListActionsResponse` object, and + provides an ``__iter__`` method to iterate through its + ``actions`` field. + + If there are more pages, the ``__iter__`` method will make additional + ``ListAssetActions`` requests and continue to iterate + through the ``actions`` field on the + corresponding responses. + + All the usual :class:`google.cloud.dataplex_v1.types.ListActionsResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + def __init__(self, + method: Callable[..., service.ListActionsResponse], + request: service.ListAssetActionsRequest, + response: service.ListActionsResponse, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = ()): + """Instantiate the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.dataplex_v1.types.ListAssetActionsRequest): + The initial request object. + response (google.cloud.dataplex_v1.types.ListActionsResponse): + The initial response object. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + self._method = method + self._request = service.ListAssetActionsRequest(request) + self._response = response + self._retry = retry + self._timeout = timeout + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + def pages(self) -> Iterator[service.ListActionsResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = self._method(self._request, retry=self._retry, timeout=self._timeout, metadata=self._metadata) + yield self._response + + def __iter__(self) -> Iterator[resources.Action]: + for page in self.pages: + yield from page.actions + + def __repr__(self) -> str: + return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) + + +class ListAssetActionsAsyncPager: + """A pager for iterating through ``list_asset_actions`` requests. + + This class thinly wraps an initial + :class:`google.cloud.dataplex_v1.types.ListActionsResponse` object, and + provides an ``__aiter__`` method to iterate through its + ``actions`` field. + + If there are more pages, the ``__aiter__`` method will make additional + ``ListAssetActions`` requests and continue to iterate + through the ``actions`` field on the + corresponding responses. + + All the usual :class:`google.cloud.dataplex_v1.types.ListActionsResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + def __init__(self, + method: Callable[..., Awaitable[service.ListActionsResponse]], + request: service.ListAssetActionsRequest, + response: service.ListActionsResponse, + *, + retry: OptionalAsyncRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = ()): + """Instantiates the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. 
+            request (google.cloud.dataplex_v1.types.ListAssetActionsRequest):
+                The initial request object.
+            response (google.cloud.dataplex_v1.types.ListActionsResponse):
+                The initial response object.
+            retry (google.api_core.retry.AsyncRetry): Designation of what errors,
+                if any, should be retried.
+            timeout (float): The timeout for this request.
+            metadata (Sequence[Tuple[str, str]]): Strings which should be
+                sent along with the request as metadata.
+        """
+        self._method = method
+        self._request = service.ListAssetActionsRequest(request)
+        self._response = response
+        self._retry = retry
+        self._timeout = timeout
+        self._metadata = metadata
+
+    def __getattr__(self, name: str) -> Any:
+        return getattr(self._response, name)
+
+    @property
+    async def pages(self) -> AsyncIterator[service.ListActionsResponse]:
+        yield self._response
+        while self._response.next_page_token:
+            self._request.page_token = self._response.next_page_token
+            self._response = await self._method(self._request, retry=self._retry, timeout=self._timeout, metadata=self._metadata)
+            yield self._response
+
+    def __aiter__(self) -> AsyncIterator[resources.Action]:
+        async def async_generator():
+            async for page in self.pages:
+                for response in page.actions:
+                    yield response
+
+        return async_generator()
+
+    def __repr__(self) -> str:
+        return '{0}<{1!r}>'.format(self.__class__.__name__, self._response)
+
+
+class ListTasksPager:
+    """A pager for iterating through ``list_tasks`` requests.
+
+    This class thinly wraps an initial
+    :class:`google.cloud.dataplex_v1.types.ListTasksResponse` object, and
+    provides an ``__iter__`` method to iterate through its
+    ``tasks`` field.
+
+    If there are more pages, the ``__iter__`` method will make additional
+    ``ListTasks`` requests and continue to iterate
+    through the ``tasks`` field on the
+    corresponding responses.
+
+    All the usual :class:`google.cloud.dataplex_v1.types.ListTasksResponse`
+    attributes are available on the pager. If multiple requests are made, only
+    the most recent response is retained, and thus used for attribute lookup.
+    """
+    def __init__(self,
+            method: Callable[..., service.ListTasksResponse],
+            request: service.ListTasksRequest,
+            response: service.ListTasksResponse,
+            *,
+            retry: OptionalRetry = gapic_v1.method.DEFAULT,
+            timeout: Union[float, object] = gapic_v1.method.DEFAULT,
+            metadata: Sequence[Tuple[str, str]] = ()):
+        """Instantiate the pager.
+
+        Args:
+            method (Callable): The method that was originally called, and
+                which instantiated this pager.
+            request (google.cloud.dataplex_v1.types.ListTasksRequest):
+                The initial request object.
+            response (google.cloud.dataplex_v1.types.ListTasksResponse):
+                The initial response object.
+            retry (google.api_core.retry.Retry): Designation of what errors,
+                if any, should be retried.
+            timeout (float): The timeout for this request.
+            metadata (Sequence[Tuple[str, str]]): Strings which should be
+                sent along with the request as metadata.
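+
+        Example:
+            An illustrative sketch; ``client`` is assumed to be an
+            initialized ``DataplexServiceClient`` and ``lake`` a lake
+            resource name. Whole pages can be consumed via the ``pages``
+            property instead of item-by-item iteration:
+
+            >>> pager = client.list_tasks(parent=lake)
+            >>> for page in pager.pages:
+            ...     print(len(page.tasks))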
+        """
+        self._method = method
+        self._request = service.ListTasksRequest(request)
+        self._response = response
+        self._retry = retry
+        self._timeout = timeout
+        self._metadata = metadata
+
+    def __getattr__(self, name: str) -> Any:
+        return getattr(self._response, name)
+
+    @property
+    def pages(self) -> Iterator[service.ListTasksResponse]:
+        yield self._response
+        while self._response.next_page_token:
+            self._request.page_token = self._response.next_page_token
+            self._response = self._method(self._request, retry=self._retry, timeout=self._timeout, metadata=self._metadata)
+            yield self._response
+
+    def __iter__(self) -> Iterator[tasks.Task]:
+        for page in self.pages:
+            yield from page.tasks
+
+    def __repr__(self) -> str:
+        return '{0}<{1!r}>'.format(self.__class__.__name__, self._response)
+
+
+class ListTasksAsyncPager:
+    """A pager for iterating through ``list_tasks`` requests.
+
+    This class thinly wraps an initial
+    :class:`google.cloud.dataplex_v1.types.ListTasksResponse` object, and
+    provides an ``__aiter__`` method to iterate through its
+    ``tasks`` field.
+
+    If there are more pages, the ``__aiter__`` method will make additional
+    ``ListTasks`` requests and continue to iterate
+    through the ``tasks`` field on the
+    corresponding responses.
+
+    All the usual :class:`google.cloud.dataplex_v1.types.ListTasksResponse`
+    attributes are available on the pager. If multiple requests are made, only
+    the most recent response is retained, and thus used for attribute lookup.
+    """
+    def __init__(self,
+            method: Callable[..., Awaitable[service.ListTasksResponse]],
+            request: service.ListTasksRequest,
+            response: service.ListTasksResponse,
+            *,
+            retry: OptionalAsyncRetry = gapic_v1.method.DEFAULT,
+            timeout: Union[float, object] = gapic_v1.method.DEFAULT,
+            metadata: Sequence[Tuple[str, str]] = ()):
+        """Instantiates the pager.
+
+        Args:
+            method (Callable): The method that was originally called, and
+                which instantiated this pager.
+            request (google.cloud.dataplex_v1.types.ListTasksRequest):
+                The initial request object.
+            response (google.cloud.dataplex_v1.types.ListTasksResponse):
+                The initial response object.
+            retry (google.api_core.retry.AsyncRetry): Designation of what errors,
+                if any, should be retried.
+            timeout (float): The timeout for this request.
+            metadata (Sequence[Tuple[str, str]]): Strings which should be
+                sent along with the request as metadata.
+        """
+        self._method = method
+        self._request = service.ListTasksRequest(request)
+        self._response = response
+        self._retry = retry
+        self._timeout = timeout
+        self._metadata = metadata
+
+    def __getattr__(self, name: str) -> Any:
+        return getattr(self._response, name)
+
+    @property
+    async def pages(self) -> AsyncIterator[service.ListTasksResponse]:
+        yield self._response
+        while self._response.next_page_token:
+            self._request.page_token = self._response.next_page_token
+            self._response = await self._method(self._request, retry=self._retry, timeout=self._timeout, metadata=self._metadata)
+            yield self._response
+
+    def __aiter__(self) -> AsyncIterator[tasks.Task]:
+        async def async_generator():
+            async for page in self.pages:
+                for response in page.tasks:
+                    yield response
+
+        return async_generator()
+
+    def __repr__(self) -> str:
+        return '{0}<{1!r}>'.format(self.__class__.__name__, self._response)
+
+
+class ListJobsPager:
+    """A pager for iterating through ``list_jobs`` requests.
+ + This class thinly wraps an initial + :class:`google.cloud.dataplex_v1.types.ListJobsResponse` object, and + provides an ``__iter__`` method to iterate through its + ``jobs`` field. + + If there are more pages, the ``__iter__`` method will make additional + ``ListJobs`` requests and continue to iterate + through the ``jobs`` field on the + corresponding responses. + + All the usual :class:`google.cloud.dataplex_v1.types.ListJobsResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + def __init__(self, + method: Callable[..., service.ListJobsResponse], + request: service.ListJobsRequest, + response: service.ListJobsResponse, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = ()): + """Instantiate the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.dataplex_v1.types.ListJobsRequest): + The initial request object. + response (google.cloud.dataplex_v1.types.ListJobsResponse): + The initial response object. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + self._method = method + self._request = service.ListJobsRequest(request) + self._response = response + self._retry = retry + self._timeout = timeout + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + def pages(self) -> Iterator[service.ListJobsResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = self._method(self._request, retry=self._retry, timeout=self._timeout, metadata=self._metadata) + yield self._response + + def __iter__(self) -> Iterator[tasks.Job]: + for page in self.pages: + yield from page.jobs + + def __repr__(self) -> str: + return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) + + +class ListJobsAsyncPager: + """A pager for iterating through ``list_jobs`` requests. + + This class thinly wraps an initial + :class:`google.cloud.dataplex_v1.types.ListJobsResponse` object, and + provides an ``__aiter__`` method to iterate through its + ``jobs`` field. + + If there are more pages, the ``__aiter__`` method will make additional + ``ListJobs`` requests and continue to iterate + through the ``jobs`` field on the + corresponding responses. + + All the usual :class:`google.cloud.dataplex_v1.types.ListJobsResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + def __init__(self, + method: Callable[..., Awaitable[service.ListJobsResponse]], + request: service.ListJobsRequest, + response: service.ListJobsResponse, + *, + retry: OptionalAsyncRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = ()): + """Instantiates the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.dataplex_v1.types.ListJobsRequest): + The initial request object. 
+            response (google.cloud.dataplex_v1.types.ListJobsResponse):
+                The initial response object.
+            retry (google.api_core.retry.AsyncRetry): Designation of what errors,
+                if any, should be retried.
+            timeout (float): The timeout for this request.
+            metadata (Sequence[Tuple[str, str]]): Strings which should be
+                sent along with the request as metadata.
+        """
+        self._method = method
+        self._request = service.ListJobsRequest(request)
+        self._response = response
+        self._retry = retry
+        self._timeout = timeout
+        self._metadata = metadata
+
+    def __getattr__(self, name: str) -> Any:
+        return getattr(self._response, name)
+
+    @property
+    async def pages(self) -> AsyncIterator[service.ListJobsResponse]:
+        yield self._response
+        while self._response.next_page_token:
+            self._request.page_token = self._response.next_page_token
+            self._response = await self._method(self._request, retry=self._retry, timeout=self._timeout, metadata=self._metadata)
+            yield self._response
+
+    def __aiter__(self) -> AsyncIterator[tasks.Job]:
+        async def async_generator():
+            async for page in self.pages:
+                for response in page.jobs:
+                    yield response
+
+        return async_generator()
+
+    def __repr__(self) -> str:
+        return '{0}<{1!r}>'.format(self.__class__.__name__, self._response)
+
+
+class ListEnvironmentsPager:
+    """A pager for iterating through ``list_environments`` requests.
+
+    This class thinly wraps an initial
+    :class:`google.cloud.dataplex_v1.types.ListEnvironmentsResponse` object, and
+    provides an ``__iter__`` method to iterate through its
+    ``environments`` field.
+
+    If there are more pages, the ``__iter__`` method will make additional
+    ``ListEnvironments`` requests and continue to iterate
+    through the ``environments`` field on the
+    corresponding responses.
+
+    All the usual :class:`google.cloud.dataplex_v1.types.ListEnvironmentsResponse`
+    attributes are available on the pager. If multiple requests are made, only
+    the most recent response is retained, and thus used for attribute lookup.
+    """
+    def __init__(self,
+            method: Callable[..., service.ListEnvironmentsResponse],
+            request: service.ListEnvironmentsRequest,
+            response: service.ListEnvironmentsResponse,
+            *,
+            retry: OptionalRetry = gapic_v1.method.DEFAULT,
+            timeout: Union[float, object] = gapic_v1.method.DEFAULT,
+            metadata: Sequence[Tuple[str, str]] = ()):
+        """Instantiate the pager.
+
+        Args:
+            method (Callable): The method that was originally called, and
+                which instantiated this pager.
+            request (google.cloud.dataplex_v1.types.ListEnvironmentsRequest):
+                The initial request object.
+            response (google.cloud.dataplex_v1.types.ListEnvironmentsResponse):
+                The initial response object.
+            retry (google.api_core.retry.Retry): Designation of what errors,
+                if any, should be retried.
+            timeout (float): The timeout for this request.
+            metadata (Sequence[Tuple[str, str]]): Strings which should be
+                sent along with the request as metadata.
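+
+        Example:
+            An illustrative sketch; because of ``__getattr__``, response
+            attributes such as ``next_page_token`` can be read directly
+            off the pager (``client`` and ``lake`` are assumed):
+
+            >>> pager = client.list_environments(parent=lake)
+            >>> token = pager.next_page_token  # proxied to the latest response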
+        """
+        self._method = method
+        self._request = service.ListEnvironmentsRequest(request)
+        self._response = response
+        self._retry = retry
+        self._timeout = timeout
+        self._metadata = metadata
+
+    def __getattr__(self, name: str) -> Any:
+        return getattr(self._response, name)
+
+    @property
+    def pages(self) -> Iterator[service.ListEnvironmentsResponse]:
+        yield self._response
+        while self._response.next_page_token:
+            self._request.page_token = self._response.next_page_token
+            self._response = self._method(self._request, retry=self._retry, timeout=self._timeout, metadata=self._metadata)
+            yield self._response
+
+    def __iter__(self) -> Iterator[analyze.Environment]:
+        for page in self.pages:
+            yield from page.environments
+
+    def __repr__(self) -> str:
+        return '{0}<{1!r}>'.format(self.__class__.__name__, self._response)
+
+
+class ListEnvironmentsAsyncPager:
+    """A pager for iterating through ``list_environments`` requests.
+
+    This class thinly wraps an initial
+    :class:`google.cloud.dataplex_v1.types.ListEnvironmentsResponse` object, and
+    provides an ``__aiter__`` method to iterate through its
+    ``environments`` field.
+
+    If there are more pages, the ``__aiter__`` method will make additional
+    ``ListEnvironments`` requests and continue to iterate
+    through the ``environments`` field on the
+    corresponding responses.
+
+    All the usual :class:`google.cloud.dataplex_v1.types.ListEnvironmentsResponse`
+    attributes are available on the pager. If multiple requests are made, only
+    the most recent response is retained, and thus used for attribute lookup.
+    """
+    def __init__(self,
+            method: Callable[..., Awaitable[service.ListEnvironmentsResponse]],
+            request: service.ListEnvironmentsRequest,
+            response: service.ListEnvironmentsResponse,
+            *,
+            retry: OptionalAsyncRetry = gapic_v1.method.DEFAULT,
+            timeout: Union[float, object] = gapic_v1.method.DEFAULT,
+            metadata: Sequence[Tuple[str, str]] = ()):
+        """Instantiates the pager.
+
+        Args:
+            method (Callable): The method that was originally called, and
+                which instantiated this pager.
+            request (google.cloud.dataplex_v1.types.ListEnvironmentsRequest):
+                The initial request object.
+            response (google.cloud.dataplex_v1.types.ListEnvironmentsResponse):
+                The initial response object.
+            retry (google.api_core.retry.AsyncRetry): Designation of what errors,
+                if any, should be retried.
+            timeout (float): The timeout for this request.
+            metadata (Sequence[Tuple[str, str]]): Strings which should be
+                sent along with the request as metadata.
+        """
+        self._method = method
+        self._request = service.ListEnvironmentsRequest(request)
+        self._response = response
+        self._retry = retry
+        self._timeout = timeout
+        self._metadata = metadata
+
+    def __getattr__(self, name: str) -> Any:
+        return getattr(self._response, name)
+
+    @property
+    async def pages(self) -> AsyncIterator[service.ListEnvironmentsResponse]:
+        yield self._response
+        while self._response.next_page_token:
+            self._request.page_token = self._response.next_page_token
+            self._response = await self._method(self._request, retry=self._retry, timeout=self._timeout, metadata=self._metadata)
+            yield self._response
+
+    def __aiter__(self) -> AsyncIterator[analyze.Environment]:
+        async def async_generator():
+            async for page in self.pages:
+                for response in page.environments:
+                    yield response
+
+        return async_generator()
+
+    def __repr__(self) -> str:
+        return '{0}<{1!r}>'.format(self.__class__.__name__, self._response)
+
+
+class ListSessionsPager:
+    """A pager for iterating through ``list_sessions`` requests.
+ + This class thinly wraps an initial + :class:`google.cloud.dataplex_v1.types.ListSessionsResponse` object, and + provides an ``__iter__`` method to iterate through its + ``sessions`` field. + + If there are more pages, the ``__iter__`` method will make additional + ``ListSessions`` requests and continue to iterate + through the ``sessions`` field on the + corresponding responses. + + All the usual :class:`google.cloud.dataplex_v1.types.ListSessionsResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + def __init__(self, + method: Callable[..., service.ListSessionsResponse], + request: service.ListSessionsRequest, + response: service.ListSessionsResponse, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = ()): + """Instantiate the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.dataplex_v1.types.ListSessionsRequest): + The initial request object. + response (google.cloud.dataplex_v1.types.ListSessionsResponse): + The initial response object. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + self._method = method + self._request = service.ListSessionsRequest(request) + self._response = response + self._retry = retry + self._timeout = timeout + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + def pages(self) -> Iterator[service.ListSessionsResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = self._method(self._request, retry=self._retry, timeout=self._timeout, metadata=self._metadata) + yield self._response + + def __iter__(self) -> Iterator[analyze.Session]: + for page in self.pages: + yield from page.sessions + + def __repr__(self) -> str: + return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) + + +class ListSessionsAsyncPager: + """A pager for iterating through ``list_sessions`` requests. + + This class thinly wraps an initial + :class:`google.cloud.dataplex_v1.types.ListSessionsResponse` object, and + provides an ``__aiter__`` method to iterate through its + ``sessions`` field. + + If there are more pages, the ``__aiter__`` method will make additional + ``ListSessions`` requests and continue to iterate + through the ``sessions`` field on the + corresponding responses. + + All the usual :class:`google.cloud.dataplex_v1.types.ListSessionsResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + def __init__(self, + method: Callable[..., Awaitable[service.ListSessionsResponse]], + request: service.ListSessionsRequest, + response: service.ListSessionsResponse, + *, + retry: OptionalAsyncRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = ()): + """Instantiates the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. 
+            request (google.cloud.dataplex_v1.types.ListSessionsRequest):
+                The initial request object.
+            response (google.cloud.dataplex_v1.types.ListSessionsResponse):
+                The initial response object.
+            retry (google.api_core.retry.AsyncRetry): Designation of what errors,
+                if any, should be retried.
+            timeout (float): The timeout for this request.
+            metadata (Sequence[Tuple[str, str]]): Strings which should be
+                sent along with the request as metadata.
+        """
+        self._method = method
+        self._request = service.ListSessionsRequest(request)
+        self._response = response
+        self._retry = retry
+        self._timeout = timeout
+        self._metadata = metadata
+
+    def __getattr__(self, name: str) -> Any:
+        return getattr(self._response, name)
+
+    @property
+    async def pages(self) -> AsyncIterator[service.ListSessionsResponse]:
+        yield self._response
+        while self._response.next_page_token:
+            self._request.page_token = self._response.next_page_token
+            self._response = await self._method(self._request, retry=self._retry, timeout=self._timeout, metadata=self._metadata)
+            yield self._response
+
+    def __aiter__(self) -> AsyncIterator[analyze.Session]:
+        async def async_generator():
+            async for page in self.pages:
+                for response in page.sessions:
+                    yield response
+
+        return async_generator()
+
+    def __repr__(self) -> str:
+        return '{0}<{1!r}>'.format(self.__class__.__name__, self._response)
diff --git a/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/dataplex_service/transports/README.rst b/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/dataplex_service/transports/README.rst
new file mode 100644
index 000000000000..a70e22115784
--- /dev/null
+++ b/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/dataplex_service/transports/README.rst
@@ -0,0 +1,9 @@
+
+transport inheritance structure
+_______________________________
+
+`DataplexServiceTransport` is the ABC for all transports.
+- public child `DataplexServiceGrpcTransport` for sync gRPC transport (defined in `grpc.py`).
+- public child `DataplexServiceGrpcAsyncIOTransport` for async gRPC transport (defined in `grpc_asyncio.py`).
+- private child `_BaseDataplexServiceRestTransport` for base REST transport with inner classes `_BaseMETHOD` (defined in `rest_base.py`).
+- public child `DataplexServiceRestTransport` for sync REST transport with inner classes `METHOD` derived from the parent's corresponding `_BaseMETHOD` classes (defined in `rest.py`).
diff --git a/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/dataplex_service/transports/__init__.py b/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/dataplex_service/transports/__init__.py
new file mode 100644
index 000000000000..4d9372a1b1ae
--- /dev/null
+++ b/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/dataplex_service/transports/__init__.py
@@ -0,0 +1,33 @@
+# -*- coding: utf-8 -*-
+# Copyright 2024 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+# +from collections import OrderedDict +from typing import Dict, Type + +from .base import DataplexServiceTransport +from .grpc import DataplexServiceGrpcTransport +from .grpc_asyncio import DataplexServiceGrpcAsyncIOTransport + + +# Compile a registry of transports. +_transport_registry = OrderedDict() # type: Dict[str, Type[DataplexServiceTransport]] +_transport_registry['grpc'] = DataplexServiceGrpcTransport +_transport_registry['grpc_asyncio'] = DataplexServiceGrpcAsyncIOTransport + +__all__ = ( + 'DataplexServiceTransport', + 'DataplexServiceGrpcTransport', + 'DataplexServiceGrpcAsyncIOTransport', +) diff --git a/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/dataplex_service/transports/base.py b/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/dataplex_service/transports/base.py new file mode 100644 index 000000000000..4a351d5dd59b --- /dev/null +++ b/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/dataplex_service/transports/base.py @@ -0,0 +1,833 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +import abc +from typing import Awaitable, Callable, Dict, Optional, Sequence, Union + +from google.cloud.dataplex_v1 import gapic_version as package_version + +import google.auth # type: ignore +import google.api_core +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry as retries +from google.api_core import operations_v1 +from google.auth import credentials as ga_credentials # type: ignore +from google.oauth2 import service_account # type: ignore + +from google.cloud.dataplex_v1.types import analyze +from google.cloud.dataplex_v1.types import resources +from google.cloud.dataplex_v1.types import service +from google.cloud.dataplex_v1.types import tasks +from google.cloud.location import locations_pb2 # type: ignore +from google.iam.v1 import iam_policy_pb2 # type: ignore +from google.iam.v1 import policy_pb2 # type: ignore +from google.longrunning import operations_pb2 # type: ignore +from google.protobuf import empty_pb2 # type: ignore + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(gapic_version=package_version.__version__) + + +class DataplexServiceTransport(abc.ABC): + """Abstract transport class for DataplexService.""" + + AUTH_SCOPES = ( + 'https://www.googleapis.com/auth/cloud-platform', + ) + + DEFAULT_HOST: str = 'dataplex.googleapis.com' + def __init__( + self, *, + host: str = DEFAULT_HOST, + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + api_audience: Optional[str] = None, + **kwargs, + ) -> None: + """Instantiate the transport. 
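+
+        Credential resolution, as implemented below: an explicit
+        ``credentials`` object takes precedence, then ``credentials_file``,
+        and finally application default credentials.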
+
+        Args:
+            host (Optional[str]):
+                The hostname to connect to (default: 'dataplex.googleapis.com').
+            credentials (Optional[google.auth.credentials.Credentials]): The
+                authorization credentials to attach to requests. These
+                credentials identify the application to the service; if none
+                are specified, the client will attempt to ascertain the
+                credentials from the environment.
+            credentials_file (Optional[str]): A file with credentials that can
+                be loaded with :func:`google.auth.load_credentials_from_file`.
+                This argument is mutually exclusive with credentials.
+            scopes (Optional[Sequence[str]]): A list of scopes.
+            quota_project_id (Optional[str]): An optional project to use for billing
+                and quota.
+            client_info (google.api_core.gapic_v1.client_info.ClientInfo):
+                The client info used to send a user-agent string along with
+                API requests. If ``None``, then default info will be used.
+                Generally, you only need to set this if you're developing
+                your own client library.
+            always_use_jwt_access (Optional[bool]): Whether a self-signed JWT should
+                be used for service account credentials.
+        """
+
+        scopes_kwargs = {"scopes": scopes, "default_scopes": self.AUTH_SCOPES}
+
+        # Save the scopes.
+        self._scopes = scopes
+        if not hasattr(self, "_ignore_credentials"):
+            self._ignore_credentials: bool = False
+
+        # If no credentials are provided, then determine the appropriate
+        # defaults.
+        if credentials and credentials_file:
+            raise core_exceptions.DuplicateCredentialArgs("'credentials_file' and 'credentials' are mutually exclusive")
+
+        if credentials_file is not None:
+            credentials, _ = google.auth.load_credentials_from_file(
+                credentials_file,
+                **scopes_kwargs,
+                quota_project_id=quota_project_id
+            )
+        elif credentials is None and not self._ignore_credentials:
+            credentials, _ = google.auth.default(**scopes_kwargs, quota_project_id=quota_project_id)
+            # Don't apply the audience if a credentials file was passed by the user.
+            if hasattr(credentials, "with_gdch_audience"):
+                credentials = credentials.with_gdch_audience(api_audience if api_audience else host)
+
+        # If the credentials are service account credentials, then always try to use a self-signed JWT.
+        if always_use_jwt_access and isinstance(credentials, service_account.Credentials) and hasattr(service_account.Credentials, "with_always_use_jwt_access"):
+            credentials = credentials.with_always_use_jwt_access(True)
+
+        # Save the credentials.
+        self._credentials = credentials
+
+        # Save the hostname. Default to port 443 (HTTPS) if none is specified.
+        if ':' not in host:
+            host += ':443'
+        self._host = host
+
+    @property
+    def host(self):
+        return self._host
+
+    def _prep_wrapped_messages(self, client_info):
+        # Precompute the wrapped methods.
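+        # (Descriptive note on the defaults below: idempotent reads, i.e.
+        # the list_*/get_* RPCs, retry on ServiceUnavailable with
+        # exponential backoff (1.0s initial delay, 1.3x multiplier, 10.0s
+        # cap) against a 60.0s overall deadline, while mutations and the
+        # operation/location mixins carry no default retry.)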
+ self._wrapped_methods = { + self.create_lake: gapic_v1.method.wrap_method( + self.create_lake, + default_timeout=60.0, + client_info=client_info, + ), + self.update_lake: gapic_v1.method.wrap_method( + self.update_lake, + default_timeout=60.0, + client_info=client_info, + ), + self.delete_lake: gapic_v1.method.wrap_method( + self.delete_lake, + default_timeout=60.0, + client_info=client_info, + ), + self.list_lakes: gapic_v1.method.wrap_method( + self.list_lakes, + default_retry=retries.Retry( + initial=1.0, + maximum=10.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=client_info, + ), + self.get_lake: gapic_v1.method.wrap_method( + self.get_lake, + default_retry=retries.Retry( + initial=1.0, + maximum=10.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=client_info, + ), + self.list_lake_actions: gapic_v1.method.wrap_method( + self.list_lake_actions, + default_retry=retries.Retry( + initial=1.0, + maximum=10.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=client_info, + ), + self.create_zone: gapic_v1.method.wrap_method( + self.create_zone, + default_timeout=60.0, + client_info=client_info, + ), + self.update_zone: gapic_v1.method.wrap_method( + self.update_zone, + default_timeout=60.0, + client_info=client_info, + ), + self.delete_zone: gapic_v1.method.wrap_method( + self.delete_zone, + default_timeout=60.0, + client_info=client_info, + ), + self.list_zones: gapic_v1.method.wrap_method( + self.list_zones, + default_retry=retries.Retry( + initial=1.0, + maximum=10.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=client_info, + ), + self.get_zone: gapic_v1.method.wrap_method( + self.get_zone, + default_retry=retries.Retry( + initial=1.0, + maximum=10.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=client_info, + ), + self.list_zone_actions: gapic_v1.method.wrap_method( + self.list_zone_actions, + default_retry=retries.Retry( + initial=1.0, + maximum=10.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=client_info, + ), + self.create_asset: gapic_v1.method.wrap_method( + self.create_asset, + default_timeout=60.0, + client_info=client_info, + ), + self.update_asset: gapic_v1.method.wrap_method( + self.update_asset, + default_timeout=60.0, + client_info=client_info, + ), + self.delete_asset: gapic_v1.method.wrap_method( + self.delete_asset, + default_timeout=60.0, + client_info=client_info, + ), + self.list_assets: gapic_v1.method.wrap_method( + self.list_assets, + default_retry=retries.Retry( + initial=1.0, + maximum=10.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=client_info, + ), + self.get_asset: gapic_v1.method.wrap_method( + self.get_asset, + default_retry=retries.Retry( + initial=1.0, + maximum=10.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + 
), + deadline=60.0, + ), + default_timeout=60.0, + client_info=client_info, + ), + self.list_asset_actions: gapic_v1.method.wrap_method( + self.list_asset_actions, + default_retry=retries.Retry( + initial=1.0, + maximum=10.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=client_info, + ), + self.create_task: gapic_v1.method.wrap_method( + self.create_task, + default_timeout=60.0, + client_info=client_info, + ), + self.update_task: gapic_v1.method.wrap_method( + self.update_task, + default_timeout=60.0, + client_info=client_info, + ), + self.delete_task: gapic_v1.method.wrap_method( + self.delete_task, + default_timeout=60.0, + client_info=client_info, + ), + self.list_tasks: gapic_v1.method.wrap_method( + self.list_tasks, + default_retry=retries.Retry( + initial=1.0, + maximum=10.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=client_info, + ), + self.get_task: gapic_v1.method.wrap_method( + self.get_task, + default_retry=retries.Retry( + initial=1.0, + maximum=10.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=client_info, + ), + self.list_jobs: gapic_v1.method.wrap_method( + self.list_jobs, + default_retry=retries.Retry( + initial=1.0, + maximum=10.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=client_info, + ), + self.run_task: gapic_v1.method.wrap_method( + self.run_task, + default_timeout=None, + client_info=client_info, + ), + self.get_job: gapic_v1.method.wrap_method( + self.get_job, + default_retry=retries.Retry( + initial=1.0, + maximum=10.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=client_info, + ), + self.cancel_job: gapic_v1.method.wrap_method( + self.cancel_job, + default_timeout=60.0, + client_info=client_info, + ), + self.create_environment: gapic_v1.method.wrap_method( + self.create_environment, + default_timeout=60.0, + client_info=client_info, + ), + self.update_environment: gapic_v1.method.wrap_method( + self.update_environment, + default_timeout=60.0, + client_info=client_info, + ), + self.delete_environment: gapic_v1.method.wrap_method( + self.delete_environment, + default_timeout=60.0, + client_info=client_info, + ), + self.list_environments: gapic_v1.method.wrap_method( + self.list_environments, + default_retry=retries.Retry( + initial=1.0, + maximum=10.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=client_info, + ), + self.get_environment: gapic_v1.method.wrap_method( + self.get_environment, + default_retry=retries.Retry( + initial=1.0, + maximum=10.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=client_info, + ), + self.list_sessions: gapic_v1.method.wrap_method( + self.list_sessions, + default_timeout=None, + client_info=client_info, + ), + self.get_location: gapic_v1.method.wrap_method( + self.get_location, + default_timeout=None, + client_info=client_info, + ), + 
self.list_locations: gapic_v1.method.wrap_method( + self.list_locations, + default_timeout=None, + client_info=client_info, + ), + self.cancel_operation: gapic_v1.method.wrap_method( + self.cancel_operation, + default_timeout=None, + client_info=client_info, + ), + self.delete_operation: gapic_v1.method.wrap_method( + self.delete_operation, + default_timeout=None, + client_info=client_info, + ), + self.get_operation: gapic_v1.method.wrap_method( + self.get_operation, + default_timeout=None, + client_info=client_info, + ), + self.list_operations: gapic_v1.method.wrap_method( + self.list_operations, + default_timeout=None, + client_info=client_info, + ), + } + + def close(self): + """Closes resources associated with the transport. + + .. warning:: + Only call this method if the transport is NOT shared + with other clients - this may cause errors in other clients! + """ + raise NotImplementedError() + + @property + def operations_client(self): + """Return the client designed to process long-running operations.""" + raise NotImplementedError() + + @property + def create_lake(self) -> Callable[ + [service.CreateLakeRequest], + Union[ + operations_pb2.Operation, + Awaitable[operations_pb2.Operation] + ]]: + raise NotImplementedError() + + @property + def update_lake(self) -> Callable[ + [service.UpdateLakeRequest], + Union[ + operations_pb2.Operation, + Awaitable[operations_pb2.Operation] + ]]: + raise NotImplementedError() + + @property + def delete_lake(self) -> Callable[ + [service.DeleteLakeRequest], + Union[ + operations_pb2.Operation, + Awaitable[operations_pb2.Operation] + ]]: + raise NotImplementedError() + + @property + def list_lakes(self) -> Callable[ + [service.ListLakesRequest], + Union[ + service.ListLakesResponse, + Awaitable[service.ListLakesResponse] + ]]: + raise NotImplementedError() + + @property + def get_lake(self) -> Callable[ + [service.GetLakeRequest], + Union[ + resources.Lake, + Awaitable[resources.Lake] + ]]: + raise NotImplementedError() + + @property + def list_lake_actions(self) -> Callable[ + [service.ListLakeActionsRequest], + Union[ + service.ListActionsResponse, + Awaitable[service.ListActionsResponse] + ]]: + raise NotImplementedError() + + @property + def create_zone(self) -> Callable[ + [service.CreateZoneRequest], + Union[ + operations_pb2.Operation, + Awaitable[operations_pb2.Operation] + ]]: + raise NotImplementedError() + + @property + def update_zone(self) -> Callable[ + [service.UpdateZoneRequest], + Union[ + operations_pb2.Operation, + Awaitable[operations_pb2.Operation] + ]]: + raise NotImplementedError() + + @property + def delete_zone(self) -> Callable[ + [service.DeleteZoneRequest], + Union[ + operations_pb2.Operation, + Awaitable[operations_pb2.Operation] + ]]: + raise NotImplementedError() + + @property + def list_zones(self) -> Callable[ + [service.ListZonesRequest], + Union[ + service.ListZonesResponse, + Awaitable[service.ListZonesResponse] + ]]: + raise NotImplementedError() + + @property + def get_zone(self) -> Callable[ + [service.GetZoneRequest], + Union[ + resources.Zone, + Awaitable[resources.Zone] + ]]: + raise NotImplementedError() + + @property + def list_zone_actions(self) -> Callable[ + [service.ListZoneActionsRequest], + Union[ + service.ListActionsResponse, + Awaitable[service.ListActionsResponse] + ]]: + raise NotImplementedError() + + @property + def create_asset(self) -> Callable[ + [service.CreateAssetRequest], + Union[ + operations_pb2.Operation, + Awaitable[operations_pb2.Operation] + ]]: + raise NotImplementedError() + + 
@property + def update_asset(self) -> Callable[ + [service.UpdateAssetRequest], + Union[ + operations_pb2.Operation, + Awaitable[operations_pb2.Operation] + ]]: + raise NotImplementedError() + + @property + def delete_asset(self) -> Callable[ + [service.DeleteAssetRequest], + Union[ + operations_pb2.Operation, + Awaitable[operations_pb2.Operation] + ]]: + raise NotImplementedError() + + @property + def list_assets(self) -> Callable[ + [service.ListAssetsRequest], + Union[ + service.ListAssetsResponse, + Awaitable[service.ListAssetsResponse] + ]]: + raise NotImplementedError() + + @property + def get_asset(self) -> Callable[ + [service.GetAssetRequest], + Union[ + resources.Asset, + Awaitable[resources.Asset] + ]]: + raise NotImplementedError() + + @property + def list_asset_actions(self) -> Callable[ + [service.ListAssetActionsRequest], + Union[ + service.ListActionsResponse, + Awaitable[service.ListActionsResponse] + ]]: + raise NotImplementedError() + + @property + def create_task(self) -> Callable[ + [service.CreateTaskRequest], + Union[ + operations_pb2.Operation, + Awaitable[operations_pb2.Operation] + ]]: + raise NotImplementedError() + + @property + def update_task(self) -> Callable[ + [service.UpdateTaskRequest], + Union[ + operations_pb2.Operation, + Awaitable[operations_pb2.Operation] + ]]: + raise NotImplementedError() + + @property + def delete_task(self) -> Callable[ + [service.DeleteTaskRequest], + Union[ + operations_pb2.Operation, + Awaitable[operations_pb2.Operation] + ]]: + raise NotImplementedError() + + @property + def list_tasks(self) -> Callable[ + [service.ListTasksRequest], + Union[ + service.ListTasksResponse, + Awaitable[service.ListTasksResponse] + ]]: + raise NotImplementedError() + + @property + def get_task(self) -> Callable[ + [service.GetTaskRequest], + Union[ + tasks.Task, + Awaitable[tasks.Task] + ]]: + raise NotImplementedError() + + @property + def list_jobs(self) -> Callable[ + [service.ListJobsRequest], + Union[ + service.ListJobsResponse, + Awaitable[service.ListJobsResponse] + ]]: + raise NotImplementedError() + + @property + def run_task(self) -> Callable[ + [service.RunTaskRequest], + Union[ + service.RunTaskResponse, + Awaitable[service.RunTaskResponse] + ]]: + raise NotImplementedError() + + @property + def get_job(self) -> Callable[ + [service.GetJobRequest], + Union[ + tasks.Job, + Awaitable[tasks.Job] + ]]: + raise NotImplementedError() + + @property + def cancel_job(self) -> Callable[ + [service.CancelJobRequest], + Union[ + empty_pb2.Empty, + Awaitable[empty_pb2.Empty] + ]]: + raise NotImplementedError() + + @property + def create_environment(self) -> Callable[ + [service.CreateEnvironmentRequest], + Union[ + operations_pb2.Operation, + Awaitable[operations_pb2.Operation] + ]]: + raise NotImplementedError() + + @property + def update_environment(self) -> Callable[ + [service.UpdateEnvironmentRequest], + Union[ + operations_pb2.Operation, + Awaitable[operations_pb2.Operation] + ]]: + raise NotImplementedError() + + @property + def delete_environment(self) -> Callable[ + [service.DeleteEnvironmentRequest], + Union[ + operations_pb2.Operation, + Awaitable[operations_pb2.Operation] + ]]: + raise NotImplementedError() + + @property + def list_environments(self) -> Callable[ + [service.ListEnvironmentsRequest], + Union[ + service.ListEnvironmentsResponse, + Awaitable[service.ListEnvironmentsResponse] + ]]: + raise NotImplementedError() + + @property + def get_environment(self) -> Callable[ + [service.GetEnvironmentRequest], + Union[ + 
analyze.Environment, + Awaitable[analyze.Environment] + ]]: + raise NotImplementedError() + + @property + def list_sessions(self) -> Callable[ + [service.ListSessionsRequest], + Union[ + service.ListSessionsResponse, + Awaitable[service.ListSessionsResponse] + ]]: + raise NotImplementedError() + + @property + def list_operations( + self, + ) -> Callable[ + [operations_pb2.ListOperationsRequest], + Union[operations_pb2.ListOperationsResponse, Awaitable[operations_pb2.ListOperationsResponse]], + ]: + raise NotImplementedError() + + @property + def get_operation( + self, + ) -> Callable[ + [operations_pb2.GetOperationRequest], + Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], + ]: + raise NotImplementedError() + + @property + def cancel_operation( + self, + ) -> Callable[ + [operations_pb2.CancelOperationRequest], + None, + ]: + raise NotImplementedError() + + @property + def delete_operation( + self, + ) -> Callable[ + [operations_pb2.DeleteOperationRequest], + None, + ]: + raise NotImplementedError() + + @property + def get_location(self, + ) -> Callable[ + [locations_pb2.GetLocationRequest], + Union[locations_pb2.Location, Awaitable[locations_pb2.Location]], + ]: + raise NotImplementedError() + + @property + def list_locations(self, + ) -> Callable[ + [locations_pb2.ListLocationsRequest], + Union[locations_pb2.ListLocationsResponse, Awaitable[locations_pb2.ListLocationsResponse]], + ]: + raise NotImplementedError() + + @property + def kind(self) -> str: + raise NotImplementedError() + + +__all__ = ( + 'DataplexServiceTransport', +) diff --git a/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/dataplex_service/transports/grpc.py b/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/dataplex_service/transports/grpc.py new file mode 100644 index 000000000000..8760eb831d23 --- /dev/null +++ b/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/dataplex_service/transports/grpc.py @@ -0,0 +1,1247 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+#
+import warnings
+from typing import Callable, Dict, Optional, Sequence, Tuple, Union
+
+from google.api_core import grpc_helpers
+from google.api_core import operations_v1
+from google.api_core import gapic_v1
+import google.auth  # type: ignore
+from google.auth import credentials as ga_credentials  # type: ignore
+from google.auth.transport.grpc import SslCredentials  # type: ignore
+
+import grpc  # type: ignore
+
+from google.cloud.dataplex_v1.types import analyze
+from google.cloud.dataplex_v1.types import resources
+from google.cloud.dataplex_v1.types import service
+from google.cloud.dataplex_v1.types import tasks
+from google.cloud.location import locations_pb2  # type: ignore
+from google.iam.v1 import iam_policy_pb2  # type: ignore
+from google.iam.v1 import policy_pb2  # type: ignore
+from google.longrunning import operations_pb2  # type: ignore
+from google.protobuf import empty_pb2  # type: ignore
+from .base import DataplexServiceTransport, DEFAULT_CLIENT_INFO
+
+
+class DataplexServiceGrpcTransport(DataplexServiceTransport):
+    """gRPC backend transport for DataplexService.
+
+    The Dataplex service provides data lakes as a service. The
+    primary resources offered by this service are Lakes, Zones, and
+    Assets, which collectively allow a data administrator to
+    organize, manage, secure, and catalog data across their
+    organization, located across cloud projects in a variety of
+    storage systems, including Cloud Storage and BigQuery.
+
+    This class defines the same methods as the primary client, so the
+    primary client can load the underlying transport implementation
+    and call it.
+
+    It sends protocol buffers over the wire using gRPC (which is built on
+    top of HTTP/2); the ``grpcio`` package must be installed.
+    """
+    _stubs: Dict[str, Callable]
+
+    def __init__(self, *,
+            host: str = 'dataplex.googleapis.com',
+            credentials: Optional[ga_credentials.Credentials] = None,
+            credentials_file: Optional[str] = None,
+            scopes: Optional[Sequence[str]] = None,
+            channel: Optional[Union[grpc.Channel, Callable[..., grpc.Channel]]] = None,
+            api_mtls_endpoint: Optional[str] = None,
+            client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None,
+            ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None,
+            client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None,
+            quota_project_id: Optional[str] = None,
+            client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO,
+            always_use_jwt_access: Optional[bool] = False,
+            api_audience: Optional[str] = None,
+            ) -> None:
+        """Instantiate the transport.
+
+        Args:
+            host (Optional[str]):
+                The hostname to connect to (default: 'dataplex.googleapis.com').
+            credentials (Optional[google.auth.credentials.Credentials]): The
+                authorization credentials to attach to requests. These
+                credentials identify the application to the service; if none
+                are specified, the client will attempt to ascertain the
+                credentials from the environment.
+                This argument is ignored if a ``channel`` instance is provided.
+            credentials_file (Optional[str]): A file with credentials that can
+                be loaded with :func:`google.auth.load_credentials_from_file`.
+                This argument is ignored if a ``channel`` instance is provided.
+            scopes (Optional[Sequence[str]]): A list of scopes. This argument is
+                ignored if a ``channel`` instance is provided.
+            channel (Optional[Union[grpc.Channel, Callable[..., grpc.Channel]]]):
+                A ``Channel`` instance through which to make calls, or a Callable
+                that constructs and returns one. If set to None, ``self.create_channel``
If set to None, ``self.create_channel`` + is used to create the channel. If a Callable is given, it will be called + with the same arguments as used in ``self.create_channel``. + api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint. + If provided, it overrides the ``host`` argument and tries to create + a mutual TLS channel with client SSL credentials from + ``client_cert_source`` or application default SSL credentials. + client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]): + Deprecated. A callback to provide client SSL certificate bytes and + private key bytes, both in PEM format. It is ignored if + ``api_mtls_endpoint`` is None. + ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials + for the grpc channel. It is ignored if a ``channel`` instance is provided. + client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]): + A callback to provide client certificate bytes and private key bytes, + both in PEM format. It is used to configure a mutual TLS channel. It is + ignored if a ``channel`` instance or ``ssl_channel_credentials`` is provided. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport + creation failed for any reason. + google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` + and ``credentials_file`` are passed. + """ + self._grpc_channel = None + self._ssl_channel_credentials = ssl_channel_credentials + self._stubs: Dict[str, Callable] = {} + self._operations_client: Optional[operations_v1.OperationsClient] = None + + if api_mtls_endpoint: + warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning) + if client_cert_source: + warnings.warn("client_cert_source is deprecated", DeprecationWarning) + + if isinstance(channel, grpc.Channel): + # Ignore credentials if a channel was passed. + credentials = None + self._ignore_credentials = True + # If a channel was explicitly provided, set it. + self._grpc_channel = channel + self._ssl_channel_credentials = None + + else: + if api_mtls_endpoint: + host = api_mtls_endpoint + + # Create SSL credentials with client_cert_source or application + # default SSL credentials. 
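+                # A hedged, illustrative sketch (not part of the generated
+                # surface): a caller-supplied ``client_cert_source`` is a
+                # zero-argument callable returning ``(cert_bytes, key_bytes)``
+                # in PEM format; the file names below are hypothetical.
+                #
+                #   def client_cert_source() -> Tuple[bytes, bytes]:
+                #       with open("client_cert.pem", "rb") as c:
+                #           with open("client_key.pem", "rb") as k:
+                #               return c.read(), k.read()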
+                if client_cert_source:
+                    cert, key = client_cert_source()
+                    self._ssl_channel_credentials = grpc.ssl_channel_credentials(
+                        certificate_chain=cert, private_key=key
+                    )
+                else:
+                    self._ssl_channel_credentials = SslCredentials().ssl_credentials
+
+            else:
+                if client_cert_source_for_mtls and not ssl_channel_credentials:
+                    cert, key = client_cert_source_for_mtls()
+                    self._ssl_channel_credentials = grpc.ssl_channel_credentials(
+                        certificate_chain=cert, private_key=key
+                    )
+
+        # The base transport sets the host, credentials and scopes
+        super().__init__(
+            host=host,
+            credentials=credentials,
+            credentials_file=credentials_file,
+            scopes=scopes,
+            quota_project_id=quota_project_id,
+            client_info=client_info,
+            always_use_jwt_access=always_use_jwt_access,
+            api_audience=api_audience,
+        )
+
+        if not self._grpc_channel:
+            # initialize with the provided callable or the default channel
+            channel_init = channel or type(self).create_channel
+            self._grpc_channel = channel_init(
+                self._host,
+                # use the credentials which are saved
+                credentials=self._credentials,
+                # Set ``credentials_file`` to ``None`` here as
+                # the credentials that we saved earlier should be used.
+                credentials_file=None,
+                scopes=self._scopes,
+                ssl_credentials=self._ssl_channel_credentials,
+                quota_project_id=quota_project_id,
+                options=[
+                    ("grpc.max_send_message_length", -1),
+                    ("grpc.max_receive_message_length", -1),
+                ],
+            )
+
+        # Wrap messages. This must be done after self._grpc_channel exists
+        self._prep_wrapped_messages(client_info)
+
+    @classmethod
+    def create_channel(cls,
+                       host: str = 'dataplex.googleapis.com',
+                       credentials: Optional[ga_credentials.Credentials] = None,
+                       credentials_file: Optional[str] = None,
+                       scopes: Optional[Sequence[str]] = None,
+                       quota_project_id: Optional[str] = None,
+                       **kwargs) -> grpc.Channel:
+        """Create and return a gRPC channel object.
+        Args:
+            host (Optional[str]): The host for the channel to use.
+            credentials (Optional[~.Credentials]): The
+                authorization credentials to attach to requests. These
+                credentials identify this application to the service. If
+                none are specified, the client will attempt to ascertain
+                the credentials from the environment.
+            credentials_file (Optional[str]): A file with credentials that can
+                be loaded with :func:`google.auth.load_credentials_from_file`.
+                This argument is mutually exclusive with credentials.
+            scopes (Optional[Sequence[str]]): An optional list of scopes needed for this
+                service. These are only used when credentials are not specified and
+                are passed to :func:`google.auth.default`.
+            quota_project_id (Optional[str]): An optional project to use for billing
+                and quota.
+            kwargs (Optional[dict]): Keyword arguments, which are passed to the
+                channel creation.
+        Returns:
+            grpc.Channel: A gRPC channel object.
+
+        Raises:
+            google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials``
+                and ``credentials_file`` are passed.
+        """
+
+        return grpc_helpers.create_channel(
+            host,
+            credentials=credentials,
+            credentials_file=credentials_file,
+            quota_project_id=quota_project_id,
+            default_scopes=cls.AUTH_SCOPES,
+            scopes=scopes,
+            default_host=cls.DEFAULT_HOST,
+            **kwargs
+        )
+
+    @property
+    def grpc_channel(self) -> grpc.Channel:
+        """Return the channel designed to connect to this service.
+        """
+        return self._grpc_channel
+
+    @property
+    def operations_client(self) -> operations_v1.OperationsClient:
+        """Create the client designed to process long-running operations.
+ + This property caches on the instance; repeated calls return the same + client. + """ + # Quick check: Only create a new client if we do not already have one. + if self._operations_client is None: + self._operations_client = operations_v1.OperationsClient( + self.grpc_channel + ) + + # Return the client from cache. + return self._operations_client + + @property + def create_lake(self) -> Callable[ + [service.CreateLakeRequest], + operations_pb2.Operation]: + r"""Return a callable for the create lake method over gRPC. + + Creates a lake resource. + + Returns: + Callable[[~.CreateLakeRequest], + ~.Operation]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'create_lake' not in self._stubs: + self._stubs['create_lake'] = self.grpc_channel.unary_unary( + '/google.cloud.dataplex.v1.DataplexService/CreateLake', + request_serializer=service.CreateLakeRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs['create_lake'] + + @property + def update_lake(self) -> Callable[ + [service.UpdateLakeRequest], + operations_pb2.Operation]: + r"""Return a callable for the update lake method over gRPC. + + Updates a lake resource. + + Returns: + Callable[[~.UpdateLakeRequest], + ~.Operation]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'update_lake' not in self._stubs: + self._stubs['update_lake'] = self.grpc_channel.unary_unary( + '/google.cloud.dataplex.v1.DataplexService/UpdateLake', + request_serializer=service.UpdateLakeRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs['update_lake'] + + @property + def delete_lake(self) -> Callable[ + [service.DeleteLakeRequest], + operations_pb2.Operation]: + r"""Return a callable for the delete lake method over gRPC. + + Deletes a lake resource. All zones within the lake + must be deleted before the lake can be deleted. + + Returns: + Callable[[~.DeleteLakeRequest], + ~.Operation]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'delete_lake' not in self._stubs: + self._stubs['delete_lake'] = self.grpc_channel.unary_unary( + '/google.cloud.dataplex.v1.DataplexService/DeleteLake', + request_serializer=service.DeleteLakeRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs['delete_lake'] + + @property + def list_lakes(self) -> Callable[ + [service.ListLakesRequest], + service.ListLakesResponse]: + r"""Return a callable for the list lakes method over gRPC. + + Lists lake resources in a project and location. + + Returns: + Callable[[~.ListLakesRequest], + ~.ListLakesResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. 
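+        # Illustrative sketch (``transport`` stands for an instance of this
+        # class; the parent value is hypothetical): the returned callable
+        # accepts a request message and performs the RPC directly.
+        #
+        #   request = service.ListLakesRequest(parent="projects/p/locations/l")
+        #   response = transport.list_lakes(request)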
+ # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'list_lakes' not in self._stubs: + self._stubs['list_lakes'] = self.grpc_channel.unary_unary( + '/google.cloud.dataplex.v1.DataplexService/ListLakes', + request_serializer=service.ListLakesRequest.serialize, + response_deserializer=service.ListLakesResponse.deserialize, + ) + return self._stubs['list_lakes'] + + @property + def get_lake(self) -> Callable[ + [service.GetLakeRequest], + resources.Lake]: + r"""Return a callable for the get lake method over gRPC. + + Retrieves a lake resource. + + Returns: + Callable[[~.GetLakeRequest], + ~.Lake]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'get_lake' not in self._stubs: + self._stubs['get_lake'] = self.grpc_channel.unary_unary( + '/google.cloud.dataplex.v1.DataplexService/GetLake', + request_serializer=service.GetLakeRequest.serialize, + response_deserializer=resources.Lake.deserialize, + ) + return self._stubs['get_lake'] + + @property + def list_lake_actions(self) -> Callable[ + [service.ListLakeActionsRequest], + service.ListActionsResponse]: + r"""Return a callable for the list lake actions method over gRPC. + + Lists action resources in a lake. + + Returns: + Callable[[~.ListLakeActionsRequest], + ~.ListActionsResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'list_lake_actions' not in self._stubs: + self._stubs['list_lake_actions'] = self.grpc_channel.unary_unary( + '/google.cloud.dataplex.v1.DataplexService/ListLakeActions', + request_serializer=service.ListLakeActionsRequest.serialize, + response_deserializer=service.ListActionsResponse.deserialize, + ) + return self._stubs['list_lake_actions'] + + @property + def create_zone(self) -> Callable[ + [service.CreateZoneRequest], + operations_pb2.Operation]: + r"""Return a callable for the create zone method over gRPC. + + Creates a zone resource within a lake. + + Returns: + Callable[[~.CreateZoneRequest], + ~.Operation]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'create_zone' not in self._stubs: + self._stubs['create_zone'] = self.grpc_channel.unary_unary( + '/google.cloud.dataplex.v1.DataplexService/CreateZone', + request_serializer=service.CreateZoneRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs['create_zone'] + + @property + def update_zone(self) -> Callable[ + [service.UpdateZoneRequest], + operations_pb2.Operation]: + r"""Return a callable for the update zone method over gRPC. + + Updates a zone resource. + + Returns: + Callable[[~.UpdateZoneRequest], + ~.Operation]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. 
+ # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'update_zone' not in self._stubs: + self._stubs['update_zone'] = self.grpc_channel.unary_unary( + '/google.cloud.dataplex.v1.DataplexService/UpdateZone', + request_serializer=service.UpdateZoneRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs['update_zone'] + + @property + def delete_zone(self) -> Callable[ + [service.DeleteZoneRequest], + operations_pb2.Operation]: + r"""Return a callable for the delete zone method over gRPC. + + Deletes a zone resource. All assets within a zone + must be deleted before the zone can be deleted. + + Returns: + Callable[[~.DeleteZoneRequest], + ~.Operation]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'delete_zone' not in self._stubs: + self._stubs['delete_zone'] = self.grpc_channel.unary_unary( + '/google.cloud.dataplex.v1.DataplexService/DeleteZone', + request_serializer=service.DeleteZoneRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs['delete_zone'] + + @property + def list_zones(self) -> Callable[ + [service.ListZonesRequest], + service.ListZonesResponse]: + r"""Return a callable for the list zones method over gRPC. + + Lists zone resources in a lake. + + Returns: + Callable[[~.ListZonesRequest], + ~.ListZonesResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'list_zones' not in self._stubs: + self._stubs['list_zones'] = self.grpc_channel.unary_unary( + '/google.cloud.dataplex.v1.DataplexService/ListZones', + request_serializer=service.ListZonesRequest.serialize, + response_deserializer=service.ListZonesResponse.deserialize, + ) + return self._stubs['list_zones'] + + @property + def get_zone(self) -> Callable[ + [service.GetZoneRequest], + resources.Zone]: + r"""Return a callable for the get zone method over gRPC. + + Retrieves a zone resource. + + Returns: + Callable[[~.GetZoneRequest], + ~.Zone]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'get_zone' not in self._stubs: + self._stubs['get_zone'] = self.grpc_channel.unary_unary( + '/google.cloud.dataplex.v1.DataplexService/GetZone', + request_serializer=service.GetZoneRequest.serialize, + response_deserializer=resources.Zone.deserialize, + ) + return self._stubs['get_zone'] + + @property + def list_zone_actions(self) -> Callable[ + [service.ListZoneActionsRequest], + service.ListActionsResponse]: + r"""Return a callable for the list zone actions method over gRPC. + + Lists action resources in a zone. + + Returns: + Callable[[~.ListZoneActionsRequest], + ~.ListActionsResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. 
+ # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'list_zone_actions' not in self._stubs: + self._stubs['list_zone_actions'] = self.grpc_channel.unary_unary( + '/google.cloud.dataplex.v1.DataplexService/ListZoneActions', + request_serializer=service.ListZoneActionsRequest.serialize, + response_deserializer=service.ListActionsResponse.deserialize, + ) + return self._stubs['list_zone_actions'] + + @property + def create_asset(self) -> Callable[ + [service.CreateAssetRequest], + operations_pb2.Operation]: + r"""Return a callable for the create asset method over gRPC. + + Creates an asset resource. + + Returns: + Callable[[~.CreateAssetRequest], + ~.Operation]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'create_asset' not in self._stubs: + self._stubs['create_asset'] = self.grpc_channel.unary_unary( + '/google.cloud.dataplex.v1.DataplexService/CreateAsset', + request_serializer=service.CreateAssetRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs['create_asset'] + + @property + def update_asset(self) -> Callable[ + [service.UpdateAssetRequest], + operations_pb2.Operation]: + r"""Return a callable for the update asset method over gRPC. + + Updates an asset resource. + + Returns: + Callable[[~.UpdateAssetRequest], + ~.Operation]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'update_asset' not in self._stubs: + self._stubs['update_asset'] = self.grpc_channel.unary_unary( + '/google.cloud.dataplex.v1.DataplexService/UpdateAsset', + request_serializer=service.UpdateAssetRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs['update_asset'] + + @property + def delete_asset(self) -> Callable[ + [service.DeleteAssetRequest], + operations_pb2.Operation]: + r"""Return a callable for the delete asset method over gRPC. + + Deletes an asset resource. The referenced storage + resource is detached (default) or deleted based on the + associated Lifecycle policy. + + Returns: + Callable[[~.DeleteAssetRequest], + ~.Operation]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'delete_asset' not in self._stubs: + self._stubs['delete_asset'] = self.grpc_channel.unary_unary( + '/google.cloud.dataplex.v1.DataplexService/DeleteAsset', + request_serializer=service.DeleteAssetRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs['delete_asset'] + + @property + def list_assets(self) -> Callable[ + [service.ListAssetsRequest], + service.ListAssetsResponse]: + r"""Return a callable for the list assets method over gRPC. + + Lists asset resources in a zone. + + Returns: + Callable[[~.ListAssetsRequest], + ~.ListAssetsResponse]: + A function that, when called, will call the underlying RPC + on the server. 
+ """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'list_assets' not in self._stubs: + self._stubs['list_assets'] = self.grpc_channel.unary_unary( + '/google.cloud.dataplex.v1.DataplexService/ListAssets', + request_serializer=service.ListAssetsRequest.serialize, + response_deserializer=service.ListAssetsResponse.deserialize, + ) + return self._stubs['list_assets'] + + @property + def get_asset(self) -> Callable[ + [service.GetAssetRequest], + resources.Asset]: + r"""Return a callable for the get asset method over gRPC. + + Retrieves an asset resource. + + Returns: + Callable[[~.GetAssetRequest], + ~.Asset]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'get_asset' not in self._stubs: + self._stubs['get_asset'] = self.grpc_channel.unary_unary( + '/google.cloud.dataplex.v1.DataplexService/GetAsset', + request_serializer=service.GetAssetRequest.serialize, + response_deserializer=resources.Asset.deserialize, + ) + return self._stubs['get_asset'] + + @property + def list_asset_actions(self) -> Callable[ + [service.ListAssetActionsRequest], + service.ListActionsResponse]: + r"""Return a callable for the list asset actions method over gRPC. + + Lists action resources in an asset. + + Returns: + Callable[[~.ListAssetActionsRequest], + ~.ListActionsResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'list_asset_actions' not in self._stubs: + self._stubs['list_asset_actions'] = self.grpc_channel.unary_unary( + '/google.cloud.dataplex.v1.DataplexService/ListAssetActions', + request_serializer=service.ListAssetActionsRequest.serialize, + response_deserializer=service.ListActionsResponse.deserialize, + ) + return self._stubs['list_asset_actions'] + + @property + def create_task(self) -> Callable[ + [service.CreateTaskRequest], + operations_pb2.Operation]: + r"""Return a callable for the create task method over gRPC. + + Creates a task resource within a lake. + + Returns: + Callable[[~.CreateTaskRequest], + ~.Operation]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'create_task' not in self._stubs: + self._stubs['create_task'] = self.grpc_channel.unary_unary( + '/google.cloud.dataplex.v1.DataplexService/CreateTask', + request_serializer=service.CreateTaskRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs['create_task'] + + @property + def update_task(self) -> Callable[ + [service.UpdateTaskRequest], + operations_pb2.Operation]: + r"""Return a callable for the update task method over gRPC. + + Update the task resource. + + Returns: + Callable[[~.UpdateTaskRequest], + ~.Operation]: + A function that, when called, will call the underlying RPC + on the server. 
+ """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'update_task' not in self._stubs: + self._stubs['update_task'] = self.grpc_channel.unary_unary( + '/google.cloud.dataplex.v1.DataplexService/UpdateTask', + request_serializer=service.UpdateTaskRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs['update_task'] + + @property + def delete_task(self) -> Callable[ + [service.DeleteTaskRequest], + operations_pb2.Operation]: + r"""Return a callable for the delete task method over gRPC. + + Delete the task resource. + + Returns: + Callable[[~.DeleteTaskRequest], + ~.Operation]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'delete_task' not in self._stubs: + self._stubs['delete_task'] = self.grpc_channel.unary_unary( + '/google.cloud.dataplex.v1.DataplexService/DeleteTask', + request_serializer=service.DeleteTaskRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs['delete_task'] + + @property + def list_tasks(self) -> Callable[ + [service.ListTasksRequest], + service.ListTasksResponse]: + r"""Return a callable for the list tasks method over gRPC. + + Lists tasks under the given lake. + + Returns: + Callable[[~.ListTasksRequest], + ~.ListTasksResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'list_tasks' not in self._stubs: + self._stubs['list_tasks'] = self.grpc_channel.unary_unary( + '/google.cloud.dataplex.v1.DataplexService/ListTasks', + request_serializer=service.ListTasksRequest.serialize, + response_deserializer=service.ListTasksResponse.deserialize, + ) + return self._stubs['list_tasks'] + + @property + def get_task(self) -> Callable[ + [service.GetTaskRequest], + tasks.Task]: + r"""Return a callable for the get task method over gRPC. + + Get task resource. + + Returns: + Callable[[~.GetTaskRequest], + ~.Task]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'get_task' not in self._stubs: + self._stubs['get_task'] = self.grpc_channel.unary_unary( + '/google.cloud.dataplex.v1.DataplexService/GetTask', + request_serializer=service.GetTaskRequest.serialize, + response_deserializer=tasks.Task.deserialize, + ) + return self._stubs['get_task'] + + @property + def list_jobs(self) -> Callable[ + [service.ListJobsRequest], + service.ListJobsResponse]: + r"""Return a callable for the list jobs method over gRPC. + + Lists Jobs under the given task. + + Returns: + Callable[[~.ListJobsRequest], + ~.ListJobsResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. 
+ # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'list_jobs' not in self._stubs: + self._stubs['list_jobs'] = self.grpc_channel.unary_unary( + '/google.cloud.dataplex.v1.DataplexService/ListJobs', + request_serializer=service.ListJobsRequest.serialize, + response_deserializer=service.ListJobsResponse.deserialize, + ) + return self._stubs['list_jobs'] + + @property + def run_task(self) -> Callable[ + [service.RunTaskRequest], + service.RunTaskResponse]: + r"""Return a callable for the run task method over gRPC. + + Run an on demand execution of a Task. + + Returns: + Callable[[~.RunTaskRequest], + ~.RunTaskResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'run_task' not in self._stubs: + self._stubs['run_task'] = self.grpc_channel.unary_unary( + '/google.cloud.dataplex.v1.DataplexService/RunTask', + request_serializer=service.RunTaskRequest.serialize, + response_deserializer=service.RunTaskResponse.deserialize, + ) + return self._stubs['run_task'] + + @property + def get_job(self) -> Callable[ + [service.GetJobRequest], + tasks.Job]: + r"""Return a callable for the get job method over gRPC. + + Get job resource. + + Returns: + Callable[[~.GetJobRequest], + ~.Job]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'get_job' not in self._stubs: + self._stubs['get_job'] = self.grpc_channel.unary_unary( + '/google.cloud.dataplex.v1.DataplexService/GetJob', + request_serializer=service.GetJobRequest.serialize, + response_deserializer=tasks.Job.deserialize, + ) + return self._stubs['get_job'] + + @property + def cancel_job(self) -> Callable[ + [service.CancelJobRequest], + empty_pb2.Empty]: + r"""Return a callable for the cancel job method over gRPC. + + Cancel jobs running for the task resource. + + Returns: + Callable[[~.CancelJobRequest], + ~.Empty]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'cancel_job' not in self._stubs: + self._stubs['cancel_job'] = self.grpc_channel.unary_unary( + '/google.cloud.dataplex.v1.DataplexService/CancelJob', + request_serializer=service.CancelJobRequest.serialize, + response_deserializer=empty_pb2.Empty.FromString, + ) + return self._stubs['cancel_job'] + + @property + def create_environment(self) -> Callable[ + [service.CreateEnvironmentRequest], + operations_pb2.Operation]: + r"""Return a callable for the create environment method over gRPC. + + Create an environment resource. + + Returns: + Callable[[~.CreateEnvironmentRequest], + ~.Operation]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
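+        # Illustrative sketch (names are hypothetical): ``create_environment``
+        # is a long-running operation, so the stub returns a raw
+        # ``operations_pb2.Operation`` that can be polled through the
+        # ``operations_client`` property.
+        #
+        #   op = transport.create_environment(request)
+        #   latest = transport.operations_client.get_operation(name=op.name)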
+ if 'create_environment' not in self._stubs: + self._stubs['create_environment'] = self.grpc_channel.unary_unary( + '/google.cloud.dataplex.v1.DataplexService/CreateEnvironment', + request_serializer=service.CreateEnvironmentRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs['create_environment'] + + @property + def update_environment(self) -> Callable[ + [service.UpdateEnvironmentRequest], + operations_pb2.Operation]: + r"""Return a callable for the update environment method over gRPC. + + Update the environment resource. + + Returns: + Callable[[~.UpdateEnvironmentRequest], + ~.Operation]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'update_environment' not in self._stubs: + self._stubs['update_environment'] = self.grpc_channel.unary_unary( + '/google.cloud.dataplex.v1.DataplexService/UpdateEnvironment', + request_serializer=service.UpdateEnvironmentRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs['update_environment'] + + @property + def delete_environment(self) -> Callable[ + [service.DeleteEnvironmentRequest], + operations_pb2.Operation]: + r"""Return a callable for the delete environment method over gRPC. + + Delete the environment resource. All the child + resources must have been deleted before environment + deletion can be initiated. + + Returns: + Callable[[~.DeleteEnvironmentRequest], + ~.Operation]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'delete_environment' not in self._stubs: + self._stubs['delete_environment'] = self.grpc_channel.unary_unary( + '/google.cloud.dataplex.v1.DataplexService/DeleteEnvironment', + request_serializer=service.DeleteEnvironmentRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs['delete_environment'] + + @property + def list_environments(self) -> Callable[ + [service.ListEnvironmentsRequest], + service.ListEnvironmentsResponse]: + r"""Return a callable for the list environments method over gRPC. + + Lists environments under the given lake. + + Returns: + Callable[[~.ListEnvironmentsRequest], + ~.ListEnvironmentsResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'list_environments' not in self._stubs: + self._stubs['list_environments'] = self.grpc_channel.unary_unary( + '/google.cloud.dataplex.v1.DataplexService/ListEnvironments', + request_serializer=service.ListEnvironmentsRequest.serialize, + response_deserializer=service.ListEnvironmentsResponse.deserialize, + ) + return self._stubs['list_environments'] + + @property + def get_environment(self) -> Callable[ + [service.GetEnvironmentRequest], + analyze.Environment]: + r"""Return a callable for the get environment method over gRPC. + + Get environment resource. 
+ + Returns: + Callable[[~.GetEnvironmentRequest], + ~.Environment]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'get_environment' not in self._stubs: + self._stubs['get_environment'] = self.grpc_channel.unary_unary( + '/google.cloud.dataplex.v1.DataplexService/GetEnvironment', + request_serializer=service.GetEnvironmentRequest.serialize, + response_deserializer=analyze.Environment.deserialize, + ) + return self._stubs['get_environment'] + + @property + def list_sessions(self) -> Callable[ + [service.ListSessionsRequest], + service.ListSessionsResponse]: + r"""Return a callable for the list sessions method over gRPC. + + Lists session resources in an environment. + + Returns: + Callable[[~.ListSessionsRequest], + ~.ListSessionsResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'list_sessions' not in self._stubs: + self._stubs['list_sessions'] = self.grpc_channel.unary_unary( + '/google.cloud.dataplex.v1.DataplexService/ListSessions', + request_serializer=service.ListSessionsRequest.serialize, + response_deserializer=service.ListSessionsResponse.deserialize, + ) + return self._stubs['list_sessions'] + + def close(self): + self.grpc_channel.close() + + @property + def delete_operation( + self, + ) -> Callable[[operations_pb2.DeleteOperationRequest], None]: + r"""Return a callable for the delete_operation method over gRPC. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "delete_operation" not in self._stubs: + self._stubs["delete_operation"] = self.grpc_channel.unary_unary( + "/google.longrunning.Operations/DeleteOperation", + request_serializer=operations_pb2.DeleteOperationRequest.SerializeToString, + response_deserializer=None, + ) + return self._stubs["delete_operation"] + + @property + def cancel_operation( + self, + ) -> Callable[[operations_pb2.CancelOperationRequest], None]: + r"""Return a callable for the cancel_operation method over gRPC. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "cancel_operation" not in self._stubs: + self._stubs["cancel_operation"] = self.grpc_channel.unary_unary( + "/google.longrunning.Operations/CancelOperation", + request_serializer=operations_pb2.CancelOperationRequest.SerializeToString, + response_deserializer=None, + ) + return self._stubs["cancel_operation"] + + @property + def get_operation( + self, + ) -> Callable[[operations_pb2.GetOperationRequest], operations_pb2.Operation]: + r"""Return a callable for the get_operation method over gRPC. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
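+        # Illustrative sketch (the operation name is hypothetical): this stub
+        # targets the generic ``google.longrunning.Operations`` mixin service.
+        #
+        #   req = operations_pb2.GetOperationRequest(
+        #       name="projects/p/locations/l/operations/op-1")
+        #   op = transport.get_operation(req)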
+        if "get_operation" not in self._stubs:
+            self._stubs["get_operation"] = self.grpc_channel.unary_unary(
+                "/google.longrunning.Operations/GetOperation",
+                request_serializer=operations_pb2.GetOperationRequest.SerializeToString,
+                response_deserializer=operations_pb2.Operation.FromString,
+            )
+        return self._stubs["get_operation"]
+
+    @property
+    def list_operations(
+        self,
+    ) -> Callable[[operations_pb2.ListOperationsRequest], operations_pb2.ListOperationsResponse]:
+        r"""Return a callable for the list_operations method over gRPC.
+        """
+        # Generate a "stub function" on-the-fly which will actually make
+        # the request.
+        # gRPC handles serialization and deserialization, so we just need
+        # to pass in the functions for each.
+        if "list_operations" not in self._stubs:
+            self._stubs["list_operations"] = self.grpc_channel.unary_unary(
+                "/google.longrunning.Operations/ListOperations",
+                request_serializer=operations_pb2.ListOperationsRequest.SerializeToString,
+                response_deserializer=operations_pb2.ListOperationsResponse.FromString,
+            )
+        return self._stubs["list_operations"]
+
+    @property
+    def list_locations(
+        self,
+    ) -> Callable[[locations_pb2.ListLocationsRequest], locations_pb2.ListLocationsResponse]:
+        r"""Return a callable for the list locations method over gRPC.
+        """
+        # Generate a "stub function" on-the-fly which will actually make
+        # the request.
+        # gRPC handles serialization and deserialization, so we just need
+        # to pass in the functions for each.
+        if "list_locations" not in self._stubs:
+            self._stubs["list_locations"] = self.grpc_channel.unary_unary(
+                "/google.cloud.location.Locations/ListLocations",
+                request_serializer=locations_pb2.ListLocationsRequest.SerializeToString,
+                response_deserializer=locations_pb2.ListLocationsResponse.FromString,
+            )
+        return self._stubs["list_locations"]
+
+    @property
+    def get_location(
+        self,
+    ) -> Callable[[locations_pb2.GetLocationRequest], locations_pb2.Location]:
+        r"""Return a callable for the get location method over gRPC.
+        """
+        # Generate a "stub function" on-the-fly which will actually make
+        # the request.
+        # gRPC handles serialization and deserialization, so we just need
+        # to pass in the functions for each.
+        if "get_location" not in self._stubs:
+            self._stubs["get_location"] = self.grpc_channel.unary_unary(
+                "/google.cloud.location.Locations/GetLocation",
+                request_serializer=locations_pb2.GetLocationRequest.SerializeToString,
+                response_deserializer=locations_pb2.Location.FromString,
+            )
+        return self._stubs["get_location"]
+
+    @property
+    def kind(self) -> str:
+        return "grpc"
+
+
+__all__ = (
+    'DataplexServiceGrpcTransport',
+)
diff --git a/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/dataplex_service/transports/grpc_asyncio.py b/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/dataplex_service/transports/grpc_asyncio.py
new file mode 100644
index 000000000000..d9f2b367ca58
--- /dev/null
+++ b/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/dataplex_service/transports/grpc_asyncio.py
@@ -0,0 +1,1593 @@
+# -*- coding: utf-8 -*-
+# Copyright 2024 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+import inspect
+import warnings
+from typing import Awaitable, Callable, Dict, Optional, Sequence, Tuple, Union
+
+from google.api_core import gapic_v1
+from google.api_core import grpc_helpers_async
+from google.api_core import exceptions as core_exceptions
+from google.api_core import retry_async as retries
+from google.api_core import operations_v1
+from google.auth import credentials as ga_credentials  # type: ignore
+from google.auth.transport.grpc import SslCredentials  # type: ignore
+
+import grpc  # type: ignore
+from grpc.experimental import aio  # type: ignore
+
+from google.cloud.dataplex_v1.types import analyze
+from google.cloud.dataplex_v1.types import resources
+from google.cloud.dataplex_v1.types import service
+from google.cloud.dataplex_v1.types import tasks
+from google.cloud.location import locations_pb2 # type: ignore
+from google.iam.v1 import iam_policy_pb2  # type: ignore
+from google.iam.v1 import policy_pb2  # type: ignore
+from google.longrunning import operations_pb2 # type: ignore
+from google.protobuf import empty_pb2  # type: ignore
+from .base import DataplexServiceTransport, DEFAULT_CLIENT_INFO
+from .grpc import DataplexServiceGrpcTransport
+
+
+class DataplexServiceGrpcAsyncIOTransport(DataplexServiceTransport):
+    """gRPC AsyncIO backend transport for DataplexService.
+
+    Dataplex service provides data lakes as a service. The
+    primary resources offered by this service are Lakes, Zones and
+    Assets which collectively allow a data administrator to
+    organize, manage, secure and catalog data across their
+    organization located across cloud projects in a variety of
+    storage systems including Cloud Storage and BigQuery.
+
+    This class defines the same methods as the primary client, so the
+    primary client can load the underlying transport implementation
+    and call it.
+
+    It sends protocol buffers over the wire using gRPC (which is built on
+    top of HTTP/2); the ``grpcio`` package must be installed.
+    """
+
+    _grpc_channel: aio.Channel
+    _stubs: Dict[str, Callable] = {}
+
+    @classmethod
+    def create_channel(cls,
+                       host: str = 'dataplex.googleapis.com',
+                       credentials: Optional[ga_credentials.Credentials] = None,
+                       credentials_file: Optional[str] = None,
+                       scopes: Optional[Sequence[str]] = None,
+                       quota_project_id: Optional[str] = None,
+                       **kwargs) -> aio.Channel:
+        """Create and return a gRPC AsyncIO channel object.
+        Args:
+            host (Optional[str]): The host for the channel to use.
+            credentials (Optional[~.Credentials]): The
+                authorization credentials to attach to requests. These
+                credentials identify this application to the service. If
+                none are specified, the client will attempt to ascertain
+                the credentials from the environment.
+            credentials_file (Optional[str]): A file with credentials that can
+                be loaded with :func:`google.auth.load_credentials_from_file`.
+            scopes (Optional[Sequence[str]]): An optional list of scopes needed for this
+                service. These are only used when credentials are not specified and
+                are passed to :func:`google.auth.default`.
+            quota_project_id (Optional[str]): An optional project to use for billing
+                and quota.
+            kwargs (Optional[dict]): Keyword arguments, which are passed to the
+                channel creation.
+        Returns:
+            aio.Channel: A gRPC AsyncIO channel object.
+        """
+
+        return grpc_helpers_async.create_channel(
+            host,
+            credentials=credentials,
+            credentials_file=credentials_file,
+            quota_project_id=quota_project_id,
+            default_scopes=cls.AUTH_SCOPES,
+            scopes=scopes,
+            default_host=cls.DEFAULT_HOST,
+            **kwargs
+        )
+
+    def __init__(self, *,
+            host: str = 'dataplex.googleapis.com',
+            credentials: Optional[ga_credentials.Credentials] = None,
+            credentials_file: Optional[str] = None,
+            scopes: Optional[Sequence[str]] = None,
+            channel: Optional[Union[aio.Channel, Callable[..., aio.Channel]]] = None,
+            api_mtls_endpoint: Optional[str] = None,
+            client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None,
+            ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None,
+            client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None,
+            quota_project_id: Optional[str] = None,
+            client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO,
+            always_use_jwt_access: Optional[bool] = False,
+            api_audience: Optional[str] = None,
+            ) -> None:
+        """Instantiate the transport.
+
+        Args:
+            host (Optional[str]):
+                 The hostname to connect to (default: 'dataplex.googleapis.com').
+            credentials (Optional[google.auth.credentials.Credentials]): The
+                authorization credentials to attach to requests. These
+                credentials identify the application to the service; if none
+                are specified, the client will attempt to ascertain the
+                credentials from the environment.
+                This argument is ignored if a ``channel`` instance is provided.
+            credentials_file (Optional[str]): A file with credentials that can
+                be loaded with :func:`google.auth.load_credentials_from_file`.
+                This argument is ignored if a ``channel`` instance is provided.
+            scopes (Optional[Sequence[str]]): An optional list of scopes needed for this
+                service. These are only used when credentials are not specified and
+                are passed to :func:`google.auth.default`.
+            channel (Optional[Union[aio.Channel, Callable[..., aio.Channel]]]):
+                A ``Channel`` instance through which to make calls, or a Callable
+                that constructs and returns one. If set to None, ``self.create_channel``
+                is used to create the channel. If a Callable is given, it will be called
+                with the same arguments as used in ``self.create_channel``.
+            api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint.
+                If provided, it overrides the ``host`` argument and tries to create
+                a mutual TLS channel with client SSL credentials from
+                ``client_cert_source`` or application default SSL credentials.
+            client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]):
+                Deprecated. A callback to provide client SSL certificate bytes and
+                private key bytes, both in PEM format. It is ignored if
+                ``api_mtls_endpoint`` is None.
+            ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials
+                for the grpc channel. It is ignored if a ``channel`` instance is provided.
+            client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]):
+                A callback to provide client certificate bytes and private key bytes,
+                both in PEM format. It is used to configure a mutual TLS channel. It is
+                ignored if a ``channel`` instance or ``ssl_channel_credentials`` is provided.
+            quota_project_id (Optional[str]): An optional project to use for billing
+                and quota.
+            client_info (google.api_core.gapic_v1.client_info.ClientInfo):
+                The client info used to send a user-agent string along with
+                API requests. If ``None``, then default info will be used.
+                Generally, you only need to set this if you're developing
+                your own client library.
+            always_use_jwt_access (Optional[bool]): Whether self signed JWT should
+                be used for service account credentials.
+
+        Raises:
+            google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport
+                creation failed for any reason.
+            google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials``
+                and ``credentials_file`` are passed.
+        """
+        self._grpc_channel = None
+        self._ssl_channel_credentials = ssl_channel_credentials
+        self._stubs: Dict[str, Callable] = {}
+        self._operations_client: Optional[operations_v1.OperationsAsyncClient] = None
+
+        if api_mtls_endpoint:
+            warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning)
+        if client_cert_source:
+            warnings.warn("client_cert_source is deprecated", DeprecationWarning)
+
+        if isinstance(channel, aio.Channel):
+            # Ignore credentials if a channel was passed.
+            credentials = None
+            self._ignore_credentials = True
+            # If a channel was explicitly provided, set it.
+            self._grpc_channel = channel
+            self._ssl_channel_credentials = None
+        else:
+            if api_mtls_endpoint:
+                host = api_mtls_endpoint
+
+                # Create SSL credentials with client_cert_source or application
+                # default SSL credentials.
+                if client_cert_source:
+                    cert, key = client_cert_source()
+                    self._ssl_channel_credentials = grpc.ssl_channel_credentials(
+                        certificate_chain=cert, private_key=key
+                    )
+                else:
+                    self._ssl_channel_credentials = SslCredentials().ssl_credentials
+
+            else:
+                if client_cert_source_for_mtls and not ssl_channel_credentials:
+                    cert, key = client_cert_source_for_mtls()
+                    self._ssl_channel_credentials = grpc.ssl_channel_credentials(
+                        certificate_chain=cert, private_key=key
+                    )
+
+        # The base transport sets the host, credentials and scopes
+        super().__init__(
+            host=host,
+            credentials=credentials,
+            credentials_file=credentials_file,
+            scopes=scopes,
+            quota_project_id=quota_project_id,
+            client_info=client_info,
+            always_use_jwt_access=always_use_jwt_access,
+            api_audience=api_audience,
+        )
+
+        if not self._grpc_channel:
+            # initialize with the provided callable or the default channel
+            channel_init = channel or type(self).create_channel
+            self._grpc_channel = channel_init(
+                self._host,
+                # use the credentials which are saved
+                credentials=self._credentials,
+                # Set ``credentials_file`` to ``None`` here as
+                # the credentials that we saved earlier should be used.
+                credentials_file=None,
+                scopes=self._scopes,
+                ssl_credentials=self._ssl_channel_credentials,
+                quota_project_id=quota_project_id,
+                options=[
+                    ("grpc.max_send_message_length", -1),
+                    ("grpc.max_receive_message_length", -1),
+                ],
+            )
+
+        # Wrap messages. This must be done after self._grpc_channel exists
+        self._wrap_with_kind = "kind" in inspect.signature(gapic_v1.method_async.wrap_method).parameters
+        self._prep_wrapped_messages(client_info)
+
+    @property
+    def grpc_channel(self) -> aio.Channel:
+        """Return the channel designed to connect to this service.
+
+        This property caches on the instance; repeated calls return
+        the same channel.
+        """
+        # Return the channel from cache.
+        return self._grpc_channel
+
+    @property
+    def operations_client(self) -> operations_v1.OperationsAsyncClient:
+        """Create the client designed to process long-running operations.
+ + This property caches on the instance; repeated calls return the same + client. + """ + # Quick check: Only create a new client if we do not already have one. + if self._operations_client is None: + self._operations_client = operations_v1.OperationsAsyncClient( + self.grpc_channel + ) + + # Return the client from cache. + return self._operations_client + + @property + def create_lake(self) -> Callable[ + [service.CreateLakeRequest], + Awaitable[operations_pb2.Operation]]: + r"""Return a callable for the create lake method over gRPC. + + Creates a lake resource. + + Returns: + Callable[[~.CreateLakeRequest], + Awaitable[~.Operation]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'create_lake' not in self._stubs: + self._stubs['create_lake'] = self.grpc_channel.unary_unary( + '/google.cloud.dataplex.v1.DataplexService/CreateLake', + request_serializer=service.CreateLakeRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs['create_lake'] + + @property + def update_lake(self) -> Callable[ + [service.UpdateLakeRequest], + Awaitable[operations_pb2.Operation]]: + r"""Return a callable for the update lake method over gRPC. + + Updates a lake resource. + + Returns: + Callable[[~.UpdateLakeRequest], + Awaitable[~.Operation]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'update_lake' not in self._stubs: + self._stubs['update_lake'] = self.grpc_channel.unary_unary( + '/google.cloud.dataplex.v1.DataplexService/UpdateLake', + request_serializer=service.UpdateLakeRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs['update_lake'] + + @property + def delete_lake(self) -> Callable[ + [service.DeleteLakeRequest], + Awaitable[operations_pb2.Operation]]: + r"""Return a callable for the delete lake method over gRPC. + + Deletes a lake resource. All zones within the lake + must be deleted before the lake can be deleted. + + Returns: + Callable[[~.DeleteLakeRequest], + Awaitable[~.Operation]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'delete_lake' not in self._stubs: + self._stubs['delete_lake'] = self.grpc_channel.unary_unary( + '/google.cloud.dataplex.v1.DataplexService/DeleteLake', + request_serializer=service.DeleteLakeRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs['delete_lake'] + + @property + def list_lakes(self) -> Callable[ + [service.ListLakesRequest], + Awaitable[service.ListLakesResponse]]: + r"""Return a callable for the list lakes method over gRPC. + + Lists lake resources in a project and location. + + Returns: + Callable[[~.ListLakesRequest], + Awaitable[~.ListLakesResponse]]: + A function that, when called, will call the underlying RPC + on the server. 
+ """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'list_lakes' not in self._stubs: + self._stubs['list_lakes'] = self.grpc_channel.unary_unary( + '/google.cloud.dataplex.v1.DataplexService/ListLakes', + request_serializer=service.ListLakesRequest.serialize, + response_deserializer=service.ListLakesResponse.deserialize, + ) + return self._stubs['list_lakes'] + + @property + def get_lake(self) -> Callable[ + [service.GetLakeRequest], + Awaitable[resources.Lake]]: + r"""Return a callable for the get lake method over gRPC. + + Retrieves a lake resource. + + Returns: + Callable[[~.GetLakeRequest], + Awaitable[~.Lake]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'get_lake' not in self._stubs: + self._stubs['get_lake'] = self.grpc_channel.unary_unary( + '/google.cloud.dataplex.v1.DataplexService/GetLake', + request_serializer=service.GetLakeRequest.serialize, + response_deserializer=resources.Lake.deserialize, + ) + return self._stubs['get_lake'] + + @property + def list_lake_actions(self) -> Callable[ + [service.ListLakeActionsRequest], + Awaitable[service.ListActionsResponse]]: + r"""Return a callable for the list lake actions method over gRPC. + + Lists action resources in a lake. + + Returns: + Callable[[~.ListLakeActionsRequest], + Awaitable[~.ListActionsResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'list_lake_actions' not in self._stubs: + self._stubs['list_lake_actions'] = self.grpc_channel.unary_unary( + '/google.cloud.dataplex.v1.DataplexService/ListLakeActions', + request_serializer=service.ListLakeActionsRequest.serialize, + response_deserializer=service.ListActionsResponse.deserialize, + ) + return self._stubs['list_lake_actions'] + + @property + def create_zone(self) -> Callable[ + [service.CreateZoneRequest], + Awaitable[operations_pb2.Operation]]: + r"""Return a callable for the create zone method over gRPC. + + Creates a zone resource within a lake. + + Returns: + Callable[[~.CreateZoneRequest], + Awaitable[~.Operation]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'create_zone' not in self._stubs: + self._stubs['create_zone'] = self.grpc_channel.unary_unary( + '/google.cloud.dataplex.v1.DataplexService/CreateZone', + request_serializer=service.CreateZoneRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs['create_zone'] + + @property + def update_zone(self) -> Callable[ + [service.UpdateZoneRequest], + Awaitable[operations_pb2.Operation]]: + r"""Return a callable for the update zone method over gRPC. + + Updates a zone resource. 
+ + Returns: + Callable[[~.UpdateZoneRequest], + Awaitable[~.Operation]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'update_zone' not in self._stubs: + self._stubs['update_zone'] = self.grpc_channel.unary_unary( + '/google.cloud.dataplex.v1.DataplexService/UpdateZone', + request_serializer=service.UpdateZoneRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs['update_zone'] + + @property + def delete_zone(self) -> Callable[ + [service.DeleteZoneRequest], + Awaitable[operations_pb2.Operation]]: + r"""Return a callable for the delete zone method over gRPC. + + Deletes a zone resource. All assets within a zone + must be deleted before the zone can be deleted. + + Returns: + Callable[[~.DeleteZoneRequest], + Awaitable[~.Operation]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'delete_zone' not in self._stubs: + self._stubs['delete_zone'] = self.grpc_channel.unary_unary( + '/google.cloud.dataplex.v1.DataplexService/DeleteZone', + request_serializer=service.DeleteZoneRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs['delete_zone'] + + @property + def list_zones(self) -> Callable[ + [service.ListZonesRequest], + Awaitable[service.ListZonesResponse]]: + r"""Return a callable for the list zones method over gRPC. + + Lists zone resources in a lake. + + Returns: + Callable[[~.ListZonesRequest], + Awaitable[~.ListZonesResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'list_zones' not in self._stubs: + self._stubs['list_zones'] = self.grpc_channel.unary_unary( + '/google.cloud.dataplex.v1.DataplexService/ListZones', + request_serializer=service.ListZonesRequest.serialize, + response_deserializer=service.ListZonesResponse.deserialize, + ) + return self._stubs['list_zones'] + + @property + def get_zone(self) -> Callable[ + [service.GetZoneRequest], + Awaitable[resources.Zone]]: + r"""Return a callable for the get zone method over gRPC. + + Retrieves a zone resource. + + Returns: + Callable[[~.GetZoneRequest], + Awaitable[~.Zone]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'get_zone' not in self._stubs: + self._stubs['get_zone'] = self.grpc_channel.unary_unary( + '/google.cloud.dataplex.v1.DataplexService/GetZone', + request_serializer=service.GetZoneRequest.serialize, + response_deserializer=resources.Zone.deserialize, + ) + return self._stubs['get_zone'] + + @property + def list_zone_actions(self) -> Callable[ + [service.ListZoneActionsRequest], + Awaitable[service.ListActionsResponse]]: + r"""Return a callable for the list zone actions method over gRPC. 
+ + Lists action resources in a zone. + + Returns: + Callable[[~.ListZoneActionsRequest], + Awaitable[~.ListActionsResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'list_zone_actions' not in self._stubs: + self._stubs['list_zone_actions'] = self.grpc_channel.unary_unary( + '/google.cloud.dataplex.v1.DataplexService/ListZoneActions', + request_serializer=service.ListZoneActionsRequest.serialize, + response_deserializer=service.ListActionsResponse.deserialize, + ) + return self._stubs['list_zone_actions'] + + @property + def create_asset(self) -> Callable[ + [service.CreateAssetRequest], + Awaitable[operations_pb2.Operation]]: + r"""Return a callable for the create asset method over gRPC. + + Creates an asset resource. + + Returns: + Callable[[~.CreateAssetRequest], + Awaitable[~.Operation]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'create_asset' not in self._stubs: + self._stubs['create_asset'] = self.grpc_channel.unary_unary( + '/google.cloud.dataplex.v1.DataplexService/CreateAsset', + request_serializer=service.CreateAssetRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs['create_asset'] + + @property + def update_asset(self) -> Callable[ + [service.UpdateAssetRequest], + Awaitable[operations_pb2.Operation]]: + r"""Return a callable for the update asset method over gRPC. + + Updates an asset resource. + + Returns: + Callable[[~.UpdateAssetRequest], + Awaitable[~.Operation]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'update_asset' not in self._stubs: + self._stubs['update_asset'] = self.grpc_channel.unary_unary( + '/google.cloud.dataplex.v1.DataplexService/UpdateAsset', + request_serializer=service.UpdateAssetRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs['update_asset'] + + @property + def delete_asset(self) -> Callable[ + [service.DeleteAssetRequest], + Awaitable[operations_pb2.Operation]]: + r"""Return a callable for the delete asset method over gRPC. + + Deletes an asset resource. The referenced storage + resource is detached (default) or deleted based on the + associated Lifecycle policy. + + Returns: + Callable[[~.DeleteAssetRequest], + Awaitable[~.Operation]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
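+        # Editorial sketch (placeholder names, not generated API): every stub
+        # property in this transport memoizes the same way --
+        #
+        #     if rpc_name not in self._stubs:
+        #         self._stubs[rpc_name] = self.grpc_channel.unary_unary(
+        #             method_path,
+        #             request_serializer=...,
+        #             response_deserializer=...,
+        #         )
+        #     return self._stubs[rpc_name]
+        #
+        # so the gRPC stub is built on first access and reused afterwards.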
+ if 'delete_asset' not in self._stubs: + self._stubs['delete_asset'] = self.grpc_channel.unary_unary( + '/google.cloud.dataplex.v1.DataplexService/DeleteAsset', + request_serializer=service.DeleteAssetRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs['delete_asset'] + + @property + def list_assets(self) -> Callable[ + [service.ListAssetsRequest], + Awaitable[service.ListAssetsResponse]]: + r"""Return a callable for the list assets method over gRPC. + + Lists asset resources in a zone. + + Returns: + Callable[[~.ListAssetsRequest], + Awaitable[~.ListAssetsResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'list_assets' not in self._stubs: + self._stubs['list_assets'] = self.grpc_channel.unary_unary( + '/google.cloud.dataplex.v1.DataplexService/ListAssets', + request_serializer=service.ListAssetsRequest.serialize, + response_deserializer=service.ListAssetsResponse.deserialize, + ) + return self._stubs['list_assets'] + + @property + def get_asset(self) -> Callable[ + [service.GetAssetRequest], + Awaitable[resources.Asset]]: + r"""Return a callable for the get asset method over gRPC. + + Retrieves an asset resource. + + Returns: + Callable[[~.GetAssetRequest], + Awaitable[~.Asset]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'get_asset' not in self._stubs: + self._stubs['get_asset'] = self.grpc_channel.unary_unary( + '/google.cloud.dataplex.v1.DataplexService/GetAsset', + request_serializer=service.GetAssetRequest.serialize, + response_deserializer=resources.Asset.deserialize, + ) + return self._stubs['get_asset'] + + @property + def list_asset_actions(self) -> Callable[ + [service.ListAssetActionsRequest], + Awaitable[service.ListActionsResponse]]: + r"""Return a callable for the list asset actions method over gRPC. + + Lists action resources in an asset. + + Returns: + Callable[[~.ListAssetActionsRequest], + Awaitable[~.ListActionsResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'list_asset_actions' not in self._stubs: + self._stubs['list_asset_actions'] = self.grpc_channel.unary_unary( + '/google.cloud.dataplex.v1.DataplexService/ListAssetActions', + request_serializer=service.ListAssetActionsRequest.serialize, + response_deserializer=service.ListActionsResponse.deserialize, + ) + return self._stubs['list_asset_actions'] + + @property + def create_task(self) -> Callable[ + [service.CreateTaskRequest], + Awaitable[operations_pb2.Operation]]: + r"""Return a callable for the create task method over gRPC. + + Creates a task resource within a lake. + + Returns: + Callable[[~.CreateTaskRequest], + Awaitable[~.Operation]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. 
+ # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'create_task' not in self._stubs: + self._stubs['create_task'] = self.grpc_channel.unary_unary( + '/google.cloud.dataplex.v1.DataplexService/CreateTask', + request_serializer=service.CreateTaskRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs['create_task'] + + @property + def update_task(self) -> Callable[ + [service.UpdateTaskRequest], + Awaitable[operations_pb2.Operation]]: + r"""Return a callable for the update task method over gRPC. + + Update the task resource. + + Returns: + Callable[[~.UpdateTaskRequest], + Awaitable[~.Operation]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'update_task' not in self._stubs: + self._stubs['update_task'] = self.grpc_channel.unary_unary( + '/google.cloud.dataplex.v1.DataplexService/UpdateTask', + request_serializer=service.UpdateTaskRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs['update_task'] + + @property + def delete_task(self) -> Callable[ + [service.DeleteTaskRequest], + Awaitable[operations_pb2.Operation]]: + r"""Return a callable for the delete task method over gRPC. + + Delete the task resource. + + Returns: + Callable[[~.DeleteTaskRequest], + Awaitable[~.Operation]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'delete_task' not in self._stubs: + self._stubs['delete_task'] = self.grpc_channel.unary_unary( + '/google.cloud.dataplex.v1.DataplexService/DeleteTask', + request_serializer=service.DeleteTaskRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs['delete_task'] + + @property + def list_tasks(self) -> Callable[ + [service.ListTasksRequest], + Awaitable[service.ListTasksResponse]]: + r"""Return a callable for the list tasks method over gRPC. + + Lists tasks under the given lake. + + Returns: + Callable[[~.ListTasksRequest], + Awaitable[~.ListTasksResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'list_tasks' not in self._stubs: + self._stubs['list_tasks'] = self.grpc_channel.unary_unary( + '/google.cloud.dataplex.v1.DataplexService/ListTasks', + request_serializer=service.ListTasksRequest.serialize, + response_deserializer=service.ListTasksResponse.deserialize, + ) + return self._stubs['list_tasks'] + + @property + def get_task(self) -> Callable[ + [service.GetTaskRequest], + Awaitable[tasks.Task]]: + r"""Return a callable for the get task method over gRPC. + + Get task resource. + + Returns: + Callable[[~.GetTaskRequest], + Awaitable[~.Task]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. 
+ # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'get_task' not in self._stubs: + self._stubs['get_task'] = self.grpc_channel.unary_unary( + '/google.cloud.dataplex.v1.DataplexService/GetTask', + request_serializer=service.GetTaskRequest.serialize, + response_deserializer=tasks.Task.deserialize, + ) + return self._stubs['get_task'] + + @property + def list_jobs(self) -> Callable[ + [service.ListJobsRequest], + Awaitable[service.ListJobsResponse]]: + r"""Return a callable for the list jobs method over gRPC. + + Lists Jobs under the given task. + + Returns: + Callable[[~.ListJobsRequest], + Awaitable[~.ListJobsResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'list_jobs' not in self._stubs: + self._stubs['list_jobs'] = self.grpc_channel.unary_unary( + '/google.cloud.dataplex.v1.DataplexService/ListJobs', + request_serializer=service.ListJobsRequest.serialize, + response_deserializer=service.ListJobsResponse.deserialize, + ) + return self._stubs['list_jobs'] + + @property + def run_task(self) -> Callable[ + [service.RunTaskRequest], + Awaitable[service.RunTaskResponse]]: + r"""Return a callable for the run task method over gRPC. + + Run an on demand execution of a Task. + + Returns: + Callable[[~.RunTaskRequest], + Awaitable[~.RunTaskResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'run_task' not in self._stubs: + self._stubs['run_task'] = self.grpc_channel.unary_unary( + '/google.cloud.dataplex.v1.DataplexService/RunTask', + request_serializer=service.RunTaskRequest.serialize, + response_deserializer=service.RunTaskResponse.deserialize, + ) + return self._stubs['run_task'] + + @property + def get_job(self) -> Callable[ + [service.GetJobRequest], + Awaitable[tasks.Job]]: + r"""Return a callable for the get job method over gRPC. + + Get job resource. + + Returns: + Callable[[~.GetJobRequest], + Awaitable[~.Job]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'get_job' not in self._stubs: + self._stubs['get_job'] = self.grpc_channel.unary_unary( + '/google.cloud.dataplex.v1.DataplexService/GetJob', + request_serializer=service.GetJobRequest.serialize, + response_deserializer=tasks.Job.deserialize, + ) + return self._stubs['get_job'] + + @property + def cancel_job(self) -> Callable[ + [service.CancelJobRequest], + Awaitable[empty_pb2.Empty]]: + r"""Return a callable for the cancel job method over gRPC. + + Cancel jobs running for the task resource. + + Returns: + Callable[[~.CancelJobRequest], + Awaitable[~.Empty]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
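+        # Note: CancelJob has no meaningful response payload, so the stub
+        # below deserializes google.protobuf.Empty rather than a
+        # long-running Operation.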
+ if 'cancel_job' not in self._stubs: + self._stubs['cancel_job'] = self.grpc_channel.unary_unary( + '/google.cloud.dataplex.v1.DataplexService/CancelJob', + request_serializer=service.CancelJobRequest.serialize, + response_deserializer=empty_pb2.Empty.FromString, + ) + return self._stubs['cancel_job'] + + @property + def create_environment(self) -> Callable[ + [service.CreateEnvironmentRequest], + Awaitable[operations_pb2.Operation]]: + r"""Return a callable for the create environment method over gRPC. + + Create an environment resource. + + Returns: + Callable[[~.CreateEnvironmentRequest], + Awaitable[~.Operation]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'create_environment' not in self._stubs: + self._stubs['create_environment'] = self.grpc_channel.unary_unary( + '/google.cloud.dataplex.v1.DataplexService/CreateEnvironment', + request_serializer=service.CreateEnvironmentRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs['create_environment'] + + @property + def update_environment(self) -> Callable[ + [service.UpdateEnvironmentRequest], + Awaitable[operations_pb2.Operation]]: + r"""Return a callable for the update environment method over gRPC. + + Update the environment resource. + + Returns: + Callable[[~.UpdateEnvironmentRequest], + Awaitable[~.Operation]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'update_environment' not in self._stubs: + self._stubs['update_environment'] = self.grpc_channel.unary_unary( + '/google.cloud.dataplex.v1.DataplexService/UpdateEnvironment', + request_serializer=service.UpdateEnvironmentRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs['update_environment'] + + @property + def delete_environment(self) -> Callable[ + [service.DeleteEnvironmentRequest], + Awaitable[operations_pb2.Operation]]: + r"""Return a callable for the delete environment method over gRPC. + + Delete the environment resource. All the child + resources must have been deleted before environment + deletion can be initiated. + + Returns: + Callable[[~.DeleteEnvironmentRequest], + Awaitable[~.Operation]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'delete_environment' not in self._stubs: + self._stubs['delete_environment'] = self.grpc_channel.unary_unary( + '/google.cloud.dataplex.v1.DataplexService/DeleteEnvironment', + request_serializer=service.DeleteEnvironmentRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs['delete_environment'] + + @property + def list_environments(self) -> Callable[ + [service.ListEnvironmentsRequest], + Awaitable[service.ListEnvironmentsResponse]]: + r"""Return a callable for the list environments method over gRPC. + + Lists environments under the given lake. 
+ + Returns: + Callable[[~.ListEnvironmentsRequest], + Awaitable[~.ListEnvironmentsResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'list_environments' not in self._stubs: + self._stubs['list_environments'] = self.grpc_channel.unary_unary( + '/google.cloud.dataplex.v1.DataplexService/ListEnvironments', + request_serializer=service.ListEnvironmentsRequest.serialize, + response_deserializer=service.ListEnvironmentsResponse.deserialize, + ) + return self._stubs['list_environments'] + + @property + def get_environment(self) -> Callable[ + [service.GetEnvironmentRequest], + Awaitable[analyze.Environment]]: + r"""Return a callable for the get environment method over gRPC. + + Get environment resource. + + Returns: + Callable[[~.GetEnvironmentRequest], + Awaitable[~.Environment]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'get_environment' not in self._stubs: + self._stubs['get_environment'] = self.grpc_channel.unary_unary( + '/google.cloud.dataplex.v1.DataplexService/GetEnvironment', + request_serializer=service.GetEnvironmentRequest.serialize, + response_deserializer=analyze.Environment.deserialize, + ) + return self._stubs['get_environment'] + + @property + def list_sessions(self) -> Callable[ + [service.ListSessionsRequest], + Awaitable[service.ListSessionsResponse]]: + r"""Return a callable for the list sessions method over gRPC. + + Lists session resources in an environment. + + Returns: + Callable[[~.ListSessionsRequest], + Awaitable[~.ListSessionsResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
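+        # Note: the transport exposes ListSessions as a plain unary call;
+        # page-by-page iteration is layered on top by the service client,
+        # which wraps list responses in pagers.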
+ if 'list_sessions' not in self._stubs: + self._stubs['list_sessions'] = self.grpc_channel.unary_unary( + '/google.cloud.dataplex.v1.DataplexService/ListSessions', + request_serializer=service.ListSessionsRequest.serialize, + response_deserializer=service.ListSessionsResponse.deserialize, + ) + return self._stubs['list_sessions'] + + def _prep_wrapped_messages(self, client_info): + """ Precompute the wrapped methods, overriding the base class method to use async wrappers.""" + self._wrapped_methods = { + self.create_lake: self._wrap_method( + self.create_lake, + default_timeout=60.0, + client_info=client_info, + ), + self.update_lake: self._wrap_method( + self.update_lake, + default_timeout=60.0, + client_info=client_info, + ), + self.delete_lake: self._wrap_method( + self.delete_lake, + default_timeout=60.0, + client_info=client_info, + ), + self.list_lakes: self._wrap_method( + self.list_lakes, + default_retry=retries.AsyncRetry( + initial=1.0, + maximum=10.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=client_info, + ), + self.get_lake: self._wrap_method( + self.get_lake, + default_retry=retries.AsyncRetry( + initial=1.0, + maximum=10.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=client_info, + ), + self.list_lake_actions: self._wrap_method( + self.list_lake_actions, + default_retry=retries.AsyncRetry( + initial=1.0, + maximum=10.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=client_info, + ), + self.create_zone: self._wrap_method( + self.create_zone, + default_timeout=60.0, + client_info=client_info, + ), + self.update_zone: self._wrap_method( + self.update_zone, + default_timeout=60.0, + client_info=client_info, + ), + self.delete_zone: self._wrap_method( + self.delete_zone, + default_timeout=60.0, + client_info=client_info, + ), + self.list_zones: self._wrap_method( + self.list_zones, + default_retry=retries.AsyncRetry( + initial=1.0, + maximum=10.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=client_info, + ), + self.get_zone: self._wrap_method( + self.get_zone, + default_retry=retries.AsyncRetry( + initial=1.0, + maximum=10.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=client_info, + ), + self.list_zone_actions: self._wrap_method( + self.list_zone_actions, + default_retry=retries.AsyncRetry( + initial=1.0, + maximum=10.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=client_info, + ), + self.create_asset: self._wrap_method( + self.create_asset, + default_timeout=60.0, + client_info=client_info, + ), + self.update_asset: self._wrap_method( + self.update_asset, + default_timeout=60.0, + client_info=client_info, + ), + self.delete_asset: self._wrap_method( + self.delete_asset, + default_timeout=60.0, + client_info=client_info, + ), + self.list_assets: self._wrap_method( + self.list_assets, + default_retry=retries.AsyncRetry( + initial=1.0, + maximum=10.0, + multiplier=1.3, + 
predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=client_info, + ), + self.get_asset: self._wrap_method( + self.get_asset, + default_retry=retries.AsyncRetry( + initial=1.0, + maximum=10.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=client_info, + ), + self.list_asset_actions: self._wrap_method( + self.list_asset_actions, + default_retry=retries.AsyncRetry( + initial=1.0, + maximum=10.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=client_info, + ), + self.create_task: self._wrap_method( + self.create_task, + default_timeout=60.0, + client_info=client_info, + ), + self.update_task: self._wrap_method( + self.update_task, + default_timeout=60.0, + client_info=client_info, + ), + self.delete_task: self._wrap_method( + self.delete_task, + default_timeout=60.0, + client_info=client_info, + ), + self.list_tasks: self._wrap_method( + self.list_tasks, + default_retry=retries.AsyncRetry( + initial=1.0, + maximum=10.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=client_info, + ), + self.get_task: self._wrap_method( + self.get_task, + default_retry=retries.AsyncRetry( + initial=1.0, + maximum=10.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=client_info, + ), + self.list_jobs: self._wrap_method( + self.list_jobs, + default_retry=retries.AsyncRetry( + initial=1.0, + maximum=10.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=client_info, + ), + self.run_task: self._wrap_method( + self.run_task, + default_timeout=None, + client_info=client_info, + ), + self.get_job: self._wrap_method( + self.get_job, + default_retry=retries.AsyncRetry( + initial=1.0, + maximum=10.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=client_info, + ), + self.cancel_job: self._wrap_method( + self.cancel_job, + default_timeout=60.0, + client_info=client_info, + ), + self.create_environment: self._wrap_method( + self.create_environment, + default_timeout=60.0, + client_info=client_info, + ), + self.update_environment: self._wrap_method( + self.update_environment, + default_timeout=60.0, + client_info=client_info, + ), + self.delete_environment: self._wrap_method( + self.delete_environment, + default_timeout=60.0, + client_info=client_info, + ), + self.list_environments: self._wrap_method( + self.list_environments, + default_retry=retries.AsyncRetry( + initial=1.0, + maximum=10.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=client_info, + ), + self.get_environment: self._wrap_method( + self.get_environment, + default_retry=retries.AsyncRetry( + initial=1.0, + maximum=10.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=client_info, + ), 
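+            # Editorial note on the table above: read-style RPCs (get_*/
+            # list_*) carry a default AsyncRetry that fires only on
+            # ServiceUnavailable, with delays growing geometrically --
+            # roughly 1.0s, 1.3s, 1.69s, ... capped at 10s per attempt --
+            # until the 60s deadline lapses. Mutating RPCs (create_*/
+            # update_*/delete_*) get a default timeout but no default retry.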
+ self.list_sessions: self._wrap_method( + self.list_sessions, + default_timeout=None, + client_info=client_info, + ), + self.get_location: self._wrap_method( + self.get_location, + default_timeout=None, + client_info=client_info, + ), + self.list_locations: self._wrap_method( + self.list_locations, + default_timeout=None, + client_info=client_info, + ), + self.cancel_operation: self._wrap_method( + self.cancel_operation, + default_timeout=None, + client_info=client_info, + ), + self.delete_operation: self._wrap_method( + self.delete_operation, + default_timeout=None, + client_info=client_info, + ), + self.get_operation: self._wrap_method( + self.get_operation, + default_timeout=None, + client_info=client_info, + ), + self.list_operations: self._wrap_method( + self.list_operations, + default_timeout=None, + client_info=client_info, + ), + } + + def _wrap_method(self, func, *args, **kwargs): + if self._wrap_with_kind: # pragma: NO COVER + kwargs["kind"] = self.kind + return gapic_v1.method_async.wrap_method(func, *args, **kwargs) + + def close(self): + return self.grpc_channel.close() + + @property + def kind(self) -> str: + return "grpc_asyncio" + + @property + def delete_operation( + self, + ) -> Callable[[operations_pb2.DeleteOperationRequest], None]: + r"""Return a callable for the delete_operation method over gRPC. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "delete_operation" not in self._stubs: + self._stubs["delete_operation"] = self.grpc_channel.unary_unary( + "/google.longrunning.Operations/DeleteOperation", + request_serializer=operations_pb2.DeleteOperationRequest.SerializeToString, + response_deserializer=None, + ) + return self._stubs["delete_operation"] + + @property + def cancel_operation( + self, + ) -> Callable[[operations_pb2.CancelOperationRequest], None]: + r"""Return a callable for the cancel_operation method over gRPC. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "cancel_operation" not in self._stubs: + self._stubs["cancel_operation"] = self.grpc_channel.unary_unary( + "/google.longrunning.Operations/CancelOperation", + request_serializer=operations_pb2.CancelOperationRequest.SerializeToString, + response_deserializer=None, + ) + return self._stubs["cancel_operation"] + + @property + def get_operation( + self, + ) -> Callable[[operations_pb2.GetOperationRequest], operations_pb2.Operation]: + r"""Return a callable for the get_operation method over gRPC. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_operation" not in self._stubs: + self._stubs["get_operation"] = self.grpc_channel.unary_unary( + "/google.longrunning.Operations/GetOperation", + request_serializer=operations_pb2.GetOperationRequest.SerializeToString, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["get_operation"] + + @property + def list_operations( + self, + ) -> Callable[[operations_pb2.ListOperationsRequest], operations_pb2.ListOperationsResponse]: + r"""Return a callable for the list_operations method over gRPC. 
+ """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "list_operations" not in self._stubs: + self._stubs["list_operations"] = self.grpc_channel.unary_unary( + "/google.longrunning.Operations/ListOperations", + request_serializer=operations_pb2.ListOperationsRequest.SerializeToString, + response_deserializer=operations_pb2.ListOperationsResponse.FromString, + ) + return self._stubs["list_operations"] + + @property + def list_locations( + self, + ) -> Callable[[locations_pb2.ListLocationsRequest], locations_pb2.ListLocationsResponse]: + r"""Return a callable for the list locations method over gRPC. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "list_locations" not in self._stubs: + self._stubs["list_locations"] = self.grpc_channel.unary_unary( + "/google.cloud.location.Locations/ListLocations", + request_serializer=locations_pb2.ListLocationsRequest.SerializeToString, + response_deserializer=locations_pb2.ListLocationsResponse.FromString, + ) + return self._stubs["list_locations"] + + @property + def get_location( + self, + ) -> Callable[[locations_pb2.GetLocationRequest], locations_pb2.Location]: + r"""Return a callable for the list locations method over gRPC. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_location" not in self._stubs: + self._stubs["get_location"] = self.grpc_channel.unary_unary( + "/google.cloud.location.Locations/GetLocation", + request_serializer=locations_pb2.GetLocationRequest.SerializeToString, + response_deserializer=locations_pb2.Location.FromString, + ) + return self._stubs["get_location"] + + +__all__ = ( + 'DataplexServiceGrpcAsyncIOTransport', +) diff --git a/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/metadata_service/__init__.py b/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/metadata_service/__init__.py new file mode 100644 index 000000000000..8645ebc67c27 --- /dev/null +++ b/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/metadata_service/__init__.py @@ -0,0 +1,22 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from .client import MetadataServiceClient +from .async_client import MetadataServiceAsyncClient + +__all__ = ( + 'MetadataServiceClient', + 'MetadataServiceAsyncClient', +) diff --git a/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/metadata_service/async_client.py b/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/metadata_service/async_client.py new file mode 100644 index 000000000000..5ff1ef4a566b --- /dev/null +++ b/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/metadata_service/async_client.py @@ -0,0 +1,1507 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from collections import OrderedDict +import re +from typing import Dict, Callable, Mapping, MutableMapping, MutableSequence, Optional, Sequence, Tuple, Type, Union + +from google.cloud.dataplex_v1 import gapic_version as package_version + +from google.api_core.client_options import ClientOptions +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry_async as retries +from google.auth import credentials as ga_credentials # type: ignore +from google.oauth2 import service_account # type: ignore + + +try: + OptionalRetry = Union[retries.AsyncRetry, gapic_v1.method._MethodDefault, None] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.AsyncRetry, object, None] # type: ignore + +from google.cloud.dataplex_v1.services.metadata_service import pagers +from google.cloud.dataplex_v1.types import metadata_ +from google.cloud.location import locations_pb2 # type: ignore +from google.iam.v1 import iam_policy_pb2 # type: ignore +from google.iam.v1 import policy_pb2 # type: ignore +from google.longrunning import operations_pb2 # type: ignore +from google.protobuf import timestamp_pb2 # type: ignore +from .transports.base import MetadataServiceTransport, DEFAULT_CLIENT_INFO +from .transports.grpc_asyncio import MetadataServiceGrpcAsyncIOTransport +from .client import MetadataServiceClient + + +class MetadataServiceAsyncClient: + """Metadata service manages metadata resources such as tables, + filesets and partitions. + """ + + _client: MetadataServiceClient + + # Copy defaults from the synchronous client for use here. + # Note: DEFAULT_ENDPOINT is deprecated. Use _DEFAULT_ENDPOINT_TEMPLATE instead. 
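+    # Design note (editorial): the async client is a thin facade over an
+    # internal synchronous MetadataServiceClient, re-exporting its path
+    # helpers and defaults below while defaulting the transport to
+    # "grpc_asyncio" for the actual calls.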
+    DEFAULT_ENDPOINT = MetadataServiceClient.DEFAULT_ENDPOINT
+    DEFAULT_MTLS_ENDPOINT = MetadataServiceClient.DEFAULT_MTLS_ENDPOINT
+    _DEFAULT_ENDPOINT_TEMPLATE = MetadataServiceClient._DEFAULT_ENDPOINT_TEMPLATE
+    _DEFAULT_UNIVERSE = MetadataServiceClient._DEFAULT_UNIVERSE
+
+    entity_path = staticmethod(MetadataServiceClient.entity_path)
+    parse_entity_path = staticmethod(MetadataServiceClient.parse_entity_path)
+    partition_path = staticmethod(MetadataServiceClient.partition_path)
+    parse_partition_path = staticmethod(MetadataServiceClient.parse_partition_path)
+    zone_path = staticmethod(MetadataServiceClient.zone_path)
+    parse_zone_path = staticmethod(MetadataServiceClient.parse_zone_path)
+    common_billing_account_path = staticmethod(MetadataServiceClient.common_billing_account_path)
+    parse_common_billing_account_path = staticmethod(MetadataServiceClient.parse_common_billing_account_path)
+    common_folder_path = staticmethod(MetadataServiceClient.common_folder_path)
+    parse_common_folder_path = staticmethod(MetadataServiceClient.parse_common_folder_path)
+    common_organization_path = staticmethod(MetadataServiceClient.common_organization_path)
+    parse_common_organization_path = staticmethod(MetadataServiceClient.parse_common_organization_path)
+    common_project_path = staticmethod(MetadataServiceClient.common_project_path)
+    parse_common_project_path = staticmethod(MetadataServiceClient.parse_common_project_path)
+    common_location_path = staticmethod(MetadataServiceClient.common_location_path)
+    parse_common_location_path = staticmethod(MetadataServiceClient.parse_common_location_path)
+
+    @classmethod
+    def from_service_account_info(cls, info: dict, *args, **kwargs):
+        """Creates an instance of this client using the provided credentials
+            info.
+
+        Args:
+            info (dict): The service account private key info.
+            args: Additional arguments to pass to the constructor.
+            kwargs: Additional arguments to pass to the constructor.
+
+        Returns:
+            MetadataServiceAsyncClient: The constructed client.
+        """
+        return MetadataServiceClient.from_service_account_info.__func__(MetadataServiceAsyncClient, info, *args, **kwargs)  # type: ignore
+
+    @classmethod
+    def from_service_account_file(cls, filename: str, *args, **kwargs):
+        """Creates an instance of this client using the provided credentials
+            file.
+
+        Args:
+            filename (str): The path to the service account private key json
+                file.
+            args: Additional arguments to pass to the constructor.
+            kwargs: Additional arguments to pass to the constructor.
+
+        Returns:
+            MetadataServiceAsyncClient: The constructed client.
+        """
+        return MetadataServiceClient.from_service_account_file.__func__(MetadataServiceAsyncClient, filename, *args, **kwargs)  # type: ignore
+
+    from_service_account_json = from_service_account_file
+
+    @classmethod
+    def get_mtls_endpoint_and_cert_source(cls, client_options: Optional[ClientOptions] = None):
+        """Return the API endpoint and client cert source for mutual TLS.
+
+        The client cert source is determined in the following order:
+        (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the
+        client cert source is None.
+        (2) if `client_options.client_cert_source` is provided, use the provided one; if the
+        default client cert source exists, use the default one; otherwise the client cert
+        source is None.
+
+        The API endpoint is determined in the following order:
+        (1) if `client_options.api_endpoint` is provided, use the provided one.
+        (2) if `GOOGLE_API_USE_MTLS_ENDPOINT` environment variable is "always", use the
+        default mTLS endpoint; if the environment variable is "never", use the default API
+        endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise
+        use the default API endpoint.
+
+        More details can be found at https://google.aip.dev/auth/4114.
+
+        Args:
+            client_options (google.api_core.client_options.ClientOptions): Custom options for the
+                client. Only the `api_endpoint` and `client_cert_source` properties may be used
+                in this method.
+
+        Returns:
+            Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the
+                client cert source to use.
+
+        Raises:
+            google.auth.exceptions.MutualTLSChannelError: If any errors happen.
+        """
+        return MetadataServiceClient.get_mtls_endpoint_and_cert_source(client_options)  # type: ignore
+
+    @property
+    def transport(self) -> MetadataServiceTransport:
+        """Returns the transport used by the client instance.
+
+        Returns:
+            MetadataServiceTransport: The transport used by the client instance.
+        """
+        return self._client.transport
+
+    @property
+    def api_endpoint(self):
+        """Return the API endpoint used by the client instance.
+
+        Returns:
+            str: The API endpoint used by the client instance.
+        """
+        return self._client._api_endpoint
+
+    @property
+    def universe_domain(self) -> str:
+        """Return the universe domain used by the client instance.
+
+        Returns:
+            str: The universe domain used
+                by the client instance.
+        """
+        return self._client._universe_domain
+
+    get_transport_class = MetadataServiceClient.get_transport_class
+
+    def __init__(self, *,
+            credentials: Optional[ga_credentials.Credentials] = None,
+            transport: Optional[Union[str, MetadataServiceTransport, Callable[..., MetadataServiceTransport]]] = "grpc_asyncio",
+            client_options: Optional[ClientOptions] = None,
+            client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO,
+            ) -> None:
+        """Instantiates the metadata service async client.
+
+        Args:
+            credentials (Optional[google.auth.credentials.Credentials]): The
+                authorization credentials to attach to requests. These
+                credentials identify the application to the service; if none
+                are specified, the client will attempt to ascertain the
+                credentials from the environment.
+            transport (Optional[Union[str,MetadataServiceTransport,Callable[..., MetadataServiceTransport]]]):
+                The transport to use, or a Callable that constructs and returns a new transport to use.
+                If a Callable is given, it will be called with the same set of initialization
+                arguments as used in the MetadataServiceTransport constructor.
+                If set to None, a transport is chosen automatically.
+            client_options (Optional[Union[google.api_core.client_options.ClientOptions, dict]]):
+                Custom options for the client.
+
+                1. The ``api_endpoint`` property can be used to override the
+                default endpoint provided by the client when ``transport`` is
+                not explicitly provided. Only if this property is not set and
+                ``transport`` was not explicitly provided, the endpoint is
+                determined by the GOOGLE_API_USE_MTLS_ENDPOINT environment
+                variable, which can have one of the following values:
+                "always" (always use the default mTLS endpoint), "never" (always
+                use the default regular endpoint) and "auto" (auto-switch to the
+                default mTLS endpoint if client certificate is present; this is
+                the default value).
+
+                2.
If the GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable + is "true", then the ``client_cert_source`` property can be used + to provide a client certificate for mTLS transport. If + not provided, the default SSL client certificate will be used if + present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not + set, no client certificate will be used. + + 3. The ``universe_domain`` property can be used to override the + default "googleapis.com" universe. Note that ``api_endpoint`` + property still takes precedence; and ``universe_domain`` is + currently not supported for mTLS. + + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + + Raises: + google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport + creation failed for any reason. + """ + self._client = MetadataServiceClient( + credentials=credentials, + transport=transport, + client_options=client_options, + client_info=client_info, + + ) + + async def create_entity(self, + request: Optional[Union[metadata_.CreateEntityRequest, dict]] = None, + *, + parent: Optional[str] = None, + entity: Optional[metadata_.Entity] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> metadata_.Entity: + r"""Create a metadata entity. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dataplex_v1 + + async def sample_create_entity(): + # Create a client + client = dataplex_v1.MetadataServiceAsyncClient() + + # Initialize request argument(s) + entity = dataplex_v1.Entity() + entity.id = "id_value" + entity.type_ = "FILESET" + entity.asset = "asset_value" + entity.data_path = "data_path_value" + entity.system = "BIGQUERY" + entity.format_.mime_type = "mime_type_value" + entity.schema.user_managed = True + + request = dataplex_v1.CreateEntityRequest( + parent="parent_value", + entity=entity, + ) + + # Make the request + response = await client.create_entity(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.dataplex_v1.types.CreateEntityRequest, dict]]): + The request object. Create a metadata entity request. + parent (:class:`str`): + Required. The resource name of the parent zone: + ``projects/{project_number}/locations/{location_id}/lakes/{lake_id}/zones/{zone_id}``. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + entity (:class:`google.cloud.dataplex_v1.types.Entity`): + Required. Entity resource. + This corresponds to the ``entity`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. 
+
+            metadata (Sequence[Tuple[str, str]]): Strings which should be
+                sent along with the request as metadata.
+
+        Returns:
+            google.cloud.dataplex_v1.types.Entity:
+                Represents tables and fileset
+                metadata contained within a zone.
+
+        """
+        # Create or coerce a protobuf request object.
+        # - Quick check: If we got a request object, we should *not* have
+        #   gotten any keyword arguments that map to the request.
+        has_flattened_params = any([parent, entity])
+        if request is not None and has_flattened_params:
+            raise ValueError("If the `request` argument is set, then none of "
+                             "the individual field arguments should be set.")
+
+        # - Use the request object if provided (there's no risk of modifying the input as
+        #   there are no flattened fields), or create one.
+        if not isinstance(request, metadata_.CreateEntityRequest):
+            request = metadata_.CreateEntityRequest(request)
+
+        # If we have keyword arguments corresponding to fields on the
+        # request, apply these.
+        if parent is not None:
+            request.parent = parent
+        if entity is not None:
+            request.entity = entity
+
+        # Wrap the RPC method; this adds retry and timeout information,
+        # and friendly error handling.
+        rpc = self._client._transport._wrapped_methods[self._client._transport.create_entity]
+
+        # Certain fields should be provided within the metadata header;
+        # add these here.
+        metadata = tuple(metadata) + (
+            gapic_v1.routing_header.to_grpc_metadata((
+                ("parent", request.parent),
+            )),
+        )
+
+        # Validate the universe domain.
+        self._client._validate_universe_domain()
+
+        # Send the request.
+        response = await rpc(
+            request,
+            retry=retry,
+            timeout=timeout,
+            metadata=metadata,
+        )
+
+        # Done; return the response.
+        return response
+
+    async def update_entity(self,
+            request: Optional[Union[metadata_.UpdateEntityRequest, dict]] = None,
+            *,
+            retry: OptionalRetry = gapic_v1.method.DEFAULT,
+            timeout: Union[float, object] = gapic_v1.method.DEFAULT,
+            metadata: Sequence[Tuple[str, str]] = (),
+            ) -> metadata_.Entity:
+        r"""Update a metadata entity. Only supports full resource
+        update.
+
+        .. code-block:: python
+
+            # This snippet has been automatically generated and should be regarded as a
+            # code template only.
+            # It will require modifications to work:
+            # - It may require correct/in-range values for request initialization.
+            # - It may require specifying regional endpoints when creating the service
+            #   client as shown in:
+            #   https://googleapis.dev/python/google-api-core/latest/client_options.html
+            from google.cloud import dataplex_v1
+
+            async def sample_update_entity():
+                # Create a client
+                client = dataplex_v1.MetadataServiceAsyncClient()
+
+                # Initialize request argument(s)
+                entity = dataplex_v1.Entity()
+                entity.id = "id_value"
+                entity.type_ = "FILESET"
+                entity.asset = "asset_value"
+                entity.data_path = "data_path_value"
+                entity.system = "BIGQUERY"
+                entity.format_.mime_type = "mime_type_value"
+                entity.schema.user_managed = True
+
+                request = dataplex_v1.UpdateEntityRequest(
+                    entity=entity,
+                )
+
+                # Make the request
+                response = await client.update_entity(request=request)
+
+                # Handle the response
+                print(response)
+
+        Args:
+            request (Optional[Union[google.cloud.dataplex_v1.types.UpdateEntityRequest, dict]]):
+                The request object. Update a metadata entity request.
+                The existing entity will be fully
+                replaced by the entity in the request.
+                The entity ID is mutable. To modify the
+                ID, use the current entity ID in the
+                request URL and specify the new ID in
+                the request body.
+ retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.dataplex_v1.types.Entity: + Represents tables and fileset + metadata contained within a zone. + + """ + # Create or coerce a protobuf request object. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, metadata_.UpdateEntityRequest): + request = metadata_.UpdateEntityRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[self._client._transport.update_entity] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("entity.name", request.entity.name), + )), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def delete_entity(self, + request: Optional[Union[metadata_.DeleteEntityRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Delete a metadata entity. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dataplex_v1 + + async def sample_delete_entity(): + # Create a client + client = dataplex_v1.MetadataServiceAsyncClient() + + # Initialize request argument(s) + request = dataplex_v1.DeleteEntityRequest( + name="name_value", + etag="etag_value", + ) + + # Make the request + await client.delete_entity(request=request) + + Args: + request (Optional[Union[google.cloud.dataplex_v1.types.DeleteEntityRequest, dict]]): + The request object. Delete a metadata entity request. + name (:class:`str`): + Required. The resource name of the entity: + ``projects/{project_number}/locations/{location_id}/lakes/{lake_id}/zones/{zone_id}/entities/{entity_id}``. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
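+        # Illustrative call forms (editorial sketch, not generated code):
+        #     await client.delete_entity(request=request_obj)
+        #     await client.delete_entity(name="projects/.../entities/...")
+        # are each valid, but passing `request` together with `name` raises
+        # the ValueError below.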
+ has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, metadata_.DeleteEntityRequest): + request = metadata_.DeleteEntityRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[self._client._transport.delete_entity] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + async def get_entity(self, + request: Optional[Union[metadata_.GetEntityRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> metadata_.Entity: + r"""Get a metadata entity. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dataplex_v1 + + async def sample_get_entity(): + # Create a client + client = dataplex_v1.MetadataServiceAsyncClient() + + # Initialize request argument(s) + request = dataplex_v1.GetEntityRequest( + name="name_value", + ) + + # Make the request + response = await client.get_entity(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.dataplex_v1.types.GetEntityRequest, dict]]): + The request object. Get metadata entity request. + name (:class:`str`): + Required. The resource name of the entity: + ``projects/{project_number}/locations/{location_id}/lakes/{lake_id}/zones/{zone_id}/entities/{entity_id}.`` + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.dataplex_v1.types.Entity: + Represents tables and fileset + metadata contained within a zone. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
+ has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, metadata_.GetEntityRequest): + request = metadata_.GetEntityRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[self._client._transport.get_entity] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def list_entities(self, + request: Optional[Union[metadata_.ListEntitiesRequest, dict]] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListEntitiesAsyncPager: + r"""List metadata entities in a zone. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dataplex_v1 + + async def sample_list_entities(): + # Create a client + client = dataplex_v1.MetadataServiceAsyncClient() + + # Initialize request argument(s) + request = dataplex_v1.ListEntitiesRequest( + parent="parent_value", + view="FILESETS", + ) + + # Make the request + page_result = client.list_entities(request=request) + + # Handle the response + async for response in page_result: + print(response) + + Args: + request (Optional[Union[google.cloud.dataplex_v1.types.ListEntitiesRequest, dict]]): + The request object. List metadata entities request. + parent (:class:`str`): + Required. The resource name of the parent zone: + ``projects/{project_number}/locations/{location_id}/lakes/{lake_id}/zones/{zone_id}``. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.dataplex_v1.services.metadata_service.pagers.ListEntitiesAsyncPager: + List metadata entities response. + + Iterating over this object will yield + results and resolve additional pages + automatically. + + """ + # Create or coerce a protobuf request object. 
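+        # Usage sketch (hypothetical parent): awaiting the coroutine yields an
+        # async pager, so results stream without manual page-token handling:
+        #
+        #   pager = await client.list_entities(parent=zone_name)
+        #   async for entity in pager:
+        #       print(entity.name)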
+ # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent]) + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, metadata_.ListEntitiesRequest): + request = metadata_.ListEntitiesRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[self._client._transport.list_entities] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("parent", request.parent), + )), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__aiter__` convenience method. + response = pagers.ListEntitiesAsyncPager( + method=rpc, + request=request, + response=response, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def create_partition(self, + request: Optional[Union[metadata_.CreatePartitionRequest, dict]] = None, + *, + parent: Optional[str] = None, + partition: Optional[metadata_.Partition] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> metadata_.Partition: + r"""Create a metadata partition. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dataplex_v1 + + async def sample_create_partition(): + # Create a client + client = dataplex_v1.MetadataServiceAsyncClient() + + # Initialize request argument(s) + partition = dataplex_v1.Partition() + partition.values = ['values_value1', 'values_value2'] + partition.location = "location_value" + + request = dataplex_v1.CreatePartitionRequest( + parent="parent_value", + partition=partition, + ) + + # Make the request + response = await client.create_partition(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.dataplex_v1.types.CreatePartitionRequest, dict]]): + The request object. Create metadata partition request. + parent (:class:`str`): + Required. The resource name of the parent zone: + ``projects/{project_number}/locations/{location_id}/lakes/{lake_id}/zones/{zone_id}/entities/{entity_id}``. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. 
+ partition (:class:`google.cloud.dataplex_v1.types.Partition`): + Required. Partition resource. + This corresponds to the ``partition`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.dataplex_v1.types.Partition: + Represents partition metadata + contained within entity instances. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent, partition]) + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, metadata_.CreatePartitionRequest): + request = metadata_.CreatePartitionRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + if partition is not None: + request.partition = partition + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[self._client._transport.create_partition] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("parent", request.parent), + )), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def delete_partition(self, + request: Optional[Union[metadata_.DeletePartitionRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Delete a metadata partition. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dataplex_v1 + + async def sample_delete_partition(): + # Create a client + client = dataplex_v1.MetadataServiceAsyncClient() + + # Initialize request argument(s) + request = dataplex_v1.DeletePartitionRequest( + name="name_value", + ) + + # Make the request + await client.delete_partition(request=request) + + Args: + request (Optional[Union[google.cloud.dataplex_v1.types.DeletePartitionRequest, dict]]): + The request object. Delete metadata partition request. + name (:class:`str`): + Required. The resource name of the partition. 
format: + ``projects/{project_number}/locations/{location_id}/lakes/{lake_id}/zones/{zone_id}/entities/{entity_id}/partitions/{partition_value_path}``. + The {partition_value_path} segment consists of an + ordered sequence of partition values separated by "/". + All values must be provided. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, metadata_.DeletePartitionRequest): + request = metadata_.DeletePartitionRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[self._client._transport.delete_partition] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + async def get_partition(self, + request: Optional[Union[metadata_.GetPartitionRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> metadata_.Partition: + r"""Get a metadata partition of an entity. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dataplex_v1 + + async def sample_get_partition(): + # Create a client + client = dataplex_v1.MetadataServiceAsyncClient() + + # Initialize request argument(s) + request = dataplex_v1.GetPartitionRequest( + name="name_value", + ) + + # Make the request + response = await client.get_partition(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.dataplex_v1.types.GetPartitionRequest, dict]]): + The request object. Get metadata partition request. + name (:class:`str`): + Required. 
The resource name of the partition: + ``projects/{project_number}/locations/{location_id}/lakes/{lake_id}/zones/{zone_id}/entities/{entity_id}/partitions/{partition_value_path}``. + The {partition_value_path} segment consists of an + ordered sequence of partition values separated by "/". + All values must be provided. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.dataplex_v1.types.Partition: + Represents partition metadata + contained within entity instances. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, metadata_.GetPartitionRequest): + request = metadata_.GetPartitionRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[self._client._transport.get_partition] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def list_partitions(self, + request: Optional[Union[metadata_.ListPartitionsRequest, dict]] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListPartitionsAsyncPager: + r"""List metadata partitions of an entity. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dataplex_v1 + + async def sample_list_partitions(): + # Create a client + client = dataplex_v1.MetadataServiceAsyncClient() + + # Initialize request argument(s) + request = dataplex_v1.ListPartitionsRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_partitions(request=request) + + # Handle the response + async for response in page_result: + print(response) + + Args: + request (Optional[Union[google.cloud.dataplex_v1.types.ListPartitionsRequest, dict]]): + The request object. List metadata partitions request. + parent (:class:`str`): + Required. The resource name of the parent entity: + ``projects/{project_number}/locations/{location_id}/lakes/{lake_id}/zones/{zone_id}/entities/{entity_id}``. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.dataplex_v1.services.metadata_service.pagers.ListPartitionsAsyncPager: + List metadata partitions response. + + Iterating over this object will yield + results and resolve additional pages + automatically. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent]) + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, metadata_.ListPartitionsRequest): + request = metadata_.ListPartitionsRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[self._client._transport.list_partitions] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("parent", request.parent), + )), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__aiter__` convenience method. + response = pagers.ListPartitionsAsyncPager( + method=rpc, + request=request, + response=response, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
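+        # Usage sketch: besides item-level iteration, the pager built above
+        # exposes page-level access via its ``pages`` async generator:
+        #
+        #   async for page in response.pages:
+        #       for partition in page.partitions:
+        #           print(partition.location)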
+ return response + + async def list_operations( + self, + request: Optional[operations_pb2.ListOperationsRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.ListOperationsResponse: + r"""Lists operations that match the specified filter in the request. + + Args: + request (:class:`~.operations_pb2.ListOperationsRequest`): + The request object. Request message for + `ListOperations` method. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + ~.operations_pb2.ListOperationsResponse: + Response message for ``ListOperations`` method. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.ListOperationsRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self.transport._wrapped_methods[self._client._transport.list_operations] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("name", request.name),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, retry=retry, timeout=timeout, metadata=metadata,) + + # Done; return the response. + return response + + async def get_operation( + self, + request: Optional[operations_pb2.GetOperationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.Operation: + r"""Gets the latest state of a long-running operation. + + Args: + request (:class:`~.operations_pb2.GetOperationRequest`): + The request object. Request message for + `GetOperation` method. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + ~.operations_pb2.Operation: + An ``Operation`` object. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.GetOperationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self.transport._wrapped_methods[self._client._transport.get_operation] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("name", request.name),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, retry=retry, timeout=timeout, metadata=metadata,) + + # Done; return the response. 
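+        # Usage sketch (hypothetical operation name): the returned Operation
+        # can be polled for completion via its ``done`` flag:
+        #
+        #   op = await client.get_operation(operations_pb2.GetOperationRequest(
+        #       name="projects/my-proj/locations/us-central1/operations/op-123"))
+        #   if op.done:
+        #       ...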
+ return response + + async def delete_operation( + self, + request: Optional[operations_pb2.DeleteOperationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Deletes a long-running operation. + + This method indicates that the client is no longer interested + in the operation result. It does not cancel the operation. + If the server doesn't support this method, it returns + `google.rpc.Code.UNIMPLEMENTED`. + + Args: + request (:class:`~.operations_pb2.DeleteOperationRequest`): + The request object. Request message for + `DeleteOperation` method. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + None + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.DeleteOperationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self.transport._wrapped_methods[self._client._transport.delete_operation] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("name", request.name),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + await rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + + async def cancel_operation( + self, + request: Optional[operations_pb2.CancelOperationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Starts asynchronous cancellation on a long-running operation. + + The server makes a best effort to cancel the operation, but success + is not guaranteed. If the server doesn't support this method, it returns + `google.rpc.Code.UNIMPLEMENTED`. + + Args: + request (:class:`~.operations_pb2.CancelOperationRequest`): + The request object. Request message for + `CancelOperation` method. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + None + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.CancelOperationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self.transport._wrapped_methods[self._client._transport.cancel_operation] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("name", request.name),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. 
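+        # Note: cancellation is best effort. A successful RPC below only means
+        # the request was accepted; one way to observe the outcome is to poll
+        # get_operation afterwards and inspect ``done`` and ``error``.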
+ await rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + + async def get_location( + self, + request: Optional[locations_pb2.GetLocationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> locations_pb2.Location: + r"""Gets information about a location. + + Args: + request (:class:`~.location_pb2.GetLocationRequest`): + The request object. Request message for + `GetLocation` method. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + ~.location_pb2.Location: + Location object. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = locations_pb2.GetLocationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self.transport._wrapped_methods[self._client._transport.get_location] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("name", request.name),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, retry=retry, timeout=timeout, metadata=metadata,) + + # Done; return the response. + return response + + async def list_locations( + self, + request: Optional[locations_pb2.ListLocationsRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> locations_pb2.ListLocationsResponse: + r"""Lists information about the supported locations for this service. + + Args: + request (:class:`~.location_pb2.ListLocationsRequest`): + The request object. Request message for + `ListLocations` method. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + ~.location_pb2.ListLocationsResponse: + Response message for ``ListLocations`` method. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = locations_pb2.ListLocationsRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self.transport._wrapped_methods[self._client._transport.list_locations] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("name", request.name),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, retry=retry, timeout=timeout, metadata=metadata,) + + # Done; return the response. 
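+        # Usage sketch (hypothetical project): each Location in the response
+        # carries a ``location_id`` usable with the client's *_path helpers:
+        #
+        #   resp = await client.list_locations(
+        #       locations_pb2.ListLocationsRequest(name="projects/my-proj"))
+        #   ids = [loc.location_id for loc in resp.locations]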
+ return response + + async def __aenter__(self) -> "MetadataServiceAsyncClient": + return self + + async def __aexit__(self, exc_type, exc, tb): + await self.transport.close() + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(gapic_version=package_version.__version__) + + +__all__ = ( + "MetadataServiceAsyncClient", +) diff --git a/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/metadata_service/client.py b/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/metadata_service/client.py new file mode 100644 index 000000000000..59ba45b296de --- /dev/null +++ b/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/metadata_service/client.py @@ -0,0 +1,1840 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from collections import OrderedDict +import os +import re +from typing import Dict, Callable, Mapping, MutableMapping, MutableSequence, Optional, Sequence, Tuple, Type, Union, cast +import warnings + +from google.cloud.dataplex_v1 import gapic_version as package_version + +from google.api_core import client_options as client_options_lib +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry as retries +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport import mtls # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.auth.exceptions import MutualTLSChannelError # type: ignore +from google.oauth2 import service_account # type: ignore + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault, None] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object, None] # type: ignore + +from google.cloud.dataplex_v1.services.metadata_service import pagers +from google.cloud.dataplex_v1.types import metadata_ +from google.cloud.location import locations_pb2 # type: ignore +from google.iam.v1 import iam_policy_pb2 # type: ignore +from google.iam.v1 import policy_pb2 # type: ignore +from google.longrunning import operations_pb2 # type: ignore +from google.protobuf import timestamp_pb2 # type: ignore +from .transports.base import MetadataServiceTransport, DEFAULT_CLIENT_INFO +from .transports.grpc import MetadataServiceGrpcTransport +from .transports.grpc_asyncio import MetadataServiceGrpcAsyncIOTransport + + +class MetadataServiceClientMeta(type): + """Metaclass for the MetadataService client. + + This provides class-level methods for building and retrieving + support objects (e.g. transport) without polluting the client instance + objects. 
+ """ + _transport_registry = OrderedDict() # type: Dict[str, Type[MetadataServiceTransport]] + _transport_registry["grpc"] = MetadataServiceGrpcTransport + _transport_registry["grpc_asyncio"] = MetadataServiceGrpcAsyncIOTransport + + def get_transport_class(cls, + label: Optional[str] = None, + ) -> Type[MetadataServiceTransport]: + """Returns an appropriate transport class. + + Args: + label: The name of the desired transport. If none is + provided, then the first transport in the registry is used. + + Returns: + The transport class to use. + """ + # If a specific transport is requested, return that one. + if label: + return cls._transport_registry[label] + + # No transport is requested; return the default (that is, the first one + # in the dictionary). + return next(iter(cls._transport_registry.values())) + + +class MetadataServiceClient(metaclass=MetadataServiceClientMeta): + """Metadata service manages metadata resources such as tables, + filesets and partitions. + """ + + @staticmethod + def _get_default_mtls_endpoint(api_endpoint): + """Converts api endpoint to mTLS endpoint. + + Convert "*.sandbox.googleapis.com" and "*.googleapis.com" to + "*.mtls.sandbox.googleapis.com" and "*.mtls.googleapis.com" respectively. + Args: + api_endpoint (Optional[str]): the api endpoint to convert. + Returns: + str: converted mTLS api endpoint. + """ + if not api_endpoint: + return api_endpoint + + mtls_endpoint_re = re.compile( + r"(?P[^.]+)(?P\.mtls)?(?P\.sandbox)?(?P\.googleapis\.com)?" + ) + + m = mtls_endpoint_re.match(api_endpoint) + name, mtls, sandbox, googledomain = m.groups() + if mtls or not googledomain: + return api_endpoint + + if sandbox: + return api_endpoint.replace( + "sandbox.googleapis.com", "mtls.sandbox.googleapis.com" + ) + + return api_endpoint.replace(".googleapis.com", ".mtls.googleapis.com") + + # Note: DEFAULT_ENDPOINT is deprecated. Use _DEFAULT_ENDPOINT_TEMPLATE instead. + DEFAULT_ENDPOINT = "dataplex.googleapis.com" + DEFAULT_MTLS_ENDPOINT = _get_default_mtls_endpoint.__func__( # type: ignore + DEFAULT_ENDPOINT + ) + + _DEFAULT_ENDPOINT_TEMPLATE = "dataplex.{UNIVERSE_DOMAIN}" + _DEFAULT_UNIVERSE = "googleapis.com" + + @classmethod + def from_service_account_info(cls, info: dict, *args, **kwargs): + """Creates an instance of this client using the provided credentials + info. + + Args: + info (dict): The service account private key info. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + MetadataServiceClient: The constructed client. + """ + credentials = service_account.Credentials.from_service_account_info(info) + kwargs["credentials"] = credentials + return cls(*args, **kwargs) + + @classmethod + def from_service_account_file(cls, filename: str, *args, **kwargs): + """Creates an instance of this client using the provided credentials + file. + + Args: + filename (str): The path to the service account private key json + file. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + MetadataServiceClient: The constructed client. + """ + credentials = service_account.Credentials.from_service_account_file( + filename) + kwargs["credentials"] = credentials + return cls(*args, **kwargs) + + from_service_account_json = from_service_account_file + + @property + def transport(self) -> MetadataServiceTransport: + """Returns the transport used by the client instance. 
+
+        Returns:
+            MetadataServiceTransport: The transport used by the client
+                instance.
+        """
+        return self._transport
+
+    @staticmethod
+    def entity_path(project: str,location: str,lake: str,zone: str,entity: str,) -> str:
+        """Returns a fully-qualified entity string."""
+        return "projects/{project}/locations/{location}/lakes/{lake}/zones/{zone}/entities/{entity}".format(project=project, location=location, lake=lake, zone=zone, entity=entity, )
+
+    @staticmethod
+    def parse_entity_path(path: str) -> Dict[str,str]:
+        """Parses an entity path into its component segments."""
+        m = re.match(r"^projects/(?P<project>.+?)/locations/(?P<location>.+?)/lakes/(?P<lake>.+?)/zones/(?P<zone>.+?)/entities/(?P<entity>.+?)$", path)
+        return m.groupdict() if m else {}
+
+    @staticmethod
+    def partition_path(project: str,location: str,lake: str,zone: str,entity: str,partition: str,) -> str:
+        """Returns a fully-qualified partition string."""
+        return "projects/{project}/locations/{location}/lakes/{lake}/zones/{zone}/entities/{entity}/partitions/{partition}".format(project=project, location=location, lake=lake, zone=zone, entity=entity, partition=partition, )
+
+    @staticmethod
+    def parse_partition_path(path: str) -> Dict[str,str]:
+        """Parses a partition path into its component segments."""
+        m = re.match(r"^projects/(?P<project>.+?)/locations/(?P<location>.+?)/lakes/(?P<lake>.+?)/zones/(?P<zone>.+?)/entities/(?P<entity>.+?)/partitions/(?P<partition>.+?)$", path)
+        return m.groupdict() if m else {}
+
+    @staticmethod
+    def zone_path(project: str,location: str,lake: str,zone: str,) -> str:
+        """Returns a fully-qualified zone string."""
+        return "projects/{project}/locations/{location}/lakes/{lake}/zones/{zone}".format(project=project, location=location, lake=lake, zone=zone, )
+
+    @staticmethod
+    def parse_zone_path(path: str) -> Dict[str,str]:
+        """Parses a zone path into its component segments."""
+        m = re.match(r"^projects/(?P<project>.+?)/locations/(?P<location>.+?)/lakes/(?P<lake>.+?)/zones/(?P<zone>.+?)$", path)
+        return m.groupdict() if m else {}
+
+    @staticmethod
+    def common_billing_account_path(billing_account: str, ) -> str:
+        """Returns a fully-qualified billing_account string."""
+        return "billingAccounts/{billing_account}".format(billing_account=billing_account, )
+
+    @staticmethod
+    def parse_common_billing_account_path(path: str) -> Dict[str,str]:
+        """Parse a billing_account path into its component segments."""
+        m = re.match(r"^billingAccounts/(?P<billing_account>.+?)$", path)
+        return m.groupdict() if m else {}
+
+    @staticmethod
+    def common_folder_path(folder: str, ) -> str:
+        """Returns a fully-qualified folder string."""
+        return "folders/{folder}".format(folder=folder, )
+
+    @staticmethod
+    def parse_common_folder_path(path: str) -> Dict[str,str]:
+        """Parse a folder path into its component segments."""
+        m = re.match(r"^folders/(?P<folder>.+?)$", path)
+        return m.groupdict() if m else {}
+
+    @staticmethod
+    def common_organization_path(organization: str, ) -> str:
+        """Returns a fully-qualified organization string."""
+        return "organizations/{organization}".format(organization=organization, )
+
+    @staticmethod
+    def parse_common_organization_path(path: str) -> Dict[str,str]:
+        """Parse an organization path into its component segments."""
+        m = re.match(r"^organizations/(?P<organization>.+?)$", path)
+        return m.groupdict() if m else {}
+
+    @staticmethod
+    def common_project_path(project: str, ) -> str:
+        """Returns a fully-qualified project string."""
+        return "projects/{project}".format(project=project, )
+
+    @staticmethod
+    def parse_common_project_path(path: str) -> Dict[str,str]:
+        """Parse a project path into its component segments."""
+        m = re.match(r"^projects/(?P<project>.+?)$", path)
+        return m.groupdict() if m else {}
+
+    @staticmethod
+    def common_location_path(project: str, location: str, ) -> str:
+        """Returns a fully-qualified location string."""
+        return "projects/{project}/locations/{location}".format(project=project, location=location, )
+
+    @staticmethod
+    def parse_common_location_path(path: str) -> Dict[str,str]:
+        """Parse a location path into its component segments."""
+        m = re.match(r"^projects/(?P<project>.+?)/locations/(?P<location>.+?)$", path)
+        return m.groupdict() if m else {}
+
+    @classmethod
+    def get_mtls_endpoint_and_cert_source(cls, client_options: Optional[client_options_lib.ClientOptions] = None):
+        """Deprecated. Return the API endpoint and client cert source for mutual TLS.
+
+        The client cert source is determined in the following order:
+        (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the
+        client cert source is None.
+        (2) if `client_options.client_cert_source` is provided, use the provided one; if the
+        default client cert source exists, use the default one; otherwise the client cert
+        source is None.
+
+        The API endpoint is determined in the following order:
+        (1) if `client_options.api_endpoint` is provided, use the provided one.
+        (2) if `GOOGLE_API_USE_MTLS_ENDPOINT` environment variable is "always", use the
+        default mTLS endpoint; if the environment variable is "never", use the default API
+        endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise
+        use the default API endpoint.
+
+        More details can be found at https://google.aip.dev/auth/4114.
+
+        Args:
+            client_options (google.api_core.client_options.ClientOptions): Custom options for the
+                client. Only the `api_endpoint` and `client_cert_source` properties may be used
+                in this method.
+
+        Returns:
+            Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the
+                client cert source to use.
+
+        Raises:
+            google.auth.exceptions.MutualTLSChannelError: If any errors happen.
+        """
+
+        warnings.warn("get_mtls_endpoint_and_cert_source is deprecated. Use the api_endpoint property instead.",
+            DeprecationWarning)
+        if client_options is None:
+            client_options = client_options_lib.ClientOptions()
+        use_client_cert = os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false")
+        use_mtls_endpoint = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto")
+        if use_client_cert not in ("true", "false"):
+            raise ValueError("Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`")
+        if use_mtls_endpoint not in ("auto", "never", "always"):
+            raise MutualTLSChannelError("Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`")
+
+        # Figure out the client cert source to use.
+        client_cert_source = None
+        if use_client_cert == "true":
+            if client_options.client_cert_source:
+                client_cert_source = client_options.client_cert_source
+            elif mtls.has_default_client_cert_source():
+                client_cert_source = mtls.default_client_cert_source()
+
+        # Figure out which api endpoint to use.
+        if client_options.api_endpoint is not None:
+            api_endpoint = client_options.api_endpoint
+        elif use_mtls_endpoint == "always" or (use_mtls_endpoint == "auto" and client_cert_source):
+            api_endpoint = cls.DEFAULT_MTLS_ENDPOINT
+        else:
+            api_endpoint = cls.DEFAULT_ENDPOINT
+
+        return api_endpoint, client_cert_source
+
+    @staticmethod
+    def _read_environment_variables():
+        """Returns the environment variables used by the client.
+ + Returns: + Tuple[bool, str, str]: returns the GOOGLE_API_USE_CLIENT_CERTIFICATE, + GOOGLE_API_USE_MTLS_ENDPOINT, and GOOGLE_CLOUD_UNIVERSE_DOMAIN environment variables. + + Raises: + ValueError: If GOOGLE_API_USE_CLIENT_CERTIFICATE is not + any of ["true", "false"]. + google.auth.exceptions.MutualTLSChannelError: If GOOGLE_API_USE_MTLS_ENDPOINT + is not any of ["auto", "never", "always"]. + """ + use_client_cert = os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false").lower() + use_mtls_endpoint = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto").lower() + universe_domain_env = os.getenv("GOOGLE_CLOUD_UNIVERSE_DOMAIN") + if use_client_cert not in ("true", "false"): + raise ValueError("Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`") + if use_mtls_endpoint not in ("auto", "never", "always"): + raise MutualTLSChannelError("Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`") + return use_client_cert == "true", use_mtls_endpoint, universe_domain_env + + @staticmethod + def _get_client_cert_source(provided_cert_source, use_cert_flag): + """Return the client cert source to be used by the client. + + Args: + provided_cert_source (bytes): The client certificate source provided. + use_cert_flag (bool): A flag indicating whether to use the client certificate. + + Returns: + bytes or None: The client cert source to be used by the client. + """ + client_cert_source = None + if use_cert_flag: + if provided_cert_source: + client_cert_source = provided_cert_source + elif mtls.has_default_client_cert_source(): + client_cert_source = mtls.default_client_cert_source() + return client_cert_source + + @staticmethod + def _get_api_endpoint(api_override, client_cert_source, universe_domain, use_mtls_endpoint): + """Return the API endpoint used by the client. + + Args: + api_override (str): The API endpoint override. If specified, this is always + the return value of this function and the other arguments are not used. + client_cert_source (bytes): The client certificate source used by the client. + universe_domain (str): The universe domain used by the client. + use_mtls_endpoint (str): How to use the mTLS endpoint, which depends also on the other parameters. + Possible values are "always", "auto", or "never". + + Returns: + str: The API endpoint to be used by the client. + """ + if api_override is not None: + api_endpoint = api_override + elif use_mtls_endpoint == "always" or (use_mtls_endpoint == "auto" and client_cert_source): + _default_universe = MetadataServiceClient._DEFAULT_UNIVERSE + if universe_domain != _default_universe: + raise MutualTLSChannelError(f"mTLS is not supported in any universe other than {_default_universe}.") + api_endpoint = MetadataServiceClient.DEFAULT_MTLS_ENDPOINT + else: + api_endpoint = MetadataServiceClient._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=universe_domain) + return api_endpoint + + @staticmethod + def _get_universe_domain(client_universe_domain: Optional[str], universe_domain_env: Optional[str]) -> str: + """Return the universe domain used by the client. + + Args: + client_universe_domain (Optional[str]): The universe domain configured via the client options. + universe_domain_env (Optional[str]): The universe domain configured via the "GOOGLE_CLOUD_UNIVERSE_DOMAIN" environment variable. + + Returns: + str: The universe domain to be used by the client. + + Raises: + ValueError: If the universe domain is an empty string. 
+ """ + universe_domain = MetadataServiceClient._DEFAULT_UNIVERSE + if client_universe_domain is not None: + universe_domain = client_universe_domain + elif universe_domain_env is not None: + universe_domain = universe_domain_env + if len(universe_domain.strip()) == 0: + raise ValueError("Universe Domain cannot be an empty string.") + return universe_domain + + def _validate_universe_domain(self): + """Validates client's and credentials' universe domains are consistent. + + Returns: + bool: True iff the configured universe domain is valid. + + Raises: + ValueError: If the configured universe domain is not valid. + """ + + # NOTE (b/349488459): universe validation is disabled until further notice. + return True + + @property + def api_endpoint(self): + """Return the API endpoint used by the client instance. + + Returns: + str: The API endpoint used by the client instance. + """ + return self._api_endpoint + + @property + def universe_domain(self) -> str: + """Return the universe domain used by the client instance. + + Returns: + str: The universe domain used by the client instance. + """ + return self._universe_domain + + def __init__(self, *, + credentials: Optional[ga_credentials.Credentials] = None, + transport: Optional[Union[str, MetadataServiceTransport, Callable[..., MetadataServiceTransport]]] = None, + client_options: Optional[Union[client_options_lib.ClientOptions, dict]] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + ) -> None: + """Instantiates the metadata service client. + + Args: + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + transport (Optional[Union[str,MetadataServiceTransport,Callable[..., MetadataServiceTransport]]]): + The transport to use, or a Callable that constructs and returns a new transport. + If a Callable is given, it will be called with the same set of initialization + arguments as used in the MetadataServiceTransport constructor. + If set to None, a transport is chosen automatically. + client_options (Optional[Union[google.api_core.client_options.ClientOptions, dict]]): + Custom options for the client. + + 1. The ``api_endpoint`` property can be used to override the + default endpoint provided by the client when ``transport`` is + not explicitly provided. Only if this property is not set and + ``transport`` was not explicitly provided, the endpoint is + determined by the GOOGLE_API_USE_MTLS_ENDPOINT environment + variable, which have one of the following values: + "always" (always use the default mTLS endpoint), "never" (always + use the default regular endpoint) and "auto" (auto-switch to the + default mTLS endpoint if client certificate is present; this is + the default value). + + 2. If the GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable + is "true", then the ``client_cert_source`` property can be used + to provide a client certificate for mTLS transport. If + not provided, the default SSL client certificate will be used if + present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not + set, no client certificate will be used. + + 3. The ``universe_domain`` property can be used to override the + default "googleapis.com" universe. Note that the ``api_endpoint`` + property still takes precedence; and ``universe_domain`` is + currently not supported for mTLS. 
+ + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport + creation failed for any reason. + """ + self._client_options = client_options + if isinstance(self._client_options, dict): + self._client_options = client_options_lib.from_dict(self._client_options) + if self._client_options is None: + self._client_options = client_options_lib.ClientOptions() + self._client_options = cast(client_options_lib.ClientOptions, self._client_options) + + universe_domain_opt = getattr(self._client_options, 'universe_domain', None) + + self._use_client_cert, self._use_mtls_endpoint, self._universe_domain_env = MetadataServiceClient._read_environment_variables() + self._client_cert_source = MetadataServiceClient._get_client_cert_source(self._client_options.client_cert_source, self._use_client_cert) + self._universe_domain = MetadataServiceClient._get_universe_domain(universe_domain_opt, self._universe_domain_env) + self._api_endpoint = None # updated below, depending on `transport` + + # Initialize the universe domain validation. + self._is_universe_domain_valid = False + + api_key_value = getattr(self._client_options, "api_key", None) + if api_key_value and credentials: + raise ValueError("client_options.api_key and credentials are mutually exclusive") + + # Save or instantiate the transport. + # Ordinarily, we provide the transport, but allowing a custom transport + # instance provides an extensibility point for unusual situations. + transport_provided = isinstance(transport, MetadataServiceTransport) + if transport_provided: + # transport is a MetadataServiceTransport instance. + if credentials or self._client_options.credentials_file or api_key_value: + raise ValueError("When providing a transport instance, " + "provide its credentials directly.") + if self._client_options.scopes: + raise ValueError( + "When providing a transport instance, provide its scopes " + "directly." 
+ ) + self._transport = cast(MetadataServiceTransport, transport) + self._api_endpoint = self._transport.host + + self._api_endpoint = (self._api_endpoint or + MetadataServiceClient._get_api_endpoint( + self._client_options.api_endpoint, + self._client_cert_source, + self._universe_domain, + self._use_mtls_endpoint)) + + if not transport_provided: + import google.auth._default # type: ignore + + if api_key_value and hasattr(google.auth._default, "get_api_key_credentials"): + credentials = google.auth._default.get_api_key_credentials(api_key_value) + + transport_init: Union[Type[MetadataServiceTransport], Callable[..., MetadataServiceTransport]] = ( + MetadataServiceClient.get_transport_class(transport) + if isinstance(transport, str) or transport is None + else cast(Callable[..., MetadataServiceTransport], transport) + ) + # initialize with the provided callable or the passed in class + self._transport = transport_init( + credentials=credentials, + credentials_file=self._client_options.credentials_file, + host=self._api_endpoint, + scopes=self._client_options.scopes, + client_cert_source_for_mtls=self._client_cert_source, + quota_project_id=self._client_options.quota_project_id, + client_info=client_info, + always_use_jwt_access=True, + api_audience=self._client_options.api_audience, + ) + + def create_entity(self, + request: Optional[Union[metadata_.CreateEntityRequest, dict]] = None, + *, + parent: Optional[str] = None, + entity: Optional[metadata_.Entity] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> metadata_.Entity: + r"""Create a metadata entity. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dataplex_v1 + + def sample_create_entity(): + # Create a client + client = dataplex_v1.MetadataServiceClient() + + # Initialize request argument(s) + entity = dataplex_v1.Entity() + entity.id = "id_value" + entity.type_ = "FILESET" + entity.asset = "asset_value" + entity.data_path = "data_path_value" + entity.system = "BIGQUERY" + entity.format_.mime_type = "mime_type_value" + entity.schema.user_managed = True + + request = dataplex_v1.CreateEntityRequest( + parent="parent_value", + entity=entity, + ) + + # Make the request + response = client.create_entity(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.dataplex_v1.types.CreateEntityRequest, dict]): + The request object. Create a metadata entity request. + parent (str): + Required. The resource name of the parent zone: + ``projects/{project_number}/locations/{location_id}/lakes/{lake_id}/zones/{zone_id}``. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + entity (google.cloud.dataplex_v1.types.Entity): + Required. Entity resource. + This corresponds to the ``entity`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. 
+            timeout (float): The timeout for this request.
+            metadata (Sequence[Tuple[str, str]]): Strings which should be
+                sent along with the request as metadata.
+
+        Returns:
+            google.cloud.dataplex_v1.types.Entity:
+                Represents tables and fileset
+                metadata contained within a zone.
+
+        """
+        # Create or coerce a protobuf request object.
+        # - Quick check: If we got a request object, we should *not* have
+        #   gotten any keyword arguments that map to the request.
+        has_flattened_params = any([parent, entity])
+        if request is not None and has_flattened_params:
+            raise ValueError('If the `request` argument is set, then none of '
+                             'the individual field arguments should be set.')
+
+        # - Use the request object if provided (there's no risk of modifying the input as
+        #   there are no flattened fields), or create one.
+        if not isinstance(request, metadata_.CreateEntityRequest):
+            request = metadata_.CreateEntityRequest(request)
+            # If we have keyword arguments corresponding to fields on the
+            # request, apply these.
+            if parent is not None:
+                request.parent = parent
+            if entity is not None:
+                request.entity = entity
+
+        # Wrap the RPC method; this adds retry and timeout information,
+        # and friendly error handling.
+        rpc = self._transport._wrapped_methods[self._transport.create_entity]
+
+        # Certain fields should be provided within the metadata header;
+        # add these here.
+        metadata = tuple(metadata) + (
+            gapic_v1.routing_header.to_grpc_metadata((
+                ("parent", request.parent),
+            )),
+        )
+
+        # Validate the universe domain.
+        self._validate_universe_domain()
+
+        # Send the request.
+        response = rpc(
+            request,
+            retry=retry,
+            timeout=timeout,
+            metadata=metadata,
+        )
+
+        # Done; return the response.
+        return response
+
+    def update_entity(self,
+            request: Optional[Union[metadata_.UpdateEntityRequest, dict]] = None,
+            *,
+            retry: OptionalRetry = gapic_v1.method.DEFAULT,
+            timeout: Union[float, object] = gapic_v1.method.DEFAULT,
+            metadata: Sequence[Tuple[str, str]] = (),
+            ) -> metadata_.Entity:
+        r"""Update a metadata entity. Only supports full resource
+        update.
+
+        .. code-block:: python
+
+            # This snippet has been automatically generated and should be regarded as a
+            # code template only.
+            # It will require modifications to work:
+            # - It may require correct/in-range values for request initialization.
+            # - It may require specifying regional endpoints when creating the service
+            #   client as shown in:
+            #   https://googleapis.dev/python/google-api-core/latest/client_options.html
+            from google.cloud import dataplex_v1
+
+            def sample_update_entity():
+                # Create a client
+                client = dataplex_v1.MetadataServiceClient()
+
+                # Initialize request argument(s)
+                entity = dataplex_v1.Entity()
+                entity.id = "id_value"
+                entity.type_ = "FILESET"
+                entity.asset = "asset_value"
+                entity.data_path = "data_path_value"
+                entity.system = "BIGQUERY"
+                entity.format_.mime_type = "mime_type_value"
+                entity.schema.user_managed = True
+
+                request = dataplex_v1.UpdateEntityRequest(
+                    entity=entity,
+                )
+
+                # Make the request
+                response = client.update_entity(request=request)
+
+                # Handle the response
+                print(response)
+
+        Args:
+            request (Union[google.cloud.dataplex_v1.types.UpdateEntityRequest, dict]):
+                The request object. Update a metadata entity request.
+                The existing entity will be fully
+                replaced by the entity in the request.
+                The entity ID is mutable. To modify the
+                ID, use the current entity ID in the
+                request URL and specify the new ID in
+                the request body.
+ retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.dataplex_v1.types.Entity: + Represents tables and fileset + metadata contained within a zone. + + """ + # Create or coerce a protobuf request object. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, metadata_.UpdateEntityRequest): + request = metadata_.UpdateEntityRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.update_entity] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("entity.name", request.entity.name), + )), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def delete_entity(self, + request: Optional[Union[metadata_.DeleteEntityRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Delete a metadata entity. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dataplex_v1 + + def sample_delete_entity(): + # Create a client + client = dataplex_v1.MetadataServiceClient() + + # Initialize request argument(s) + request = dataplex_v1.DeleteEntityRequest( + name="name_value", + etag="etag_value", + ) + + # Make the request + client.delete_entity(request=request) + + Args: + request (Union[google.cloud.dataplex_v1.types.DeleteEntityRequest, dict]): + The request object. Delete a metadata entity request. + name (str): + Required. The resource name of the entity: + ``projects/{project_number}/locations/{location_id}/lakes/{lake_id}/zones/{zone_id}/entities/{entity_id}``. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
+        has_flattened_params = any([name])
+        if request is not None and has_flattened_params:
+            raise ValueError('If the `request` argument is set, then none of '
+                             'the individual field arguments should be set.')
+
+        # - Use the request object if provided (there's no risk of modifying the input as
+        #   there are no flattened fields), or create one.
+        if not isinstance(request, metadata_.DeleteEntityRequest):
+            request = metadata_.DeleteEntityRequest(request)
+            # If we have keyword arguments corresponding to fields on the
+            # request, apply these.
+            if name is not None:
+                request.name = name
+
+        # Wrap the RPC method; this adds retry and timeout information,
+        # and friendly error handling.
+        rpc = self._transport._wrapped_methods[self._transport.delete_entity]
+
+        # Certain fields should be provided within the metadata header;
+        # add these here.
+        metadata = tuple(metadata) + (
+            gapic_v1.routing_header.to_grpc_metadata((
+                ("name", request.name),
+            )),
+        )
+
+        # Validate the universe domain.
+        self._validate_universe_domain()
+
+        # Send the request.
+        rpc(
+            request,
+            retry=retry,
+            timeout=timeout,
+            metadata=metadata,
+        )
+
+    def get_entity(self,
+            request: Optional[Union[metadata_.GetEntityRequest, dict]] = None,
+            *,
+            name: Optional[str] = None,
+            retry: OptionalRetry = gapic_v1.method.DEFAULT,
+            timeout: Union[float, object] = gapic_v1.method.DEFAULT,
+            metadata: Sequence[Tuple[str, str]] = (),
+            ) -> metadata_.Entity:
+        r"""Get a metadata entity.
+
+        .. code-block:: python
+
+            # This snippet has been automatically generated and should be regarded as a
+            # code template only.
+            # It will require modifications to work:
+            # - It may require correct/in-range values for request initialization.
+            # - It may require specifying regional endpoints when creating the service
+            #   client as shown in:
+            #   https://googleapis.dev/python/google-api-core/latest/client_options.html
+            from google.cloud import dataplex_v1
+
+            def sample_get_entity():
+                # Create a client
+                client = dataplex_v1.MetadataServiceClient()
+
+                # Initialize request argument(s)
+                request = dataplex_v1.GetEntityRequest(
+                    name="name_value",
+                )
+
+                # Make the request
+                response = client.get_entity(request=request)
+
+                # Handle the response
+                print(response)
+
+        Args:
+            request (Union[google.cloud.dataplex_v1.types.GetEntityRequest, dict]):
+                The request object. Get metadata entity request.
+            name (str):
+                Required. The resource name of the entity:
+                ``projects/{project_number}/locations/{location_id}/lakes/{lake_id}/zones/{zone_id}/entities/{entity_id}``.
+
+                This corresponds to the ``name`` field
+                on the ``request`` instance; if ``request`` is provided, this
+                should not be set.
+            retry (google.api_core.retry.Retry): Designation of what errors, if any,
+                should be retried.
+            timeout (float): The timeout for this request.
+            metadata (Sequence[Tuple[str, str]]): Strings which should be
+                sent along with the request as metadata.
+
+        Returns:
+            google.cloud.dataplex_v1.types.Entity:
+                Represents tables and fileset
+                metadata contained within a zone.
+
+        """
+        # Create or coerce a protobuf request object.
+        # - Quick check: If we got a request object, we should *not* have
+        #   gotten any keyword arguments that map to the request.
+ has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, metadata_.GetEntityRequest): + request = metadata_.GetEntityRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.get_entity] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def list_entities(self, + request: Optional[Union[metadata_.ListEntitiesRequest, dict]] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListEntitiesPager: + r"""List metadata entities in a zone. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dataplex_v1 + + def sample_list_entities(): + # Create a client + client = dataplex_v1.MetadataServiceClient() + + # Initialize request argument(s) + request = dataplex_v1.ListEntitiesRequest( + parent="parent_value", + view="FILESETS", + ) + + # Make the request + page_result = client.list_entities(request=request) + + # Handle the response + for response in page_result: + print(response) + + Args: + request (Union[google.cloud.dataplex_v1.types.ListEntitiesRequest, dict]): + The request object. List metadata entities request. + parent (str): + Required. The resource name of the parent zone: + ``projects/{project_number}/locations/{location_id}/lakes/{lake_id}/zones/{zone_id}``. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.dataplex_v1.services.metadata_service.pagers.ListEntitiesPager: + List metadata entities response. + + Iterating over this object will yield + results and resolve additional pages + automatically. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
+        has_flattened_params = any([parent])
+        if request is not None and has_flattened_params:
+            raise ValueError('If the `request` argument is set, then none of '
+                             'the individual field arguments should be set.')
+
+        # - Use the request object if provided (there's no risk of modifying the input as
+        #   there are no flattened fields), or create one.
+        if not isinstance(request, metadata_.ListEntitiesRequest):
+            request = metadata_.ListEntitiesRequest(request)
+            # If we have keyword arguments corresponding to fields on the
+            # request, apply these.
+            if parent is not None:
+                request.parent = parent
+
+        # Wrap the RPC method; this adds retry and timeout information,
+        # and friendly error handling.
+        rpc = self._transport._wrapped_methods[self._transport.list_entities]
+
+        # Certain fields should be provided within the metadata header;
+        # add these here.
+        metadata = tuple(metadata) + (
+            gapic_v1.routing_header.to_grpc_metadata((
+                ("parent", request.parent),
+            )),
+        )
+
+        # Validate the universe domain.
+        self._validate_universe_domain()
+
+        # Send the request.
+        response = rpc(
+            request,
+            retry=retry,
+            timeout=timeout,
+            metadata=metadata,
+        )
+
+        # This method is paged; wrap the response in a pager, which provides
+        # an `__iter__` convenience method.
+        response = pagers.ListEntitiesPager(
+            method=rpc,
+            request=request,
+            response=response,
+            retry=retry,
+            timeout=timeout,
+            metadata=metadata,
+        )
+
+        # Done; return the response.
+        return response
+
+    def create_partition(self,
+            request: Optional[Union[metadata_.CreatePartitionRequest, dict]] = None,
+            *,
+            parent: Optional[str] = None,
+            partition: Optional[metadata_.Partition] = None,
+            retry: OptionalRetry = gapic_v1.method.DEFAULT,
+            timeout: Union[float, object] = gapic_v1.method.DEFAULT,
+            metadata: Sequence[Tuple[str, str]] = (),
+            ) -> metadata_.Partition:
+        r"""Create a metadata partition.
+
+        .. code-block:: python
+
+            # This snippet has been automatically generated and should be regarded as a
+            # code template only.
+            # It will require modifications to work:
+            # - It may require correct/in-range values for request initialization.
+            # - It may require specifying regional endpoints when creating the service
+            #   client as shown in:
+            #   https://googleapis.dev/python/google-api-core/latest/client_options.html
+            from google.cloud import dataplex_v1
+
+            def sample_create_partition():
+                # Create a client
+                client = dataplex_v1.MetadataServiceClient()
+
+                # Initialize request argument(s)
+                partition = dataplex_v1.Partition()
+                partition.values = ['values_value1', 'values_value2']
+                partition.location = "location_value"
+
+                request = dataplex_v1.CreatePartitionRequest(
+                    parent="parent_value",
+                    partition=partition,
+                )
+
+                # Make the request
+                response = client.create_partition(request=request)
+
+                # Handle the response
+                print(response)
+
+        Args:
+            request (Union[google.cloud.dataplex_v1.types.CreatePartitionRequest, dict]):
+                The request object. Create metadata partition request.
+            parent (str):
+                Required. The resource name of the parent entity:
+                ``projects/{project_number}/locations/{location_id}/lakes/{lake_id}/zones/{zone_id}/entities/{entity_id}``.
+
+                This corresponds to the ``parent`` field
+                on the ``request`` instance; if ``request`` is provided, this
+                should not be set.
+            partition (google.cloud.dataplex_v1.types.Partition):
+                Required. Partition resource.
+                This corresponds to the ``partition`` field
+                on the ``request`` instance; if ``request`` is provided, this
+                should not be set.
+            retry (google.api_core.retry.Retry): Designation of what errors, if any,
+                should be retried.
+            timeout (float): The timeout for this request.
+            metadata (Sequence[Tuple[str, str]]): Strings which should be
+                sent along with the request as metadata.
+
+        Returns:
+            google.cloud.dataplex_v1.types.Partition:
+                Represents partition metadata
+                contained within entity instances.
+
+        """
+        # Create or coerce a protobuf request object.
+        # - Quick check: If we got a request object, we should *not* have
+        #   gotten any keyword arguments that map to the request.
+        has_flattened_params = any([parent, partition])
+        if request is not None and has_flattened_params:
+            raise ValueError('If the `request` argument is set, then none of '
+                             'the individual field arguments should be set.')
+
+        # - Use the request object if provided (there's no risk of modifying the input as
+        #   there are no flattened fields), or create one.
+        if not isinstance(request, metadata_.CreatePartitionRequest):
+            request = metadata_.CreatePartitionRequest(request)
+            # If we have keyword arguments corresponding to fields on the
+            # request, apply these.
+            if parent is not None:
+                request.parent = parent
+            if partition is not None:
+                request.partition = partition
+
+        # Wrap the RPC method; this adds retry and timeout information,
+        # and friendly error handling.
+        rpc = self._transport._wrapped_methods[self._transport.create_partition]
+
+        # Certain fields should be provided within the metadata header;
+        # add these here.
+        metadata = tuple(metadata) + (
+            gapic_v1.routing_header.to_grpc_metadata((
+                ("parent", request.parent),
+            )),
+        )
+
+        # Validate the universe domain.
+        self._validate_universe_domain()
+
+        # Send the request.
+        response = rpc(
+            request,
+            retry=retry,
+            timeout=timeout,
+            metadata=metadata,
+        )
+
+        # Done; return the response.
+        return response
+
+    def delete_partition(self,
+            request: Optional[Union[metadata_.DeletePartitionRequest, dict]] = None,
+            *,
+            name: Optional[str] = None,
+            retry: OptionalRetry = gapic_v1.method.DEFAULT,
+            timeout: Union[float, object] = gapic_v1.method.DEFAULT,
+            metadata: Sequence[Tuple[str, str]] = (),
+            ) -> None:
+        r"""Delete a metadata partition.
+
+        .. code-block:: python
+
+            # This snippet has been automatically generated and should be regarded as a
+            # code template only.
+            # It will require modifications to work:
+            # - It may require correct/in-range values for request initialization.
+            # - It may require specifying regional endpoints when creating the service
+            #   client as shown in:
+            #   https://googleapis.dev/python/google-api-core/latest/client_options.html
+            from google.cloud import dataplex_v1
+
+            def sample_delete_partition():
+                # Create a client
+                client = dataplex_v1.MetadataServiceClient()
+
+                # Initialize request argument(s)
+                request = dataplex_v1.DeletePartitionRequest(
+                    name="name_value",
+                )
+
+                # Make the request
+                client.delete_partition(request=request)
+
+        Args:
+            request (Union[google.cloud.dataplex_v1.types.DeletePartitionRequest, dict]):
+                The request object. Delete metadata partition request.
+            name (str):
+                Required. The resource name of the partition:
+                ``projects/{project_number}/locations/{location_id}/lakes/{lake_id}/zones/{zone_id}/entities/{entity_id}/partitions/{partition_value_path}``.
+                The {partition_value_path} segment consists of an
+                ordered sequence of partition values separated by "/".
+                All values must be provided.
+ + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, metadata_.DeletePartitionRequest): + request = metadata_.DeletePartitionRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.delete_partition] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + def get_partition(self, + request: Optional[Union[metadata_.GetPartitionRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> metadata_.Partition: + r"""Get a metadata partition of an entity. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dataplex_v1 + + def sample_get_partition(): + # Create a client + client = dataplex_v1.MetadataServiceClient() + + # Initialize request argument(s) + request = dataplex_v1.GetPartitionRequest( + name="name_value", + ) + + # Make the request + response = client.get_partition(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.dataplex_v1.types.GetPartitionRequest, dict]): + The request object. Get metadata partition request. + name (str): + Required. The resource name of the partition: + ``projects/{project_number}/locations/{location_id}/lakes/{lake_id}/zones/{zone_id}/entities/{entity_id}/partitions/{partition_value_path}``. + The {partition_value_path} segment consists of an + ordered sequence of partition values separated by "/". + All values must be provided. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. 
+ retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.dataplex_v1.types.Partition: + Represents partition metadata + contained within entity instances. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, metadata_.GetPartitionRequest): + request = metadata_.GetPartitionRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.get_partition] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def list_partitions(self, + request: Optional[Union[metadata_.ListPartitionsRequest, dict]] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListPartitionsPager: + r"""List metadata partitions of an entity. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dataplex_v1 + + def sample_list_partitions(): + # Create a client + client = dataplex_v1.MetadataServiceClient() + + # Initialize request argument(s) + request = dataplex_v1.ListPartitionsRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_partitions(request=request) + + # Handle the response + for response in page_result: + print(response) + + Args: + request (Union[google.cloud.dataplex_v1.types.ListPartitionsRequest, dict]): + The request object. List metadata partitions request. + parent (str): + Required. The resource name of the parent entity: + ``projects/{project_number}/locations/{location_id}/lakes/{lake_id}/zones/{zone_id}/entities/{entity_id}``. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. 
+ timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.dataplex_v1.services.metadata_service.pagers.ListPartitionsPager: + List metadata partitions response. + + Iterating over this object will yield + results and resolve additional pages + automatically. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, metadata_.ListPartitionsRequest): + request = metadata_.ListPartitionsRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.list_partitions] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("parent", request.parent), + )), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__iter__` convenience method. + response = pagers.ListPartitionsPager( + method=rpc, + request=request, + response=response, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def __enter__(self) -> "MetadataServiceClient": + return self + + def __exit__(self, type, value, traceback): + """Releases underlying transport's resources. + + .. warning:: + ONLY use as a context manager if the transport is NOT shared + with other clients! Exiting the with block will CLOSE the transport + and may cause errors in other clients! + """ + self.transport.close() + + def list_operations( + self, + request: Optional[operations_pb2.ListOperationsRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.ListOperationsResponse: + r"""Lists operations that match the specified filter in the request. + + Args: + request (:class:`~.operations_pb2.ListOperationsRequest`): + The request object. Request message for + `ListOperations` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + ~.operations_pb2.ListOperationsResponse: + Response message for ``ListOperations`` method. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. 
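+        # (A ``dict`` request is expanded into keyword arguments below; an
+        # already-constructed ``ListOperationsRequest`` is passed through as-is.)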
+ if isinstance(request, dict): + request = operations_pb2.ListOperationsRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.list_operations] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("name", request.name),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, retry=retry, timeout=timeout, metadata=metadata,) + + # Done; return the response. + return response + + def get_operation( + self, + request: Optional[operations_pb2.GetOperationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.Operation: + r"""Gets the latest state of a long-running operation. + + Args: + request (:class:`~.operations_pb2.GetOperationRequest`): + The request object. Request message for + `GetOperation` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + ~.operations_pb2.Operation: + An ``Operation`` object. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.GetOperationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.get_operation] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("name", request.name),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, retry=retry, timeout=timeout, metadata=metadata,) + + # Done; return the response. + return response + + def delete_operation( + self, + request: Optional[operations_pb2.DeleteOperationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Deletes a long-running operation. + + This method indicates that the client is no longer interested + in the operation result. It does not cancel the operation. + If the server doesn't support this method, it returns + `google.rpc.Code.UNIMPLEMENTED`. + + Args: + request (:class:`~.operations_pb2.DeleteOperationRequest`): + The request object. Request message for + `DeleteOperation` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + None + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. 
+        if isinstance(request, dict):
+            request = operations_pb2.DeleteOperationRequest(**request)
+
+        # Wrap the RPC method; this adds retry and timeout information,
+        # and friendly error handling.
+        rpc = self._transport._wrapped_methods[self._transport.delete_operation]
+
+        # Certain fields should be provided within the metadata header;
+        # add these here.
+        metadata = tuple(metadata) + (
+            gapic_v1.routing_header.to_grpc_metadata(
+                (("name", request.name),)),
+        )
+
+        # Validate the universe domain.
+        self._validate_universe_domain()
+
+        # Send the request.
+        rpc(request, retry=retry, timeout=timeout, metadata=metadata,)
+
+    def cancel_operation(
+        self,
+        request: Optional[operations_pb2.CancelOperationRequest] = None,
+        *,
+        retry: OptionalRetry = gapic_v1.method.DEFAULT,
+        timeout: Union[float, object] = gapic_v1.method.DEFAULT,
+        metadata: Sequence[Tuple[str, str]] = (),
+    ) -> None:
+        r"""Starts asynchronous cancellation on a long-running operation.
+
+        The server makes a best effort to cancel the operation, but success
+        is not guaranteed. If the server doesn't support this method, it returns
+        `google.rpc.Code.UNIMPLEMENTED`.
+
+        Args:
+            request (:class:`~.operations_pb2.CancelOperationRequest`):
+                The request object. Request message for
+                `CancelOperation` method.
+            retry (google.api_core.retry.Retry): Designation of what errors,
+                if any, should be retried.
+            timeout (float): The timeout for this request.
+            metadata (Sequence[Tuple[str, str]]): Strings which should be
+                sent along with the request as metadata.
+        Returns:
+            None
+        """
+        # Create or coerce a protobuf request object.
+        # The request isn't a proto-plus wrapped type,
+        # so it must be constructed via keyword expansion.
+        if isinstance(request, dict):
+            request = operations_pb2.CancelOperationRequest(**request)
+
+        # Wrap the RPC method; this adds retry and timeout information,
+        # and friendly error handling.
+        rpc = self._transport._wrapped_methods[self._transport.cancel_operation]
+
+        # Certain fields should be provided within the metadata header;
+        # add these here.
+        metadata = tuple(metadata) + (
+            gapic_v1.routing_header.to_grpc_metadata(
+                (("name", request.name),)),
+        )
+
+        # Validate the universe domain.
+        self._validate_universe_domain()
+
+        # Send the request.
+        rpc(request, retry=retry, timeout=timeout, metadata=metadata,)
+
+    def get_location(
+        self,
+        request: Optional[locations_pb2.GetLocationRequest] = None,
+        *,
+        retry: OptionalRetry = gapic_v1.method.DEFAULT,
+        timeout: Union[float, object] = gapic_v1.method.DEFAULT,
+        metadata: Sequence[Tuple[str, str]] = (),
+    ) -> locations_pb2.Location:
+        r"""Gets information about a location.
+
+        Args:
+            request (:class:`~.locations_pb2.GetLocationRequest`):
+                The request object. Request message for
+                `GetLocation` method.
+            retry (google.api_core.retry.Retry): Designation of what errors,
+                if any, should be retried.
+            timeout (float): The timeout for this request.
+            metadata (Sequence[Tuple[str, str]]): Strings which should be
+                sent along with the request as metadata.
+        Returns:
+            ~.locations_pb2.Location:
+                Location object.
+        """
+        # Create or coerce a protobuf request object.
+        # The request isn't a proto-plus wrapped type,
+        # so it must be constructed via keyword expansion.
+        if isinstance(request, dict):
+            request = locations_pb2.GetLocationRequest(**request)
+
+        # Wrap the RPC method; this adds retry and timeout information,
+        # and friendly error handling.
+        rpc = self._transport._wrapped_methods[self._transport.get_location]
+
+        # Certain fields should be provided within the metadata header;
+        # add these here.
+        metadata = tuple(metadata) + (
+            gapic_v1.routing_header.to_grpc_metadata(
+                (("name", request.name),)),
+        )
+
+        # Validate the universe domain.
+        self._validate_universe_domain()
+
+        # Send the request.
+        response = rpc(
+            request, retry=retry, timeout=timeout, metadata=metadata,)
+
+        # Done; return the response.
+        return response
+
+    def list_locations(
+        self,
+        request: Optional[locations_pb2.ListLocationsRequest] = None,
+        *,
+        retry: OptionalRetry = gapic_v1.method.DEFAULT,
+        timeout: Union[float, object] = gapic_v1.method.DEFAULT,
+        metadata: Sequence[Tuple[str, str]] = (),
+    ) -> locations_pb2.ListLocationsResponse:
+        r"""Lists information about the supported locations for this service.
+
+        Args:
+            request (:class:`~.locations_pb2.ListLocationsRequest`):
+                The request object. Request message for
+                `ListLocations` method.
+            retry (google.api_core.retry.Retry): Designation of what errors,
+                if any, should be retried.
+            timeout (float): The timeout for this request.
+            metadata (Sequence[Tuple[str, str]]): Strings which should be
+                sent along with the request as metadata.
+        Returns:
+            ~.locations_pb2.ListLocationsResponse:
+                Response message for ``ListLocations`` method.
+        """
+        # Create or coerce a protobuf request object.
+        # The request isn't a proto-plus wrapped type,
+        # so it must be constructed via keyword expansion.
+        if isinstance(request, dict):
+            request = locations_pb2.ListLocationsRequest(**request)
+
+        # Wrap the RPC method; this adds retry and timeout information,
+        # and friendly error handling.
+        rpc = self._transport._wrapped_methods[self._transport.list_locations]
+
+        # Certain fields should be provided within the metadata header;
+        # add these here.
+        metadata = tuple(metadata) + (
+            gapic_v1.routing_header.to_grpc_metadata(
+                (("name", request.name),)),
+        )
+
+        # Validate the universe domain.
+        self._validate_universe_domain()
+
+        # Send the request.
+        response = rpc(
+            request, retry=retry, timeout=timeout, metadata=metadata,)
+
+        # Done; return the response.
+        return response
+
+
+DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(gapic_version=package_version.__version__)
+
+
+__all__ = (
+    "MetadataServiceClient",
+)
diff --git a/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/metadata_service/pagers.py b/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/metadata_service/pagers.py
new file mode 100644
index 000000000000..2c3cb84a3aa8
--- /dev/null
+++ b/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/metadata_service/pagers.py
@@ -0,0 +1,297 @@
+# -*- coding: utf-8 -*-
+# Copyright 2024 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+# +from google.api_core import gapic_v1 +from google.api_core import retry as retries +from google.api_core import retry_async as retries_async +from typing import Any, AsyncIterator, Awaitable, Callable, Sequence, Tuple, Optional, Iterator, Union +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault, None] + OptionalAsyncRetry = Union[retries_async.AsyncRetry, gapic_v1.method._MethodDefault, None] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object, None] # type: ignore + OptionalAsyncRetry = Union[retries_async.AsyncRetry, object, None] # type: ignore + +from google.cloud.dataplex_v1.types import metadata_ + + +class ListEntitiesPager: + """A pager for iterating through ``list_entities`` requests. + + This class thinly wraps an initial + :class:`google.cloud.dataplex_v1.types.ListEntitiesResponse` object, and + provides an ``__iter__`` method to iterate through its + ``entities`` field. + + If there are more pages, the ``__iter__`` method will make additional + ``ListEntities`` requests and continue to iterate + through the ``entities`` field on the + corresponding responses. + + All the usual :class:`google.cloud.dataplex_v1.types.ListEntitiesResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + def __init__(self, + method: Callable[..., metadata_.ListEntitiesResponse], + request: metadata_.ListEntitiesRequest, + response: metadata_.ListEntitiesResponse, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = ()): + """Instantiate the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.dataplex_v1.types.ListEntitiesRequest): + The initial request object. + response (google.cloud.dataplex_v1.types.ListEntitiesResponse): + The initial response object. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + self._method = method + self._request = metadata_.ListEntitiesRequest(request) + self._response = response + self._retry = retry + self._timeout = timeout + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + def pages(self) -> Iterator[metadata_.ListEntitiesResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = self._method(self._request, retry=self._retry, timeout=self._timeout, metadata=self._metadata) + yield self._response + + def __iter__(self) -> Iterator[metadata_.Entity]: + for page in self.pages: + yield from page.entities + + def __repr__(self) -> str: + return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) + + +class ListEntitiesAsyncPager: + """A pager for iterating through ``list_entities`` requests. + + This class thinly wraps an initial + :class:`google.cloud.dataplex_v1.types.ListEntitiesResponse` object, and + provides an ``__aiter__`` method to iterate through its + ``entities`` field. 
+
+    If there are more pages, the ``__aiter__`` method will make additional
+    ``ListEntities`` requests and continue to iterate
+    through the ``entities`` field on the
+    corresponding responses.
+
+    All the usual :class:`google.cloud.dataplex_v1.types.ListEntitiesResponse`
+    attributes are available on the pager. If multiple requests are made, only
+    the most recent response is retained, and thus used for attribute lookup.
+    """
+    def __init__(self,
+            method: Callable[..., Awaitable[metadata_.ListEntitiesResponse]],
+            request: metadata_.ListEntitiesRequest,
+            response: metadata_.ListEntitiesResponse,
+            *,
+            retry: OptionalAsyncRetry = gapic_v1.method.DEFAULT,
+            timeout: Union[float, object] = gapic_v1.method.DEFAULT,
+            metadata: Sequence[Tuple[str, str]] = ()):
+        """Instantiates the pager.
+
+        Args:
+            method (Callable): The method that was originally called, and
+                which instantiated this pager.
+            request (google.cloud.dataplex_v1.types.ListEntitiesRequest):
+                The initial request object.
+            response (google.cloud.dataplex_v1.types.ListEntitiesResponse):
+                The initial response object.
+            retry (google.api_core.retry.AsyncRetry): Designation of what errors,
+                if any, should be retried.
+            timeout (float): The timeout for this request.
+            metadata (Sequence[Tuple[str, str]]): Strings which should be
+                sent along with the request as metadata.
+        """
+        self._method = method
+        self._request = metadata_.ListEntitiesRequest(request)
+        self._response = response
+        self._retry = retry
+        self._timeout = timeout
+        self._metadata = metadata
+
+    def __getattr__(self, name: str) -> Any:
+        return getattr(self._response, name)
+
+    @property
+    async def pages(self) -> AsyncIterator[metadata_.ListEntitiesResponse]:
+        yield self._response
+        while self._response.next_page_token:
+            self._request.page_token = self._response.next_page_token
+            self._response = await self._method(self._request, retry=self._retry, timeout=self._timeout, metadata=self._metadata)
+            yield self._response
+
+    def __aiter__(self) -> AsyncIterator[metadata_.Entity]:
+        async def async_generator():
+            async for page in self.pages:
+                for response in page.entities:
+                    yield response
+
+        return async_generator()
+
+    def __repr__(self) -> str:
+        return '{0}<{1!r}>'.format(self.__class__.__name__, self._response)
+
+
+class ListPartitionsPager:
+    """A pager for iterating through ``list_partitions`` requests.
+
+    This class thinly wraps an initial
+    :class:`google.cloud.dataplex_v1.types.ListPartitionsResponse` object, and
+    provides an ``__iter__`` method to iterate through its
+    ``partitions`` field.
+
+    If there are more pages, the ``__iter__`` method will make additional
+    ``ListPartitions`` requests and continue to iterate
+    through the ``partitions`` field on the
+    corresponding responses.
+
+    All the usual :class:`google.cloud.dataplex_v1.types.ListPartitionsResponse`
+    attributes are available on the pager. If multiple requests are made, only
+    the most recent response is retained, and thus used for attribute lookup.
+    """
+    def __init__(self,
+            method: Callable[..., metadata_.ListPartitionsResponse],
+            request: metadata_.ListPartitionsRequest,
+            response: metadata_.ListPartitionsResponse,
+            *,
+            retry: OptionalRetry = gapic_v1.method.DEFAULT,
+            timeout: Union[float, object] = gapic_v1.method.DEFAULT,
+            metadata: Sequence[Tuple[str, str]] = ()):
+        """Instantiate the pager.
+
+        Args:
+            method (Callable): The method that was originally called, and
+                which instantiated this pager.
+ request (google.cloud.dataplex_v1.types.ListPartitionsRequest): + The initial request object. + response (google.cloud.dataplex_v1.types.ListPartitionsResponse): + The initial response object. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + self._method = method + self._request = metadata_.ListPartitionsRequest(request) + self._response = response + self._retry = retry + self._timeout = timeout + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + def pages(self) -> Iterator[metadata_.ListPartitionsResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = self._method(self._request, retry=self._retry, timeout=self._timeout, metadata=self._metadata) + yield self._response + + def __iter__(self) -> Iterator[metadata_.Partition]: + for page in self.pages: + yield from page.partitions + + def __repr__(self) -> str: + return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) + + +class ListPartitionsAsyncPager: + """A pager for iterating through ``list_partitions`` requests. + + This class thinly wraps an initial + :class:`google.cloud.dataplex_v1.types.ListPartitionsResponse` object, and + provides an ``__aiter__`` method to iterate through its + ``partitions`` field. + + If there are more pages, the ``__aiter__`` method will make additional + ``ListPartitions`` requests and continue to iterate + through the ``partitions`` field on the + corresponding responses. + + All the usual :class:`google.cloud.dataplex_v1.types.ListPartitionsResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + def __init__(self, + method: Callable[..., Awaitable[metadata_.ListPartitionsResponse]], + request: metadata_.ListPartitionsRequest, + response: metadata_.ListPartitionsResponse, + *, + retry: OptionalAsyncRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = ()): + """Instantiates the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.dataplex_v1.types.ListPartitionsRequest): + The initial request object. + response (google.cloud.dataplex_v1.types.ListPartitionsResponse): + The initial response object. + retry (google.api_core.retry.AsyncRetry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ """ + self._method = method + self._request = metadata_.ListPartitionsRequest(request) + self._response = response + self._retry = retry + self._timeout = timeout + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + async def pages(self) -> AsyncIterator[metadata_.ListPartitionsResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = await self._method(self._request, retry=self._retry, timeout=self._timeout, metadata=self._metadata) + yield self._response + def __aiter__(self) -> AsyncIterator[metadata_.Partition]: + async def async_generator(): + async for page in self.pages: + for response in page.partitions: + yield response + + return async_generator() + + def __repr__(self) -> str: + return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) diff --git a/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/metadata_service/transports/README.rst b/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/metadata_service/transports/README.rst new file mode 100644 index 000000000000..ff25cadba5cb --- /dev/null +++ b/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/metadata_service/transports/README.rst @@ -0,0 +1,9 @@ + +transport inheritance structure +_______________________________ + +`MetadataServiceTransport` is the ABC for all transports. +- public child `MetadataServiceGrpcTransport` for sync gRPC transport (defined in `grpc.py`). +- public child `MetadataServiceGrpcAsyncIOTransport` for async gRPC transport (defined in `grpc_asyncio.py`). +- private child `_BaseMetadataServiceRestTransport` for base REST transport with inner classes `_BaseMETHOD` (defined in `rest_base.py`). +- public child `MetadataServiceRestTransport` for sync REST transport with inner classes `METHOD` derived from the parent's corresponding `_BaseMETHOD` classes (defined in `rest.py`). diff --git a/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/metadata_service/transports/__init__.py b/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/metadata_service/transports/__init__.py new file mode 100644 index 000000000000..74054de3d263 --- /dev/null +++ b/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/metadata_service/transports/__init__.py @@ -0,0 +1,33 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from collections import OrderedDict +from typing import Dict, Type + +from .base import MetadataServiceTransport +from .grpc import MetadataServiceGrpcTransport +from .grpc_asyncio import MetadataServiceGrpcAsyncIOTransport + + +# Compile a registry of transports. 
+_transport_registry = OrderedDict() # type: Dict[str, Type[MetadataServiceTransport]] +_transport_registry['grpc'] = MetadataServiceGrpcTransport +_transport_registry['grpc_asyncio'] = MetadataServiceGrpcAsyncIOTransport + +__all__ = ( + 'MetadataServiceTransport', + 'MetadataServiceGrpcTransport', + 'MetadataServiceGrpcAsyncIOTransport', +) diff --git a/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/metadata_service/transports/base.py b/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/metadata_service/transports/base.py new file mode 100644 index 000000000000..88a53d1b4543 --- /dev/null +++ b/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/metadata_service/transports/base.py @@ -0,0 +1,389 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +import abc +from typing import Awaitable, Callable, Dict, Optional, Sequence, Union + +from google.cloud.dataplex_v1 import gapic_version as package_version + +import google.auth # type: ignore +import google.api_core +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry as retries +from google.auth import credentials as ga_credentials # type: ignore +from google.oauth2 import service_account # type: ignore + +from google.cloud.dataplex_v1.types import metadata_ +from google.cloud.location import locations_pb2 # type: ignore +from google.iam.v1 import iam_policy_pb2 # type: ignore +from google.iam.v1 import policy_pb2 # type: ignore +from google.longrunning import operations_pb2 # type: ignore +from google.protobuf import empty_pb2 # type: ignore + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(gapic_version=package_version.__version__) + + +class MetadataServiceTransport(abc.ABC): + """Abstract transport class for MetadataService.""" + + AUTH_SCOPES = ( + 'https://www.googleapis.com/auth/cloud-platform', + ) + + DEFAULT_HOST: str = 'dataplex.googleapis.com' + def __init__( + self, *, + host: str = DEFAULT_HOST, + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + api_audience: Optional[str] = None, + **kwargs, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to (default: 'dataplex.googleapis.com'). + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. 
+                This argument is mutually exclusive with credentials.
+            scopes (Optional[Sequence[str]]): A list of scopes.
+            quota_project_id (Optional[str]): An optional project to use for billing
+                and quota.
+            client_info (google.api_core.gapic_v1.client_info.ClientInfo):
+                The client info used to send a user-agent string along with
+                API requests. If ``None``, then default info will be used.
+                Generally, you only need to set this if you're developing
+                your own client library.
+            always_use_jwt_access (Optional[bool]): Whether self-signed JWT should
+                be used for service account credentials.
+        """
+
+        scopes_kwargs = {"scopes": scopes, "default_scopes": self.AUTH_SCOPES}
+
+        # Save the scopes.
+        self._scopes = scopes
+        if not hasattr(self, "_ignore_credentials"):
+            self._ignore_credentials: bool = False
+
+        # If no credentials are provided, then determine the appropriate
+        # defaults.
+        if credentials and credentials_file:
+            raise core_exceptions.DuplicateCredentialArgs("'credentials_file' and 'credentials' are mutually exclusive")
+
+        if credentials_file is not None:
+            credentials, _ = google.auth.load_credentials_from_file(
+                credentials_file,
+                **scopes_kwargs,
+                quota_project_id=quota_project_id
+            )
+        elif credentials is None and not self._ignore_credentials:
+            credentials, _ = google.auth.default(**scopes_kwargs, quota_project_id=quota_project_id)
+            # Don't apply the audience if the user passed a credentials file.
+            if hasattr(credentials, "with_gdch_audience"):
+                credentials = credentials.with_gdch_audience(api_audience if api_audience else host)
+
+        # If the credentials are service account credentials, then always try to use self-signed JWT.
+        if always_use_jwt_access and isinstance(credentials, service_account.Credentials) and hasattr(service_account.Credentials, "with_always_use_jwt_access"):
+            credentials = credentials.with_always_use_jwt_access(True)
+
+        # Save the credentials.
+        self._credentials = credentials
+
+        # Save the hostname. Default to port 443 (HTTPS) if none is specified.
+        if ':' not in host:
+            host += ':443'
+        self._host = host
+
+    @property
+    def host(self):
+        return self._host
+
+    def _prep_wrapped_methods(self, client_info):
+        # Precompute the wrapped methods.
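+        # Each entry pairs an RPC with its default timeout and, where the
+        # service declares one, a default retry policy; these defaults apply
+        # whenever the caller leaves ``retry``/``timeout`` at
+        # ``gapic_v1.method.DEFAULT``.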
+ self._wrapped_methods = { + self.create_entity: gapic_v1.method.wrap_method( + self.create_entity, + default_timeout=60.0, + client_info=client_info, + ), + self.update_entity: gapic_v1.method.wrap_method( + self.update_entity, + default_timeout=60.0, + client_info=client_info, + ), + self.delete_entity: gapic_v1.method.wrap_method( + self.delete_entity, + default_timeout=60.0, + client_info=client_info, + ), + self.get_entity: gapic_v1.method.wrap_method( + self.get_entity, + default_retry=retries.Retry( + initial=1.0, + maximum=10.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=client_info, + ), + self.list_entities: gapic_v1.method.wrap_method( + self.list_entities, + default_retry=retries.Retry( + initial=1.0, + maximum=10.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=client_info, + ), + self.create_partition: gapic_v1.method.wrap_method( + self.create_partition, + default_timeout=60.0, + client_info=client_info, + ), + self.delete_partition: gapic_v1.method.wrap_method( + self.delete_partition, + default_timeout=60.0, + client_info=client_info, + ), + self.get_partition: gapic_v1.method.wrap_method( + self.get_partition, + default_retry=retries.Retry( + initial=1.0, + maximum=10.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=client_info, + ), + self.list_partitions: gapic_v1.method.wrap_method( + self.list_partitions, + default_retry=retries.Retry( + initial=1.0, + maximum=10.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=client_info, + ), + self.get_location: gapic_v1.method.wrap_method( + self.get_location, + default_timeout=None, + client_info=client_info, + ), + self.list_locations: gapic_v1.method.wrap_method( + self.list_locations, + default_timeout=None, + client_info=client_info, + ), + self.cancel_operation: gapic_v1.method.wrap_method( + self.cancel_operation, + default_timeout=None, + client_info=client_info, + ), + self.delete_operation: gapic_v1.method.wrap_method( + self.delete_operation, + default_timeout=None, + client_info=client_info, + ), + self.get_operation: gapic_v1.method.wrap_method( + self.get_operation, + default_timeout=None, + client_info=client_info, + ), + self.list_operations: gapic_v1.method.wrap_method( + self.list_operations, + default_timeout=None, + client_info=client_info, + ), + } + + def close(self): + """Closes resources associated with the transport. + + .. warning:: + Only call this method if the transport is NOT shared + with other clients - this may cause errors in other clients! 
+ """ + raise NotImplementedError() + + @property + def create_entity(self) -> Callable[ + [metadata_.CreateEntityRequest], + Union[ + metadata_.Entity, + Awaitable[metadata_.Entity] + ]]: + raise NotImplementedError() + + @property + def update_entity(self) -> Callable[ + [metadata_.UpdateEntityRequest], + Union[ + metadata_.Entity, + Awaitable[metadata_.Entity] + ]]: + raise NotImplementedError() + + @property + def delete_entity(self) -> Callable[ + [metadata_.DeleteEntityRequest], + Union[ + empty_pb2.Empty, + Awaitable[empty_pb2.Empty] + ]]: + raise NotImplementedError() + + @property + def get_entity(self) -> Callable[ + [metadata_.GetEntityRequest], + Union[ + metadata_.Entity, + Awaitable[metadata_.Entity] + ]]: + raise NotImplementedError() + + @property + def list_entities(self) -> Callable[ + [metadata_.ListEntitiesRequest], + Union[ + metadata_.ListEntitiesResponse, + Awaitable[metadata_.ListEntitiesResponse] + ]]: + raise NotImplementedError() + + @property + def create_partition(self) -> Callable[ + [metadata_.CreatePartitionRequest], + Union[ + metadata_.Partition, + Awaitable[metadata_.Partition] + ]]: + raise NotImplementedError() + + @property + def delete_partition(self) -> Callable[ + [metadata_.DeletePartitionRequest], + Union[ + empty_pb2.Empty, + Awaitable[empty_pb2.Empty] + ]]: + raise NotImplementedError() + + @property + def get_partition(self) -> Callable[ + [metadata_.GetPartitionRequest], + Union[ + metadata_.Partition, + Awaitable[metadata_.Partition] + ]]: + raise NotImplementedError() + + @property + def list_partitions(self) -> Callable[ + [metadata_.ListPartitionsRequest], + Union[ + metadata_.ListPartitionsResponse, + Awaitable[metadata_.ListPartitionsResponse] + ]]: + raise NotImplementedError() + + @property + def list_operations( + self, + ) -> Callable[ + [operations_pb2.ListOperationsRequest], + Union[operations_pb2.ListOperationsResponse, Awaitable[operations_pb2.ListOperationsResponse]], + ]: + raise NotImplementedError() + + @property + def get_operation( + self, + ) -> Callable[ + [operations_pb2.GetOperationRequest], + Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], + ]: + raise NotImplementedError() + + @property + def cancel_operation( + self, + ) -> Callable[ + [operations_pb2.CancelOperationRequest], + None, + ]: + raise NotImplementedError() + + @property + def delete_operation( + self, + ) -> Callable[ + [operations_pb2.DeleteOperationRequest], + None, + ]: + raise NotImplementedError() + + @property + def get_location(self, + ) -> Callable[ + [locations_pb2.GetLocationRequest], + Union[locations_pb2.Location, Awaitable[locations_pb2.Location]], + ]: + raise NotImplementedError() + + @property + def list_locations(self, + ) -> Callable[ + [locations_pb2.ListLocationsRequest], + Union[locations_pb2.ListLocationsResponse, Awaitable[locations_pb2.ListLocationsResponse]], + ]: + raise NotImplementedError() + + @property + def kind(self) -> str: + raise NotImplementedError() + + +__all__ = ( + 'MetadataServiceTransport', +) diff --git a/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/metadata_service/transports/grpc.py b/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/metadata_service/transports/grpc.py new file mode 100644 index 000000000000..b89c63165152 --- /dev/null +++ b/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/metadata_service/transports/grpc.py @@ -0,0 +1,593 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed 
under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+import warnings
+from typing import Callable, Dict, Optional, Sequence, Tuple, Union
+
+from google.api_core import grpc_helpers
+from google.api_core import gapic_v1
+import google.auth  # type: ignore
+from google.auth import credentials as ga_credentials  # type: ignore
+from google.auth.transport.grpc import SslCredentials  # type: ignore
+
+import grpc  # type: ignore
+
+from google.cloud.dataplex_v1.types import metadata_
+from google.cloud.location import locations_pb2  # type: ignore
+from google.iam.v1 import iam_policy_pb2  # type: ignore
+from google.iam.v1 import policy_pb2  # type: ignore
+from google.longrunning import operations_pb2  # type: ignore
+from google.protobuf import empty_pb2  # type: ignore
+from .base import MetadataServiceTransport, DEFAULT_CLIENT_INFO
+
+
+class MetadataServiceGrpcTransport(MetadataServiceTransport):
+    """gRPC backend transport for MetadataService.
+
+    Metadata service manages metadata resources such as tables,
+    filesets and partitions.
+
+    This class defines the same methods as the primary client, so the
+    primary client can load the underlying transport implementation
+    and call it.
+
+    It sends protocol buffers over the wire using gRPC (which is built on
+    top of HTTP/2); the ``grpcio`` package must be installed.
+    """
+    _stubs: Dict[str, Callable]
+
+    def __init__(self, *,
+            host: str = 'dataplex.googleapis.com',
+            credentials: Optional[ga_credentials.Credentials] = None,
+            credentials_file: Optional[str] = None,
+            scopes: Optional[Sequence[str]] = None,
+            channel: Optional[Union[grpc.Channel, Callable[..., grpc.Channel]]] = None,
+            api_mtls_endpoint: Optional[str] = None,
+            client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None,
+            ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None,
+            client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None,
+            quota_project_id: Optional[str] = None,
+            client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO,
+            always_use_jwt_access: Optional[bool] = False,
+            api_audience: Optional[str] = None,
+            ) -> None:
+        """Instantiate the transport.
+
+        Args:
+            host (Optional[str]):
+                 The hostname to connect to (default: 'dataplex.googleapis.com').
+            credentials (Optional[google.auth.credentials.Credentials]): The
+                authorization credentials to attach to requests. These
+                credentials identify the application to the service; if none
+                are specified, the client will attempt to ascertain the
+                credentials from the environment.
+                This argument is ignored if a ``channel`` instance is provided.
+            credentials_file (Optional[str]): A file with credentials that can
+                be loaded with :func:`google.auth.load_credentials_from_file`.
+                This argument is ignored if a ``channel`` instance is provided.
+            scopes (Optional[Sequence[str]]): A list of scopes. This argument is
+                ignored if a ``channel`` instance is provided.
+ channel (Optional[Union[grpc.Channel, Callable[..., grpc.Channel]]]): + A ``Channel`` instance through which to make calls, or a Callable + that constructs and returns one. If set to None, ``self.create_channel`` + is used to create the channel. If a Callable is given, it will be called + with the same arguments as used in ``self.create_channel``. + api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint. + If provided, it overrides the ``host`` argument and tries to create + a mutual TLS channel with client SSL credentials from + ``client_cert_source`` or application default SSL credentials. + client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]): + Deprecated. A callback to provide client SSL certificate bytes and + private key bytes, both in PEM format. It is ignored if + ``api_mtls_endpoint`` is None. + ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials + for the grpc channel. It is ignored if a ``channel`` instance is provided. + client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]): + A callback to provide client certificate bytes and private key bytes, + both in PEM format. It is used to configure a mutual TLS channel. It is + ignored if a ``channel`` instance or ``ssl_channel_credentials`` is provided. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport + creation failed for any reason. + google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` + and ``credentials_file`` are passed. + """ + self._grpc_channel = None + self._ssl_channel_credentials = ssl_channel_credentials + self._stubs: Dict[str, Callable] = {} + + if api_mtls_endpoint: + warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning) + if client_cert_source: + warnings.warn("client_cert_source is deprecated", DeprecationWarning) + + if isinstance(channel, grpc.Channel): + # Ignore credentials if a channel was passed. + credentials = None + self._ignore_credentials = True + # If a channel was explicitly provided, set it. + self._grpc_channel = channel + self._ssl_channel_credentials = None + + else: + if api_mtls_endpoint: + host = api_mtls_endpoint + + # Create SSL credentials with client_cert_source or application + # default SSL credentials. 
+                if client_cert_source:
+                    cert, key = client_cert_source()
+                    self._ssl_channel_credentials = grpc.ssl_channel_credentials(
+                        certificate_chain=cert, private_key=key
+                    )
+                else:
+                    self._ssl_channel_credentials = SslCredentials().ssl_credentials
+
+            else:
+                if client_cert_source_for_mtls and not ssl_channel_credentials:
+                    cert, key = client_cert_source_for_mtls()
+                    self._ssl_channel_credentials = grpc.ssl_channel_credentials(
+                        certificate_chain=cert, private_key=key
+                    )
+
+        # The base transport sets the host, credentials and scopes
+        super().__init__(
+            host=host,
+            credentials=credentials,
+            credentials_file=credentials_file,
+            scopes=scopes,
+            quota_project_id=quota_project_id,
+            client_info=client_info,
+            always_use_jwt_access=always_use_jwt_access,
+            api_audience=api_audience,
+        )
+
+        if not self._grpc_channel:
+            # initialize with the provided callable or the default channel
+            channel_init = channel or type(self).create_channel
+            self._grpc_channel = channel_init(
+                self._host,
+                # use the credentials which are saved
+                credentials=self._credentials,
+                # Set ``credentials_file`` to ``None`` here as
+                # the credentials that we saved earlier should be used.
+                credentials_file=None,
+                scopes=self._scopes,
+                ssl_credentials=self._ssl_channel_credentials,
+                quota_project_id=quota_project_id,
+                options=[
+                    ("grpc.max_send_message_length", -1),
+                    ("grpc.max_receive_message_length", -1),
+                ],
+            )
+
+        # Wrap messages. This must be done after self._grpc_channel exists
+        self._prep_wrapped_messages(client_info)
+
+    @classmethod
+    def create_channel(cls,
+                       host: str = 'dataplex.googleapis.com',
+                       credentials: Optional[ga_credentials.Credentials] = None,
+                       credentials_file: Optional[str] = None,
+                       scopes: Optional[Sequence[str]] = None,
+                       quota_project_id: Optional[str] = None,
+                       **kwargs) -> grpc.Channel:
+        """Create and return a gRPC channel object.
+        Args:
+            host (Optional[str]): The host for the channel to use.
+            credentials (Optional[~.Credentials]): The
+                authorization credentials to attach to requests. These
+                credentials identify this application to the service. If
+                none are specified, the client will attempt to ascertain
+                the credentials from the environment.
+            credentials_file (Optional[str]): A file with credentials that can
+                be loaded with :func:`google.auth.load_credentials_from_file`.
+                This argument is mutually exclusive with credentials.
+            scopes (Optional[Sequence[str]]): An optional list of scopes needed for this
+                service. These are only used when credentials are not specified and
+                are passed to :func:`google.auth.default`.
+            quota_project_id (Optional[str]): An optional project to use for billing
+                and quota.
+            kwargs (Optional[dict]): Keyword arguments, which are passed to the
+                channel creation.
+        Returns:
+            grpc.Channel: A gRPC channel object.
+
+        Raises:
+            google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials``
+                and ``credentials_file`` are passed.
+        """
+
+        return grpc_helpers.create_channel(
+            host,
+            credentials=credentials,
+            credentials_file=credentials_file,
+            quota_project_id=quota_project_id,
+            default_scopes=cls.AUTH_SCOPES,
+            scopes=scopes,
+            default_host=cls.DEFAULT_HOST,
+            **kwargs
+        )
+
+    @property
+    def grpc_channel(self) -> grpc.Channel:
+        """Return the channel designed to connect to this service.
+        """
+        return self._grpc_channel
+
+    @property
+    def create_entity(self) -> Callable[
+            [metadata_.CreateEntityRequest],
+            metadata_.Entity]:
+        r"""Return a callable for the create entity method over gRPC.
+
+        Create a metadata entity.
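+
+        Example (a minimal sketch through the public client; the resource
+        names and the Cloud Storage data path below are placeholders):
+
+        .. code-block:: python
+
+            from google.cloud import dataplex_v1
+
+            client = dataplex_v1.MetadataServiceClient()
+            entity = dataplex_v1.Entity(
+                id="my_table",
+                type_=dataplex_v1.Entity.Type.TABLE,
+                asset="my-asset",
+                data_path="gs://my-bucket/my-table/",
+                system=dataplex_v1.StorageSystem.CLOUD_STORAGE,
+                format_=dataplex_v1.StorageFormat(mime_type="application/x-parquet"),
+                schema=dataplex_v1.Schema(user_managed=True),
+            )
+            response = client.create_entity(
+                parent="projects/p/locations/l/lakes/lake/zones/zone",
+                entity=entity,
+            )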
+ + Returns: + Callable[[~.CreateEntityRequest], + ~.Entity]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'create_entity' not in self._stubs: + self._stubs['create_entity'] = self.grpc_channel.unary_unary( + '/google.cloud.dataplex.v1.MetadataService/CreateEntity', + request_serializer=metadata_.CreateEntityRequest.serialize, + response_deserializer=metadata_.Entity.deserialize, + ) + return self._stubs['create_entity'] + + @property + def update_entity(self) -> Callable[ + [metadata_.UpdateEntityRequest], + metadata_.Entity]: + r"""Return a callable for the update entity method over gRPC. + + Update a metadata entity. Only supports full resource + update. + + Returns: + Callable[[~.UpdateEntityRequest], + ~.Entity]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'update_entity' not in self._stubs: + self._stubs['update_entity'] = self.grpc_channel.unary_unary( + '/google.cloud.dataplex.v1.MetadataService/UpdateEntity', + request_serializer=metadata_.UpdateEntityRequest.serialize, + response_deserializer=metadata_.Entity.deserialize, + ) + return self._stubs['update_entity'] + + @property + def delete_entity(self) -> Callable[ + [metadata_.DeleteEntityRequest], + empty_pb2.Empty]: + r"""Return a callable for the delete entity method over gRPC. + + Delete a metadata entity. + + Returns: + Callable[[~.DeleteEntityRequest], + ~.Empty]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'delete_entity' not in self._stubs: + self._stubs['delete_entity'] = self.grpc_channel.unary_unary( + '/google.cloud.dataplex.v1.MetadataService/DeleteEntity', + request_serializer=metadata_.DeleteEntityRequest.serialize, + response_deserializer=empty_pb2.Empty.FromString, + ) + return self._stubs['delete_entity'] + + @property + def get_entity(self) -> Callable[ + [metadata_.GetEntityRequest], + metadata_.Entity]: + r"""Return a callable for the get entity method over gRPC. + + Get a metadata entity. + + Returns: + Callable[[~.GetEntityRequest], + ~.Entity]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'get_entity' not in self._stubs: + self._stubs['get_entity'] = self.grpc_channel.unary_unary( + '/google.cloud.dataplex.v1.MetadataService/GetEntity', + request_serializer=metadata_.GetEntityRequest.serialize, + response_deserializer=metadata_.Entity.deserialize, + ) + return self._stubs['get_entity'] + + @property + def list_entities(self) -> Callable[ + [metadata_.ListEntitiesRequest], + metadata_.ListEntitiesResponse]: + r"""Return a callable for the list entities method over gRPC. + + List metadata entities in a zone. 
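+
+        Example (a minimal sketch; the zone name is a placeholder and the
+        required ``view`` filter selects only tables):
+
+        .. code-block:: python
+
+            from google.cloud import dataplex_v1
+
+            client = dataplex_v1.MetadataServiceClient()
+            request = dataplex_v1.ListEntitiesRequest(
+                parent="projects/p/locations/l/lakes/lake/zones/zone",
+                view=dataplex_v1.ListEntitiesRequest.EntityView.TABLES,
+            )
+            for entity in client.list_entities(request=request):
+                print(entity.name)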
+ + Returns: + Callable[[~.ListEntitiesRequest], + ~.ListEntitiesResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'list_entities' not in self._stubs: + self._stubs['list_entities'] = self.grpc_channel.unary_unary( + '/google.cloud.dataplex.v1.MetadataService/ListEntities', + request_serializer=metadata_.ListEntitiesRequest.serialize, + response_deserializer=metadata_.ListEntitiesResponse.deserialize, + ) + return self._stubs['list_entities'] + + @property + def create_partition(self) -> Callable[ + [metadata_.CreatePartitionRequest], + metadata_.Partition]: + r"""Return a callable for the create partition method over gRPC. + + Create a metadata partition. + + Returns: + Callable[[~.CreatePartitionRequest], + ~.Partition]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'create_partition' not in self._stubs: + self._stubs['create_partition'] = self.grpc_channel.unary_unary( + '/google.cloud.dataplex.v1.MetadataService/CreatePartition', + request_serializer=metadata_.CreatePartitionRequest.serialize, + response_deserializer=metadata_.Partition.deserialize, + ) + return self._stubs['create_partition'] + + @property + def delete_partition(self) -> Callable[ + [metadata_.DeletePartitionRequest], + empty_pb2.Empty]: + r"""Return a callable for the delete partition method over gRPC. + + Delete a metadata partition. + + Returns: + Callable[[~.DeletePartitionRequest], + ~.Empty]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'delete_partition' not in self._stubs: + self._stubs['delete_partition'] = self.grpc_channel.unary_unary( + '/google.cloud.dataplex.v1.MetadataService/DeletePartition', + request_serializer=metadata_.DeletePartitionRequest.serialize, + response_deserializer=empty_pb2.Empty.FromString, + ) + return self._stubs['delete_partition'] + + @property + def get_partition(self) -> Callable[ + [metadata_.GetPartitionRequest], + metadata_.Partition]: + r"""Return a callable for the get partition method over gRPC. + + Get a metadata partition of an entity. + + Returns: + Callable[[~.GetPartitionRequest], + ~.Partition]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
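+        # The stub is created lazily on first access and cached in
+        # self._stubs, so later reads of this property reuse it.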
+ if 'get_partition' not in self._stubs: + self._stubs['get_partition'] = self.grpc_channel.unary_unary( + '/google.cloud.dataplex.v1.MetadataService/GetPartition', + request_serializer=metadata_.GetPartitionRequest.serialize, + response_deserializer=metadata_.Partition.deserialize, + ) + return self._stubs['get_partition'] + + @property + def list_partitions(self) -> Callable[ + [metadata_.ListPartitionsRequest], + metadata_.ListPartitionsResponse]: + r"""Return a callable for the list partitions method over gRPC. + + List metadata partitions of an entity. + + Returns: + Callable[[~.ListPartitionsRequest], + ~.ListPartitionsResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'list_partitions' not in self._stubs: + self._stubs['list_partitions'] = self.grpc_channel.unary_unary( + '/google.cloud.dataplex.v1.MetadataService/ListPartitions', + request_serializer=metadata_.ListPartitionsRequest.serialize, + response_deserializer=metadata_.ListPartitionsResponse.deserialize, + ) + return self._stubs['list_partitions'] + + def close(self): + self.grpc_channel.close() + + @property + def delete_operation( + self, + ) -> Callable[[operations_pb2.DeleteOperationRequest], None]: + r"""Return a callable for the delete_operation method over gRPC. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "delete_operation" not in self._stubs: + self._stubs["delete_operation"] = self.grpc_channel.unary_unary( + "/google.longrunning.Operations/DeleteOperation", + request_serializer=operations_pb2.DeleteOperationRequest.SerializeToString, + response_deserializer=None, + ) + return self._stubs["delete_operation"] + + @property + def cancel_operation( + self, + ) -> Callable[[operations_pb2.CancelOperationRequest], None]: + r"""Return a callable for the cancel_operation method over gRPC. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "cancel_operation" not in self._stubs: + self._stubs["cancel_operation"] = self.grpc_channel.unary_unary( + "/google.longrunning.Operations/CancelOperation", + request_serializer=operations_pb2.CancelOperationRequest.SerializeToString, + response_deserializer=None, + ) + return self._stubs["cancel_operation"] + + @property + def get_operation( + self, + ) -> Callable[[operations_pb2.GetOperationRequest], operations_pb2.Operation]: + r"""Return a callable for the get_operation method over gRPC. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "get_operation" not in self._stubs: + self._stubs["get_operation"] = self.grpc_channel.unary_unary( + "/google.longrunning.Operations/GetOperation", + request_serializer=operations_pb2.GetOperationRequest.SerializeToString, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["get_operation"] + + @property + def list_operations( + self, + ) -> Callable[[operations_pb2.ListOperationsRequest], operations_pb2.ListOperationsResponse]: + r"""Return a callable for the list_operations method over gRPC. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "list_operations" not in self._stubs: + self._stubs["list_operations"] = self.grpc_channel.unary_unary( + "/google.longrunning.Operations/ListOperations", + request_serializer=operations_pb2.ListOperationsRequest.SerializeToString, + response_deserializer=operations_pb2.ListOperationsResponse.FromString, + ) + return self._stubs["list_operations"] + + @property + def list_locations( + self, + ) -> Callable[[locations_pb2.ListLocationsRequest], locations_pb2.ListLocationsResponse]: + r"""Return a callable for the list locations method over gRPC. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "list_locations" not in self._stubs: + self._stubs["list_locations"] = self.grpc_channel.unary_unary( + "/google.cloud.location.Locations/ListLocations", + request_serializer=locations_pb2.ListLocationsRequest.SerializeToString, + response_deserializer=locations_pb2.ListLocationsResponse.FromString, + ) + return self._stubs["list_locations"] + + @property + def get_location( + self, + ) -> Callable[[locations_pb2.GetLocationRequest], locations_pb2.Location]: + r"""Return a callable for the list locations method over gRPC. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_location" not in self._stubs: + self._stubs["get_location"] = self.grpc_channel.unary_unary( + "/google.cloud.location.Locations/GetLocation", + request_serializer=locations_pb2.GetLocationRequest.SerializeToString, + response_deserializer=locations_pb2.Location.FromString, + ) + return self._stubs["get_location"] + + @property + def kind(self) -> str: + return "grpc" + + +__all__ = ( + 'MetadataServiceGrpcTransport', +) diff --git a/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/metadata_service/transports/grpc_asyncio.py b/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/metadata_service/transports/grpc_asyncio.py new file mode 100644 index 000000000000..188524e825f9 --- /dev/null +++ b/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/metadata_service/transports/grpc_asyncio.py @@ -0,0 +1,720 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+import inspect
+import warnings
+from typing import Awaitable, Callable, Dict, Optional, Sequence, Tuple, Union
+
+from google.api_core import gapic_v1
+from google.api_core import grpc_helpers_async
+from google.api_core import exceptions as core_exceptions
+from google.api_core import retry_async as retries
+from google.auth import credentials as ga_credentials  # type: ignore
+from google.auth.transport.grpc import SslCredentials  # type: ignore
+
+import grpc  # type: ignore
+from grpc.experimental import aio  # type: ignore
+
+from google.cloud.dataplex_v1.types import metadata_
+from google.cloud.location import locations_pb2  # type: ignore
+from google.iam.v1 import iam_policy_pb2  # type: ignore
+from google.iam.v1 import policy_pb2  # type: ignore
+from google.longrunning import operations_pb2  # type: ignore
+from google.protobuf import empty_pb2  # type: ignore
+from .base import MetadataServiceTransport, DEFAULT_CLIENT_INFO
+from .grpc import MetadataServiceGrpcTransport
+
+
+class MetadataServiceGrpcAsyncIOTransport(MetadataServiceTransport):
+    """gRPC AsyncIO backend transport for MetadataService.
+
+    Metadata service manages metadata resources such as tables,
+    filesets and partitions.
+
+    This class defines the same methods as the primary client, so the
+    primary client can load the underlying transport implementation
+    and call it.
+
+    It sends protocol buffers over the wire using gRPC (which is built on
+    top of HTTP/2); the ``grpcio`` package must be installed.
+    """
+
+    _grpc_channel: aio.Channel
+    _stubs: Dict[str, Callable] = {}
+
+    @classmethod
+    def create_channel(cls,
+                       host: str = 'dataplex.googleapis.com',
+                       credentials: Optional[ga_credentials.Credentials] = None,
+                       credentials_file: Optional[str] = None,
+                       scopes: Optional[Sequence[str]] = None,
+                       quota_project_id: Optional[str] = None,
+                       **kwargs) -> aio.Channel:
+        """Create and return a gRPC AsyncIO channel object.
+        Args:
+            host (Optional[str]): The host for the channel to use.
+            credentials (Optional[~.Credentials]): The
+                authorization credentials to attach to requests. These
+                credentials identify this application to the service. If
+                none are specified, the client will attempt to ascertain
+                the credentials from the environment.
+            credentials_file (Optional[str]): A file with credentials that can
+                be loaded with :func:`google.auth.load_credentials_from_file`.
+            scopes (Optional[Sequence[str]]): An optional list of scopes needed for this
+                service. These are only used when credentials are not specified and
+                are passed to :func:`google.auth.default`.
+            quota_project_id (Optional[str]): An optional project to use for billing
+                and quota.
+            kwargs (Optional[dict]): Keyword arguments, which are passed to the
+                channel creation.
+        Returns:
+            aio.Channel: A gRPC AsyncIO channel object.
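+
+        Example (an illustrative sketch; assumes application default
+        credentials are available in the environment):
+
+        .. code-block:: python
+
+            channel = MetadataServiceGrpcAsyncIOTransport.create_channel(
+                "dataplex.googleapis.com",
+            )
+            transport = MetadataServiceGrpcAsyncIOTransport(channel=channel)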
+ """ + + return grpc_helpers_async.create_channel( + host, + credentials=credentials, + credentials_file=credentials_file, + quota_project_id=quota_project_id, + default_scopes=cls.AUTH_SCOPES, + scopes=scopes, + default_host=cls.DEFAULT_HOST, + **kwargs + ) + + def __init__(self, *, + host: str = 'dataplex.googleapis.com', + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + channel: Optional[Union[aio.Channel, Callable[..., aio.Channel]]] = None, + api_mtls_endpoint: Optional[str] = None, + client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None, + client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + api_audience: Optional[str] = None, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to (default: 'dataplex.googleapis.com'). + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + This argument is ignored if a ``channel`` instance is provided. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if a ``channel`` instance is provided. + scopes (Optional[Sequence[str]]): A optional list of scopes needed for this + service. These are only used when credentials are not specified and + are passed to :func:`google.auth.default`. + channel (Optional[Union[aio.Channel, Callable[..., aio.Channel]]]): + A ``Channel`` instance through which to make calls, or a Callable + that constructs and returns one. If set to None, ``self.create_channel`` + is used to create the channel. If a Callable is given, it will be called + with the same arguments as used in ``self.create_channel``. + api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint. + If provided, it overrides the ``host`` argument and tries to create + a mutual TLS channel with client SSL credentials from + ``client_cert_source`` or application default SSL credentials. + client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]): + Deprecated. A callback to provide client SSL certificate bytes and + private key bytes, both in PEM format. It is ignored if + ``api_mtls_endpoint`` is None. + ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials + for the grpc channel. It is ignored if a ``channel`` instance is provided. + client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]): + A callback to provide client certificate bytes and private key bytes, + both in PEM format. It is used to configure a mutual TLS channel. It is + ignored if a ``channel`` instance or ``ssl_channel_credentials`` is provided. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. 
+                Generally, you only need to set this if you're developing
+                your own client library.
+            always_use_jwt_access (Optional[bool]): Whether self signed JWT should
+                be used for service account credentials.
+
+        Raises:
+            google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport
+                creation failed for any reason.
+            google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials``
+                and ``credentials_file`` are passed.
+        """
+        self._grpc_channel = None
+        self._ssl_channel_credentials = ssl_channel_credentials
+        self._stubs: Dict[str, Callable] = {}
+
+        if api_mtls_endpoint:
+            warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning)
+        if client_cert_source:
+            warnings.warn("client_cert_source is deprecated", DeprecationWarning)
+
+        if isinstance(channel, aio.Channel):
+            # Ignore credentials if a channel was passed.
+            credentials = None
+            self._ignore_credentials = True
+            # If a channel was explicitly provided, set it.
+            self._grpc_channel = channel
+            self._ssl_channel_credentials = None
+        else:
+            if api_mtls_endpoint:
+                host = api_mtls_endpoint
+
+                # Create SSL credentials with client_cert_source or application
+                # default SSL credentials.
+                if client_cert_source:
+                    cert, key = client_cert_source()
+                    self._ssl_channel_credentials = grpc.ssl_channel_credentials(
+                        certificate_chain=cert, private_key=key
+                    )
+                else:
+                    self._ssl_channel_credentials = SslCredentials().ssl_credentials
+
+            else:
+                if client_cert_source_for_mtls and not ssl_channel_credentials:
+                    cert, key = client_cert_source_for_mtls()
+                    self._ssl_channel_credentials = grpc.ssl_channel_credentials(
+                        certificate_chain=cert, private_key=key
+                    )
+
+        # The base transport sets the host, credentials and scopes
+        super().__init__(
+            host=host,
+            credentials=credentials,
+            credentials_file=credentials_file,
+            scopes=scopes,
+            quota_project_id=quota_project_id,
+            client_info=client_info,
+            always_use_jwt_access=always_use_jwt_access,
+            api_audience=api_audience,
+        )
+
+        if not self._grpc_channel:
+            # initialize with the provided callable or the default channel
+            channel_init = channel or type(self).create_channel
+            self._grpc_channel = channel_init(
+                self._host,
+                # use the credentials which are saved
+                credentials=self._credentials,
+                # Set ``credentials_file`` to ``None`` here as
+                # the credentials that we saved earlier should be used.
+                credentials_file=None,
+                scopes=self._scopes,
+                ssl_credentials=self._ssl_channel_credentials,
+                quota_project_id=quota_project_id,
+                options=[
+                    ("grpc.max_send_message_length", -1),
+                    ("grpc.max_receive_message_length", -1),
+                ],
+            )
+
+        # Wrap messages. This must be done after self._grpc_channel exists
+        self._wrap_with_kind = "kind" in inspect.signature(gapic_v1.method_async.wrap_method).parameters
+        self._prep_wrapped_messages(client_info)
+
+    @property
+    def grpc_channel(self) -> aio.Channel:
+        """Create the channel designed to connect to this service.
+
+        This property caches on the instance; repeated calls return
+        the same channel.
+        """
+        # Return the channel from cache.
+        return self._grpc_channel
+
+    @property
+    def create_entity(self) -> Callable[
+            [metadata_.CreateEntityRequest],
+            Awaitable[metadata_.Entity]]:
+        r"""Return a callable for the create entity method over gRPC.
+
+        Create a metadata entity.
+
+        Returns:
+            Callable[[~.CreateEntityRequest],
+                Awaitable[~.Entity]]:
+            A function that, when called, will call the underlying RPC
+            on the server.
+        """
+        # Generate a "stub function" on-the-fly which will actually make
+        # the request.
+ # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'create_entity' not in self._stubs: + self._stubs['create_entity'] = self.grpc_channel.unary_unary( + '/google.cloud.dataplex.v1.MetadataService/CreateEntity', + request_serializer=metadata_.CreateEntityRequest.serialize, + response_deserializer=metadata_.Entity.deserialize, + ) + return self._stubs['create_entity'] + + @property + def update_entity(self) -> Callable[ + [metadata_.UpdateEntityRequest], + Awaitable[metadata_.Entity]]: + r"""Return a callable for the update entity method over gRPC. + + Update a metadata entity. Only supports full resource + update. + + Returns: + Callable[[~.UpdateEntityRequest], + Awaitable[~.Entity]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'update_entity' not in self._stubs: + self._stubs['update_entity'] = self.grpc_channel.unary_unary( + '/google.cloud.dataplex.v1.MetadataService/UpdateEntity', + request_serializer=metadata_.UpdateEntityRequest.serialize, + response_deserializer=metadata_.Entity.deserialize, + ) + return self._stubs['update_entity'] + + @property + def delete_entity(self) -> Callable[ + [metadata_.DeleteEntityRequest], + Awaitable[empty_pb2.Empty]]: + r"""Return a callable for the delete entity method over gRPC. + + Delete a metadata entity. + + Returns: + Callable[[~.DeleteEntityRequest], + Awaitable[~.Empty]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'delete_entity' not in self._stubs: + self._stubs['delete_entity'] = self.grpc_channel.unary_unary( + '/google.cloud.dataplex.v1.MetadataService/DeleteEntity', + request_serializer=metadata_.DeleteEntityRequest.serialize, + response_deserializer=empty_pb2.Empty.FromString, + ) + return self._stubs['delete_entity'] + + @property + def get_entity(self) -> Callable[ + [metadata_.GetEntityRequest], + Awaitable[metadata_.Entity]]: + r"""Return a callable for the get entity method over gRPC. + + Get a metadata entity. + + Returns: + Callable[[~.GetEntityRequest], + Awaitable[~.Entity]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'get_entity' not in self._stubs: + self._stubs['get_entity'] = self.grpc_channel.unary_unary( + '/google.cloud.dataplex.v1.MetadataService/GetEntity', + request_serializer=metadata_.GetEntityRequest.serialize, + response_deserializer=metadata_.Entity.deserialize, + ) + return self._stubs['get_entity'] + + @property + def list_entities(self) -> Callable[ + [metadata_.ListEntitiesRequest], + Awaitable[metadata_.ListEntitiesResponse]]: + r"""Return a callable for the list entities method over gRPC. + + List metadata entities in a zone. + + Returns: + Callable[[~.ListEntitiesRequest], + Awaitable[~.ListEntitiesResponse]]: + A function that, when called, will call the underlying RPC + on the server. 
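+
+        Example (a minimal async sketch; the zone name is a placeholder and
+        the required ``view`` filter selects only tables):
+
+        .. code-block:: python
+
+            from google.cloud import dataplex_v1
+
+            async def page_entities():
+                client = dataplex_v1.MetadataServiceAsyncClient()
+                request = dataplex_v1.ListEntitiesRequest(
+                    parent="projects/p/locations/l/lakes/lake/zones/zone",
+                    view=dataplex_v1.ListEntitiesRequest.EntityView.TABLES,
+                )
+                pager = await client.list_entities(request=request)
+                async for entity in pager:
+                    print(entity.name)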
+ """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'list_entities' not in self._stubs: + self._stubs['list_entities'] = self.grpc_channel.unary_unary( + '/google.cloud.dataplex.v1.MetadataService/ListEntities', + request_serializer=metadata_.ListEntitiesRequest.serialize, + response_deserializer=metadata_.ListEntitiesResponse.deserialize, + ) + return self._stubs['list_entities'] + + @property + def create_partition(self) -> Callable[ + [metadata_.CreatePartitionRequest], + Awaitable[metadata_.Partition]]: + r"""Return a callable for the create partition method over gRPC. + + Create a metadata partition. + + Returns: + Callable[[~.CreatePartitionRequest], + Awaitable[~.Partition]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'create_partition' not in self._stubs: + self._stubs['create_partition'] = self.grpc_channel.unary_unary( + '/google.cloud.dataplex.v1.MetadataService/CreatePartition', + request_serializer=metadata_.CreatePartitionRequest.serialize, + response_deserializer=metadata_.Partition.deserialize, + ) + return self._stubs['create_partition'] + + @property + def delete_partition(self) -> Callable[ + [metadata_.DeletePartitionRequest], + Awaitable[empty_pb2.Empty]]: + r"""Return a callable for the delete partition method over gRPC. + + Delete a metadata partition. + + Returns: + Callable[[~.DeletePartitionRequest], + Awaitable[~.Empty]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'delete_partition' not in self._stubs: + self._stubs['delete_partition'] = self.grpc_channel.unary_unary( + '/google.cloud.dataplex.v1.MetadataService/DeletePartition', + request_serializer=metadata_.DeletePartitionRequest.serialize, + response_deserializer=empty_pb2.Empty.FromString, + ) + return self._stubs['delete_partition'] + + @property + def get_partition(self) -> Callable[ + [metadata_.GetPartitionRequest], + Awaitable[metadata_.Partition]]: + r"""Return a callable for the get partition method over gRPC. + + Get a metadata partition of an entity. + + Returns: + Callable[[~.GetPartitionRequest], + Awaitable[~.Partition]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'get_partition' not in self._stubs: + self._stubs['get_partition'] = self.grpc_channel.unary_unary( + '/google.cloud.dataplex.v1.MetadataService/GetPartition', + request_serializer=metadata_.GetPartitionRequest.serialize, + response_deserializer=metadata_.Partition.deserialize, + ) + return self._stubs['get_partition'] + + @property + def list_partitions(self) -> Callable[ + [metadata_.ListPartitionsRequest], + Awaitable[metadata_.ListPartitionsResponse]]: + r"""Return a callable for the list partitions method over gRPC. + + List metadata partitions of an entity. 
+ + Returns: + Callable[[~.ListPartitionsRequest], + Awaitable[~.ListPartitionsResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'list_partitions' not in self._stubs: + self._stubs['list_partitions'] = self.grpc_channel.unary_unary( + '/google.cloud.dataplex.v1.MetadataService/ListPartitions', + request_serializer=metadata_.ListPartitionsRequest.serialize, + response_deserializer=metadata_.ListPartitionsResponse.deserialize, + ) + return self._stubs['list_partitions'] + + def _prep_wrapped_messages(self, client_info): + """ Precompute the wrapped methods, overriding the base class method to use async wrappers.""" + self._wrapped_methods = { + self.create_entity: self._wrap_method( + self.create_entity, + default_timeout=60.0, + client_info=client_info, + ), + self.update_entity: self._wrap_method( + self.update_entity, + default_timeout=60.0, + client_info=client_info, + ), + self.delete_entity: self._wrap_method( + self.delete_entity, + default_timeout=60.0, + client_info=client_info, + ), + self.get_entity: self._wrap_method( + self.get_entity, + default_retry=retries.AsyncRetry( + initial=1.0, + maximum=10.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=client_info, + ), + self.list_entities: self._wrap_method( + self.list_entities, + default_retry=retries.AsyncRetry( + initial=1.0, + maximum=10.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=client_info, + ), + self.create_partition: self._wrap_method( + self.create_partition, + default_timeout=60.0, + client_info=client_info, + ), + self.delete_partition: self._wrap_method( + self.delete_partition, + default_timeout=60.0, + client_info=client_info, + ), + self.get_partition: self._wrap_method( + self.get_partition, + default_retry=retries.AsyncRetry( + initial=1.0, + maximum=10.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=client_info, + ), + self.list_partitions: self._wrap_method( + self.list_partitions, + default_retry=retries.AsyncRetry( + initial=1.0, + maximum=10.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=client_info, + ), + self.get_location: self._wrap_method( + self.get_location, + default_timeout=None, + client_info=client_info, + ), + self.list_locations: self._wrap_method( + self.list_locations, + default_timeout=None, + client_info=client_info, + ), + self.cancel_operation: self._wrap_method( + self.cancel_operation, + default_timeout=None, + client_info=client_info, + ), + self.delete_operation: self._wrap_method( + self.delete_operation, + default_timeout=None, + client_info=client_info, + ), + self.get_operation: self._wrap_method( + self.get_operation, + default_timeout=None, + client_info=client_info, + ), + self.list_operations: self._wrap_method( + self.list_operations, + default_timeout=None, + client_info=client_info, + ), + } + + def _wrap_method(self, func, *args, **kwargs): + if self._wrap_with_kind: # pragma: NO 
COVER + kwargs["kind"] = self.kind + return gapic_v1.method_async.wrap_method(func, *args, **kwargs) + + def close(self): + return self.grpc_channel.close() + + @property + def kind(self) -> str: + return "grpc_asyncio" + + @property + def delete_operation( + self, + ) -> Callable[[operations_pb2.DeleteOperationRequest], None]: + r"""Return a callable for the delete_operation method over gRPC. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "delete_operation" not in self._stubs: + self._stubs["delete_operation"] = self.grpc_channel.unary_unary( + "/google.longrunning.Operations/DeleteOperation", + request_serializer=operations_pb2.DeleteOperationRequest.SerializeToString, + response_deserializer=None, + ) + return self._stubs["delete_operation"] + + @property + def cancel_operation( + self, + ) -> Callable[[operations_pb2.CancelOperationRequest], None]: + r"""Return a callable for the cancel_operation method over gRPC. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "cancel_operation" not in self._stubs: + self._stubs["cancel_operation"] = self.grpc_channel.unary_unary( + "/google.longrunning.Operations/CancelOperation", + request_serializer=operations_pb2.CancelOperationRequest.SerializeToString, + response_deserializer=None, + ) + return self._stubs["cancel_operation"] + + @property + def get_operation( + self, + ) -> Callable[[operations_pb2.GetOperationRequest], operations_pb2.Operation]: + r"""Return a callable for the get_operation method over gRPC. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_operation" not in self._stubs: + self._stubs["get_operation"] = self.grpc_channel.unary_unary( + "/google.longrunning.Operations/GetOperation", + request_serializer=operations_pb2.GetOperationRequest.SerializeToString, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["get_operation"] + + @property + def list_operations( + self, + ) -> Callable[[operations_pb2.ListOperationsRequest], operations_pb2.ListOperationsResponse]: + r"""Return a callable for the list_operations method over gRPC. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "list_operations" not in self._stubs: + self._stubs["list_operations"] = self.grpc_channel.unary_unary( + "/google.longrunning.Operations/ListOperations", + request_serializer=operations_pb2.ListOperationsRequest.SerializeToString, + response_deserializer=operations_pb2.ListOperationsResponse.FromString, + ) + return self._stubs["list_operations"] + + @property + def list_locations( + self, + ) -> Callable[[locations_pb2.ListLocationsRequest], locations_pb2.ListLocationsResponse]: + r"""Return a callable for the list locations method over gRPC. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
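+        # Note: this stub targets the standalone Locations mixin service, not
+        # MetadataService itself; it still travels over the same channel.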
+ if "list_locations" not in self._stubs: + self._stubs["list_locations"] = self.grpc_channel.unary_unary( + "/google.cloud.location.Locations/ListLocations", + request_serializer=locations_pb2.ListLocationsRequest.SerializeToString, + response_deserializer=locations_pb2.ListLocationsResponse.FromString, + ) + return self._stubs["list_locations"] + + @property + def get_location( + self, + ) -> Callable[[locations_pb2.GetLocationRequest], locations_pb2.Location]: + r"""Return a callable for the list locations method over gRPC. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_location" not in self._stubs: + self._stubs["get_location"] = self.grpc_channel.unary_unary( + "/google.cloud.location.Locations/GetLocation", + request_serializer=locations_pb2.GetLocationRequest.SerializeToString, + response_deserializer=locations_pb2.Location.FromString, + ) + return self._stubs["get_location"] + + +__all__ = ( + 'MetadataServiceGrpcAsyncIOTransport', +) diff --git a/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/types/__init__.py b/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/types/__init__.py new file mode 100644 index 000000000000..99c2580b712c --- /dev/null +++ b/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/types/__init__.py @@ -0,0 +1,408 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from .analyze import ( + Content, + Environment, + Session, +) +from .catalog import ( + Aspect, + AspectSource, + AspectType, + CancelMetadataJobRequest, + CreateAspectTypeRequest, + CreateEntryGroupRequest, + CreateEntryRequest, + CreateEntryTypeRequest, + CreateMetadataJobRequest, + DeleteAspectTypeRequest, + DeleteEntryGroupRequest, + DeleteEntryRequest, + DeleteEntryTypeRequest, + Entry, + EntryGroup, + EntrySource, + EntryType, + GetAspectTypeRequest, + GetEntryGroupRequest, + GetEntryRequest, + GetEntryTypeRequest, + GetMetadataJobRequest, + ImportItem, + ListAspectTypesRequest, + ListAspectTypesResponse, + ListEntriesRequest, + ListEntriesResponse, + ListEntryGroupsRequest, + ListEntryGroupsResponse, + ListEntryTypesRequest, + ListEntryTypesResponse, + ListMetadataJobsRequest, + ListMetadataJobsResponse, + LookupEntryRequest, + MetadataJob, + SearchEntriesRequest, + SearchEntriesResponse, + SearchEntriesResult, + UpdateAspectTypeRequest, + UpdateEntryGroupRequest, + UpdateEntryRequest, + UpdateEntryTypeRequest, + EntryView, + TransferStatus, +) +from .content import ( + CreateContentRequest, + DeleteContentRequest, + GetContentRequest, + ListContentRequest, + ListContentResponse, + UpdateContentRequest, +) +from .data_discovery import ( + DataDiscoveryResult, + DataDiscoverySpec, +) +from .data_profile import ( + DataProfileResult, + DataProfileSpec, +) +from .data_quality import ( + DataQualityColumnResult, + DataQualityDimension, + DataQualityDimensionResult, + DataQualityResult, + DataQualityRule, + DataQualityRuleResult, + DataQualitySpec, +) +from .data_taxonomy import ( + CreateDataAttributeBindingRequest, + CreateDataAttributeRequest, + CreateDataTaxonomyRequest, + DataAttribute, + DataAttributeBinding, + DataTaxonomy, + DeleteDataAttributeBindingRequest, + DeleteDataAttributeRequest, + DeleteDataTaxonomyRequest, + GetDataAttributeBindingRequest, + GetDataAttributeRequest, + GetDataTaxonomyRequest, + ListDataAttributeBindingsRequest, + ListDataAttributeBindingsResponse, + ListDataAttributesRequest, + ListDataAttributesResponse, + ListDataTaxonomiesRequest, + ListDataTaxonomiesResponse, + UpdateDataAttributeBindingRequest, + UpdateDataAttributeRequest, + UpdateDataTaxonomyRequest, +) +from .datascans import ( + CreateDataScanRequest, + DataScan, + DataScanJob, + DeleteDataScanRequest, + GenerateDataQualityRulesRequest, + GenerateDataQualityRulesResponse, + GetDataScanJobRequest, + GetDataScanRequest, + ListDataScanJobsRequest, + ListDataScanJobsResponse, + ListDataScansRequest, + ListDataScansResponse, + RunDataScanRequest, + RunDataScanResponse, + UpdateDataScanRequest, + DataScanType, +) +from .logs import ( + DataQualityScanRuleResult, + DataScanEvent, + DiscoveryEvent, + GovernanceEvent, + JobEvent, + SessionEvent, +) +from .metadata_ import ( + CreateEntityRequest, + CreatePartitionRequest, + DeleteEntityRequest, + DeletePartitionRequest, + Entity, + GetEntityRequest, + GetPartitionRequest, + ListEntitiesRequest, + ListEntitiesResponse, + ListPartitionsRequest, + ListPartitionsResponse, + Partition, + Schema, + StorageAccess, + StorageFormat, + UpdateEntityRequest, + StorageSystem, +) +from .processing import ( + DataSource, + ScannedData, + Trigger, +) +from .resources import ( + Action, + Asset, + AssetStatus, + Lake, + Zone, + State, +) +from .security import ( + DataAccessSpec, + ResourceAccessSpec, +) +from .service import ( + CancelJobRequest, + CreateAssetRequest, + CreateEnvironmentRequest, + CreateLakeRequest, + CreateTaskRequest, + CreateZoneRequest, + 
DeleteAssetRequest, + DeleteEnvironmentRequest, + DeleteLakeRequest, + DeleteTaskRequest, + DeleteZoneRequest, + GetAssetRequest, + GetEnvironmentRequest, + GetJobRequest, + GetLakeRequest, + GetTaskRequest, + GetZoneRequest, + ListActionsResponse, + ListAssetActionsRequest, + ListAssetsRequest, + ListAssetsResponse, + ListEnvironmentsRequest, + ListEnvironmentsResponse, + ListJobsRequest, + ListJobsResponse, + ListLakeActionsRequest, + ListLakesRequest, + ListLakesResponse, + ListSessionsRequest, + ListSessionsResponse, + ListTasksRequest, + ListTasksResponse, + ListZoneActionsRequest, + ListZonesRequest, + ListZonesResponse, + OperationMetadata, + RunTaskRequest, + RunTaskResponse, + UpdateAssetRequest, + UpdateEnvironmentRequest, + UpdateLakeRequest, + UpdateTaskRequest, + UpdateZoneRequest, +) +from .tasks import ( + Job, + Task, +) + +__all__ = ( + 'Content', + 'Environment', + 'Session', + 'Aspect', + 'AspectSource', + 'AspectType', + 'CancelMetadataJobRequest', + 'CreateAspectTypeRequest', + 'CreateEntryGroupRequest', + 'CreateEntryRequest', + 'CreateEntryTypeRequest', + 'CreateMetadataJobRequest', + 'DeleteAspectTypeRequest', + 'DeleteEntryGroupRequest', + 'DeleteEntryRequest', + 'DeleteEntryTypeRequest', + 'Entry', + 'EntryGroup', + 'EntrySource', + 'EntryType', + 'GetAspectTypeRequest', + 'GetEntryGroupRequest', + 'GetEntryRequest', + 'GetEntryTypeRequest', + 'GetMetadataJobRequest', + 'ImportItem', + 'ListAspectTypesRequest', + 'ListAspectTypesResponse', + 'ListEntriesRequest', + 'ListEntriesResponse', + 'ListEntryGroupsRequest', + 'ListEntryGroupsResponse', + 'ListEntryTypesRequest', + 'ListEntryTypesResponse', + 'ListMetadataJobsRequest', + 'ListMetadataJobsResponse', + 'LookupEntryRequest', + 'MetadataJob', + 'SearchEntriesRequest', + 'SearchEntriesResponse', + 'SearchEntriesResult', + 'UpdateAspectTypeRequest', + 'UpdateEntryGroupRequest', + 'UpdateEntryRequest', + 'UpdateEntryTypeRequest', + 'EntryView', + 'TransferStatus', + 'CreateContentRequest', + 'DeleteContentRequest', + 'GetContentRequest', + 'ListContentRequest', + 'ListContentResponse', + 'UpdateContentRequest', + 'DataDiscoveryResult', + 'DataDiscoverySpec', + 'DataProfileResult', + 'DataProfileSpec', + 'DataQualityColumnResult', + 'DataQualityDimension', + 'DataQualityDimensionResult', + 'DataQualityResult', + 'DataQualityRule', + 'DataQualityRuleResult', + 'DataQualitySpec', + 'CreateDataAttributeBindingRequest', + 'CreateDataAttributeRequest', + 'CreateDataTaxonomyRequest', + 'DataAttribute', + 'DataAttributeBinding', + 'DataTaxonomy', + 'DeleteDataAttributeBindingRequest', + 'DeleteDataAttributeRequest', + 'DeleteDataTaxonomyRequest', + 'GetDataAttributeBindingRequest', + 'GetDataAttributeRequest', + 'GetDataTaxonomyRequest', + 'ListDataAttributeBindingsRequest', + 'ListDataAttributeBindingsResponse', + 'ListDataAttributesRequest', + 'ListDataAttributesResponse', + 'ListDataTaxonomiesRequest', + 'ListDataTaxonomiesResponse', + 'UpdateDataAttributeBindingRequest', + 'UpdateDataAttributeRequest', + 'UpdateDataTaxonomyRequest', + 'CreateDataScanRequest', + 'DataScan', + 'DataScanJob', + 'DeleteDataScanRequest', + 'GenerateDataQualityRulesRequest', + 'GenerateDataQualityRulesResponse', + 'GetDataScanJobRequest', + 'GetDataScanRequest', + 'ListDataScanJobsRequest', + 'ListDataScanJobsResponse', + 'ListDataScansRequest', + 'ListDataScansResponse', + 'RunDataScanRequest', + 'RunDataScanResponse', + 'UpdateDataScanRequest', + 'DataScanType', + 'DataQualityScanRuleResult', + 'DataScanEvent', + 'DiscoveryEvent', + 
'GovernanceEvent', + 'JobEvent', + 'SessionEvent', + 'CreateEntityRequest', + 'CreatePartitionRequest', + 'DeleteEntityRequest', + 'DeletePartitionRequest', + 'Entity', + 'GetEntityRequest', + 'GetPartitionRequest', + 'ListEntitiesRequest', + 'ListEntitiesResponse', + 'ListPartitionsRequest', + 'ListPartitionsResponse', + 'Partition', + 'Schema', + 'StorageAccess', + 'StorageFormat', + 'UpdateEntityRequest', + 'StorageSystem', + 'DataSource', + 'ScannedData', + 'Trigger', + 'Action', + 'Asset', + 'AssetStatus', + 'Lake', + 'Zone', + 'State', + 'DataAccessSpec', + 'ResourceAccessSpec', + 'CancelJobRequest', + 'CreateAssetRequest', + 'CreateEnvironmentRequest', + 'CreateLakeRequest', + 'CreateTaskRequest', + 'CreateZoneRequest', + 'DeleteAssetRequest', + 'DeleteEnvironmentRequest', + 'DeleteLakeRequest', + 'DeleteTaskRequest', + 'DeleteZoneRequest', + 'GetAssetRequest', + 'GetEnvironmentRequest', + 'GetJobRequest', + 'GetLakeRequest', + 'GetTaskRequest', + 'GetZoneRequest', + 'ListActionsResponse', + 'ListAssetActionsRequest', + 'ListAssetsRequest', + 'ListAssetsResponse', + 'ListEnvironmentsRequest', + 'ListEnvironmentsResponse', + 'ListJobsRequest', + 'ListJobsResponse', + 'ListLakeActionsRequest', + 'ListLakesRequest', + 'ListLakesResponse', + 'ListSessionsRequest', + 'ListSessionsResponse', + 'ListTasksRequest', + 'ListTasksResponse', + 'ListZoneActionsRequest', + 'ListZonesRequest', + 'ListZonesResponse', + 'OperationMetadata', + 'RunTaskRequest', + 'RunTaskResponse', + 'UpdateAssetRequest', + 'UpdateEnvironmentRequest', + 'UpdateLakeRequest', + 'UpdateTaskRequest', + 'UpdateZoneRequest', + 'Job', + 'Task', +) diff --git a/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/types/analyze.py b/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/types/analyze.py new file mode 100644 index 000000000000..a12a3b34e864 --- /dev/null +++ b/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/types/analyze.py @@ -0,0 +1,491 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from __future__ import annotations + +from typing import MutableMapping, MutableSequence + +import proto # type: ignore + +from google.cloud.dataplex_v1.types import resources +from google.protobuf import duration_pb2 # type: ignore +from google.protobuf import timestamp_pb2 # type: ignore + + +__protobuf__ = proto.module( + package='google.cloud.dataplex.v1', + manifest={ + 'Environment', + 'Content', + 'Session', + }, +) + + +class Environment(proto.Message): + r"""Environment represents a user-visible compute infrastructure + for analytics within a lake. + + Attributes: + name (str): + Output only. The relative resource name of the environment, + of the form: + projects/{project_id}/locations/{location_id}/lakes/{lake_id}/environment/{environment_id} + display_name (str): + Optional. User friendly display name. + uid (str): + Output only. System generated globally unique + ID for the environment. 
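+# A minimal usage sketch (illustrative only, with placeholder resource names):
+# the manifest above re-exports every message through a single namespace, so
+# request objects can be constructed without importing individual submodules.
+from google.cloud.dataplex_v1 import types
+
+request = types.GetEntryGroupRequest(
+    name="projects/example-project/locations/us-central1/entryGroups/example-group",
+)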
This ID will be + different if the environment is deleted and + re-created with the same name. + create_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. Environment creation time. + update_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. The time when the environment + was last updated. + labels (MutableMapping[str, str]): + Optional. User defined labels for the + environment. + description (str): + Optional. Description of the environment. + state (google.cloud.dataplex_v1.types.State): + Output only. Current state of the + environment. + infrastructure_spec (google.cloud.dataplex_v1.types.Environment.InfrastructureSpec): + Required. Infrastructure specification for + the Environment. + session_spec (google.cloud.dataplex_v1.types.Environment.SessionSpec): + Optional. Configuration for sessions created + for this environment. + session_status (google.cloud.dataplex_v1.types.Environment.SessionStatus): + Output only. Status of sessions created for + this environment. + endpoints (google.cloud.dataplex_v1.types.Environment.Endpoints): + Output only. URI Endpoints to access sessions + associated with the Environment. + """ + + class InfrastructureSpec(proto.Message): + r"""Configuration for the underlying infrastructure used to run + workloads. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + compute (google.cloud.dataplex_v1.types.Environment.InfrastructureSpec.ComputeResources): + Optional. Compute resources needed for + analyze interactive workloads. + + This field is a member of `oneof`_ ``resources``. + os_image (google.cloud.dataplex_v1.types.Environment.InfrastructureSpec.OsImageRuntime): + Required. Software Runtime Configuration for + analyze interactive workloads. + + This field is a member of `oneof`_ ``runtime``. + """ + + class ComputeResources(proto.Message): + r"""Compute resources associated with the analyze interactive + workloads. + + Attributes: + disk_size_gb (int): + Optional. Size in GB of the disk. Default is + 100 GB. + node_count (int): + Optional. Total number of nodes in the + sessions created for this environment. + max_node_count (int): + Optional. Max configurable nodes. If max_node_count > + node_count, then auto-scaling is enabled. + """ + + disk_size_gb: int = proto.Field( + proto.INT32, + number=1, + ) + node_count: int = proto.Field( + proto.INT32, + number=2, + ) + max_node_count: int = proto.Field( + proto.INT32, + number=3, + ) + + class OsImageRuntime(proto.Message): + r"""Software Runtime Configuration to run Analyze. + + Attributes: + image_version (str): + Required. Dataplex Image version. + java_libraries (MutableSequence[str]): + Optional. List of Java jars to be included in + the runtime environment. Valid input includes + Cloud Storage URIs to Jar binaries. For example, + gs://bucket-name/my/path/to/file.jar + python_packages (MutableSequence[str]): + Optional. A list of python packages to be + installed. Valid formats include Cloud Storage + URI to a PIP installable library. For example, + gs://bucket-name/my/path/to/lib.tar.gz + properties (MutableMapping[str, str]): + Optional. Spark properties to provide configuration for use + in sessions created for this environment. The properties to + set on daemon config files. Property keys are specified in + ``prefix:property`` format. The prefix must be "spark". 
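+# A condensed sketch of assembling the pieces described above, using
+# illustrative placeholder values (image_version, property values, and the
+# idle duration are assumptions). os_image belongs to the ``runtime`` oneof
+# and compute to the separate ``resources`` oneof, so both can be set; within
+# ComputeResources, a max_node_count greater than node_count enables
+# auto-scaling.
+from google.protobuf import duration_pb2
+
+from google.cloud.dataplex_v1 import types
+
+env = types.Environment(
+    display_name="interactive-analysis",
+    infrastructure_spec=types.Environment.InfrastructureSpec(
+        compute=types.Environment.InfrastructureSpec.ComputeResources(
+            disk_size_gb=100,
+            node_count=2,
+            max_node_count=10,  # greater than node_count, so sessions may auto-scale
+        ),
+        os_image=types.Environment.InfrastructureSpec.OsImageRuntime(
+            image_version="1.0",  # placeholder Dataplex image version
+            # Spark properties use the documented ``prefix:property`` key form.
+            properties={"spark:spark.executor.memory": "4g"},
+        ),
+    ),
+    session_spec=types.Environment.SessionSpec(
+        # Sessions idle longer than this are auto-terminated.
+        max_idle_duration=duration_pb2.Duration(seconds=3600),
+    ),
+)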
+ """ + + image_version: str = proto.Field( + proto.STRING, + number=1, + ) + java_libraries: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=2, + ) + python_packages: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=3, + ) + properties: MutableMapping[str, str] = proto.MapField( + proto.STRING, + proto.STRING, + number=4, + ) + + compute: 'Environment.InfrastructureSpec.ComputeResources' = proto.Field( + proto.MESSAGE, + number=50, + oneof='resources', + message='Environment.InfrastructureSpec.ComputeResources', + ) + os_image: 'Environment.InfrastructureSpec.OsImageRuntime' = proto.Field( + proto.MESSAGE, + number=100, + oneof='runtime', + message='Environment.InfrastructureSpec.OsImageRuntime', + ) + + class SessionSpec(proto.Message): + r"""Configuration for sessions created for this environment. + + Attributes: + max_idle_duration (google.protobuf.duration_pb2.Duration): + Optional. The idle time configuration of the + session. The session will be auto-terminated at + the end of this period. + enable_fast_startup (bool): + Optional. If True, this causes sessions to be + pre-created and available for faster startup to + enable interactive exploration use-cases. This + defaults to False to avoid additional billed + charges. These can only be set to True for the + environment with name set to "default", and with + default configuration. + """ + + max_idle_duration: duration_pb2.Duration = proto.Field( + proto.MESSAGE, + number=1, + message=duration_pb2.Duration, + ) + enable_fast_startup: bool = proto.Field( + proto.BOOL, + number=2, + ) + + class SessionStatus(proto.Message): + r"""Status of sessions created for this environment. + + Attributes: + active (bool): + Output only. Queries over sessions to mark + whether the environment is currently active or + not + """ + + active: bool = proto.Field( + proto.BOOL, + number=1, + ) + + class Endpoints(proto.Message): + r"""URI Endpoints to access sessions associated with the + Environment. + + Attributes: + notebooks (str): + Output only. URI to serve notebook APIs + sql (str): + Output only. URI to serve SQL APIs + """ + + notebooks: str = proto.Field( + proto.STRING, + number=1, + ) + sql: str = proto.Field( + proto.STRING, + number=2, + ) + + name: str = proto.Field( + proto.STRING, + number=1, + ) + display_name: str = proto.Field( + proto.STRING, + number=2, + ) + uid: str = proto.Field( + proto.STRING, + number=3, + ) + create_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=4, + message=timestamp_pb2.Timestamp, + ) + update_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=5, + message=timestamp_pb2.Timestamp, + ) + labels: MutableMapping[str, str] = proto.MapField( + proto.STRING, + proto.STRING, + number=6, + ) + description: str = proto.Field( + proto.STRING, + number=7, + ) + state: resources.State = proto.Field( + proto.ENUM, + number=8, + enum=resources.State, + ) + infrastructure_spec: InfrastructureSpec = proto.Field( + proto.MESSAGE, + number=100, + message=InfrastructureSpec, + ) + session_spec: SessionSpec = proto.Field( + proto.MESSAGE, + number=101, + message=SessionSpec, + ) + session_status: SessionStatus = proto.Field( + proto.MESSAGE, + number=102, + message=SessionStatus, + ) + endpoints: Endpoints = proto.Field( + proto.MESSAGE, + number=200, + message=Endpoints, + ) + + +class Content(proto.Message): + r"""Content represents a user-visible notebook or a sql script + + This message has `oneof`_ fields (mutually exclusive fields). 
+ For each oneof, at most one member field can be set at the same time. + Setting any member of the oneof automatically clears all other + members. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + name (str): + Output only. The relative resource name of the content, of + the form: + projects/{project_id}/locations/{location_id}/lakes/{lake_id}/content/{content_id} + uid (str): + Output only. System generated globally unique + ID for the content. This ID will be different if + the content is deleted and re-created with the + same name. + path (str): + Required. The path for the Content file, + represented as directory structure. Unique + within a lake. Limited to alphanumerics, + hyphens, underscores, dots and slashes. + create_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. Content creation time. + update_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. The time when the content was + last updated. + labels (MutableMapping[str, str]): + Optional. User defined labels for the + content. + description (str): + Optional. Description of the content. + data_text (str): + Required. Content data in string format. + + This field is a member of `oneof`_ ``data``. + sql_script (google.cloud.dataplex_v1.types.Content.SqlScript): + Sql Script related configurations. + + This field is a member of `oneof`_ ``content``. + notebook (google.cloud.dataplex_v1.types.Content.Notebook): + Notebook related configurations. + + This field is a member of `oneof`_ ``content``. + """ + + class SqlScript(proto.Message): + r"""Configuration for the Sql Script content. + + Attributes: + engine (google.cloud.dataplex_v1.types.Content.SqlScript.QueryEngine): + Required. Query Engine to be used for the Sql + Query. + """ + class QueryEngine(proto.Enum): + r"""Query Engine Type of the SQL Script. + + Values: + QUERY_ENGINE_UNSPECIFIED (0): + Value was unspecified. + SPARK (2): + Spark SQL Query. + """ + QUERY_ENGINE_UNSPECIFIED = 0 + SPARK = 2 + + engine: 'Content.SqlScript.QueryEngine' = proto.Field( + proto.ENUM, + number=1, + enum='Content.SqlScript.QueryEngine', + ) + + class Notebook(proto.Message): + r"""Configuration for Notebook content. + + Attributes: + kernel_type (google.cloud.dataplex_v1.types.Content.Notebook.KernelType): + Required. Kernel Type of the notebook. + """ + class KernelType(proto.Enum): + r"""Kernel Type of the Jupyter notebook. + + Values: + KERNEL_TYPE_UNSPECIFIED (0): + Kernel Type unspecified. + PYTHON3 (1): + Python 3 Kernel. 
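+# A short sketch of the Content oneofs described above, with placeholder
+# paths and payloads: data_text carries the ``data`` oneof, while sql_script
+# and notebook are mutually exclusive members of the ``content`` oneof, so
+# setting one clears the other.
+from google.cloud.dataplex_v1 import types
+
+sql_content = types.Content(
+    path="queries/daily_rollup",
+    data_text="SELECT 1",
+    sql_script=types.Content.SqlScript(
+        engine=types.Content.SqlScript.QueryEngine.SPARK,
+    ),
+)
+
+notebook_content = types.Content(
+    path="notebooks/exploration",
+    data_text="{}",  # notebook payload, also carried in the ``data`` oneof
+    notebook=types.Content.Notebook(
+        kernel_type=types.Content.Notebook.KernelType.PYTHON3,
+    ),
+)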
+ """ + KERNEL_TYPE_UNSPECIFIED = 0 + PYTHON3 = 1 + + kernel_type: 'Content.Notebook.KernelType' = proto.Field( + proto.ENUM, + number=1, + enum='Content.Notebook.KernelType', + ) + + name: str = proto.Field( + proto.STRING, + number=1, + ) + uid: str = proto.Field( + proto.STRING, + number=2, + ) + path: str = proto.Field( + proto.STRING, + number=3, + ) + create_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=4, + message=timestamp_pb2.Timestamp, + ) + update_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=5, + message=timestamp_pb2.Timestamp, + ) + labels: MutableMapping[str, str] = proto.MapField( + proto.STRING, + proto.STRING, + number=6, + ) + description: str = proto.Field( + proto.STRING, + number=7, + ) + data_text: str = proto.Field( + proto.STRING, + number=9, + oneof='data', + ) + sql_script: SqlScript = proto.Field( + proto.MESSAGE, + number=100, + oneof='content', + message=SqlScript, + ) + notebook: Notebook = proto.Field( + proto.MESSAGE, + number=101, + oneof='content', + message=Notebook, + ) + + +class Session(proto.Message): + r"""Represents an active analyze session running for a user. + + Attributes: + name (str): + Output only. The relative resource name of the content, of + the form: + projects/{project_id}/locations/{location_id}/lakes/{lake_id}/environment/{environment_id}/sessions/{session_id} + user_id (str): + Output only. Email of user running the + session. + create_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. Session start time. + state (google.cloud.dataplex_v1.types.State): + Output only. State of Session + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + user_id: str = proto.Field( + proto.STRING, + number=2, + ) + create_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=3, + message=timestamp_pb2.Timestamp, + ) + state: resources.State = proto.Field( + proto.ENUM, + number=4, + enum=resources.State, + ) + + +__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/types/catalog.py b/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/types/catalog.py new file mode 100644 index 000000000000..b1233e502d15 --- /dev/null +++ b/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/types/catalog.py @@ -0,0 +1,2630 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from __future__ import annotations + +from typing import MutableMapping, MutableSequence + +import proto # type: ignore + +from google.protobuf import field_mask_pb2 # type: ignore +from google.protobuf import struct_pb2 # type: ignore +from google.protobuf import timestamp_pb2 # type: ignore + + +__protobuf__ = proto.module( + package='google.cloud.dataplex.v1', + manifest={ + 'EntryView', + 'TransferStatus', + 'AspectType', + 'EntryGroup', + 'EntryType', + 'Aspect', + 'AspectSource', + 'Entry', + 'EntrySource', + 'CreateEntryGroupRequest', + 'UpdateEntryGroupRequest', + 'DeleteEntryGroupRequest', + 'ListEntryGroupsRequest', + 'ListEntryGroupsResponse', + 'GetEntryGroupRequest', + 'CreateEntryTypeRequest', + 'UpdateEntryTypeRequest', + 'DeleteEntryTypeRequest', + 'ListEntryTypesRequest', + 'ListEntryTypesResponse', + 'GetEntryTypeRequest', + 'CreateAspectTypeRequest', + 'UpdateAspectTypeRequest', + 'DeleteAspectTypeRequest', + 'ListAspectTypesRequest', + 'ListAspectTypesResponse', + 'GetAspectTypeRequest', + 'CreateEntryRequest', + 'UpdateEntryRequest', + 'DeleteEntryRequest', + 'ListEntriesRequest', + 'ListEntriesResponse', + 'GetEntryRequest', + 'LookupEntryRequest', + 'SearchEntriesRequest', + 'SearchEntriesResult', + 'SearchEntriesResponse', + 'ImportItem', + 'CreateMetadataJobRequest', + 'GetMetadataJobRequest', + 'ListMetadataJobsRequest', + 'ListMetadataJobsResponse', + 'CancelMetadataJobRequest', + 'MetadataJob', + }, +) + + +class EntryView(proto.Enum): + r"""View for controlling which parts of an entry are to be + returned. + + Values: + ENTRY_VIEW_UNSPECIFIED (0): + Unspecified EntryView. Defaults to FULL. + BASIC (1): + Returns entry only, without aspects. + FULL (2): + Returns all required aspects as well as the + keys of all non-required aspects. + CUSTOM (3): + Returns aspects matching custom fields in + GetEntryRequest. If the number of aspects + exceeds 100, the first 100 will be returned. + ALL (4): + Returns all aspects. If the number of aspects + exceeds 100, the first 100 will be returned. + """ + ENTRY_VIEW_UNSPECIFIED = 0 + BASIC = 1 + FULL = 2 + CUSTOM = 3 + ALL = 4 + + +class TransferStatus(proto.Enum): + r"""Denotes the transfer status of a resource. It is unspecified + for resources created from Dataplex API. + + Values: + TRANSFER_STATUS_UNSPECIFIED (0): + The default value. It is set for resources + that were not subject for migration from Data + Catalog service. + TRANSFER_STATUS_MIGRATED (1): + Indicates that a resource was migrated from + Data Catalog service but it hasn't been + transferred yet. In particular the resource + cannot be updated from Dataplex API. + TRANSFER_STATUS_TRANSFERRED (2): + Indicates that a resource was transferred + from Data Catalog service. The resource can only + be updated from Dataplex API. + """ + TRANSFER_STATUS_UNSPECIFIED = 0 + TRANSFER_STATUS_MIGRATED = 1 + TRANSFER_STATUS_TRANSFERRED = 2 + + +class AspectType(proto.Message): + r"""AspectType is a template for creating Aspects, and represents + the JSON-schema for a given Entry, for example, BigQuery Table + Schema. + + Attributes: + name (str): + Output only. The relative resource name of the AspectType, + of the form: + projects/{project_number}/locations/{location_id}/aspectTypes/{aspect_type_id}. + uid (str): + Output only. System generated globally unique + ID for the AspectType. If you delete and + recreate the AspectType with the same name, then + this ID will be different. + create_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. 
The time when the AspectType was + created. + update_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. The time when the AspectType was + last updated. + description (str): + Optional. Description of the AspectType. + display_name (str): + Optional. User friendly display name. + labels (MutableMapping[str, str]): + Optional. User-defined labels for the + AspectType. + etag (str): + The service computes this checksum. The + client may send it on update and delete requests + to ensure it has an up-to-date value before + proceeding. + authorization (google.cloud.dataplex_v1.types.AspectType.Authorization): + Immutable. Defines the Authorization for this + type. + metadata_template (google.cloud.dataplex_v1.types.AspectType.MetadataTemplate): + Required. MetadataTemplate of the aspect. + transfer_status (google.cloud.dataplex_v1.types.TransferStatus): + Output only. Denotes the transfer status of + the Aspect Type. It is unspecified for Aspect + Types created from Dataplex API. + """ + + class Authorization(proto.Message): + r"""Autorization for an AspectType. + + Attributes: + alternate_use_permission (str): + Immutable. The IAM permission grantable on + the EntryGroup to allow access to instantiate + Aspects of Dataplex owned AspectTypes, only + settable for Dataplex owned Types. + """ + + alternate_use_permission: str = proto.Field( + proto.STRING, + number=1, + ) + + class MetadataTemplate(proto.Message): + r"""MetadataTemplate definition for an AspectType. + + Attributes: + index (int): + Optional. Index is used to encode Template + messages. The value of index can range between 1 + and 2,147,483,647. Index must be unique within + all fields in a Template. (Nested Templates can + reuse indexes). Once a Template is defined, the + index cannot be changed, because it identifies + the field in the actual storage format. Index is + a mandatory field, but it is optional for top + level fields, and map/array "values" + definitions. + name (str): + Required. The name of the field. + type_ (str): + Required. The datatype of this field. The following values + are supported: + + Primitive types: + + - string + - integer + - boolean + - double + - datetime. Must be of the format RFC3339 UTC "Zulu" + (Examples: "2014-10-02T15:01:23Z" and + "2014-10-02T15:01:23.045123456Z"). + + Complex types: + + - enum + - array + - map + - record + record_fields (MutableSequence[google.cloud.dataplex_v1.types.AspectType.MetadataTemplate]): + Optional. Field definition. You must specify + it if the type is record. It defines the nested + fields. + enum_values (MutableSequence[google.cloud.dataplex_v1.types.AspectType.MetadataTemplate.EnumValue]): + Optional. The list of values for an enum + type. You must define it if the type is enum. + map_items (google.cloud.dataplex_v1.types.AspectType.MetadataTemplate): + Optional. If the type is map, set map_items. map_items can + refer to a primitive field or a complex (record only) field. + To specify a primitive field, you only need to set name and + type in the nested MetadataTemplate. The recommended value + for the name field is item, as this isn't used in the actual + payload. + array_items (google.cloud.dataplex_v1.types.AspectType.MetadataTemplate): + Optional. If the type is array, set array_items. array_items + can refer to a primitive field or a complex (record only) + field. To specify a primitive field, you only need to set + name and type in the nested MetadataTemplate. 
The + recommended value for the name field is item, as this isn't + used in the actual payload. + type_id (str): + Optional. You can use type id if this + definition of the field needs to be reused + later. The type id must be unique across the + entire template. You can only specify it if the + field type is record. + type_ref (str): + Optional. A reference to another field + definition (not an inline definition). The value + must be equal to the value of an id field + defined elsewhere in the MetadataTemplate. Only + fields with record type can refer to other + fields. + constraints (google.cloud.dataplex_v1.types.AspectType.MetadataTemplate.Constraints): + Optional. Specifies the constraints on this + field. + annotations (google.cloud.dataplex_v1.types.AspectType.MetadataTemplate.Annotations): + Optional. Specifies annotations on this + field. + """ + + class EnumValue(proto.Message): + r"""Definition of Enumvalue, to be used for enum fields. + + Attributes: + index (int): + Required. Index for the enum value. It can't + be modified. + name (str): + Required. Name of the enumvalue. This is the + actual value that the aspect can contain. + deprecated (str): + Optional. You can set this message if you + need to deprecate an enum value. + """ + + index: int = proto.Field( + proto.INT32, + number=1, + ) + name: str = proto.Field( + proto.STRING, + number=2, + ) + deprecated: str = proto.Field( + proto.STRING, + number=3, + ) + + class Constraints(proto.Message): + r"""Definition of the constraints of a field. + + Attributes: + required (bool): + Optional. Marks this field as optional or + required. + """ + + required: bool = proto.Field( + proto.BOOL, + number=1, + ) + + class Annotations(proto.Message): + r"""Definition of the annotations of a field. + + Attributes: + deprecated (str): + Optional. Marks a field as deprecated. You + can include a deprecation message. + display_name (str): + Optional. Display name for a field. + description (str): + Optional. Description for a field. + display_order (int): + Optional. Display order for a field. You can + use this to reorder where a field is rendered. + string_type (str): + Optional. You can use String Type annotations to specify + special meaning to string fields. The following values are + supported: + + - richText: The field must be interpreted as a rich text + field. + - url: A fully qualified URL link. + - resource: A service qualified resource reference. + string_values (MutableSequence[str]): + Optional. Suggested hints for string fields. + You can use them to suggest values to users + through console. 
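+# A compact sketch of the template recursion described above, with
+# hypothetical field names. The top-level template is a record whose nested
+# fields carry unique indexes (index is optional at the top level); the enum
+# field lists EnumValue entries, and Annotations add display hints.
+from google.cloud.dataplex_v1 import types
+
+template = types.AspectType.MetadataTemplate(
+    name="exampleAspect",
+    type_="record",
+    record_fields=[
+        types.AspectType.MetadataTemplate(
+            index=1,
+            name="owner",
+            type_="string",
+            constraints=types.AspectType.MetadataTemplate.Constraints(
+                required=True,
+            ),
+            annotations=types.AspectType.MetadataTemplate.Annotations(
+                display_name="Owner",
+                description="Team that owns the resource.",
+            ),
+        ),
+        types.AspectType.MetadataTemplate(
+            index=2,
+            name="tier",
+            type_="enum",
+            enum_values=[
+                types.AspectType.MetadataTemplate.EnumValue(index=1, name="BRONZE"),
+                types.AspectType.MetadataTemplate.EnumValue(index=2, name="GOLD"),
+            ],
+        ),
+    ],
+)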
+ """ + + deprecated: str = proto.Field( + proto.STRING, + number=1, + ) + display_name: str = proto.Field( + proto.STRING, + number=2, + ) + description: str = proto.Field( + proto.STRING, + number=3, + ) + display_order: int = proto.Field( + proto.INT32, + number=4, + ) + string_type: str = proto.Field( + proto.STRING, + number=6, + ) + string_values: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=7, + ) + + index: int = proto.Field( + proto.INT32, + number=1, + ) + name: str = proto.Field( + proto.STRING, + number=2, + ) + type_: str = proto.Field( + proto.STRING, + number=5, + ) + record_fields: MutableSequence['AspectType.MetadataTemplate'] = proto.RepeatedField( + proto.MESSAGE, + number=6, + message='AspectType.MetadataTemplate', + ) + enum_values: MutableSequence['AspectType.MetadataTemplate.EnumValue'] = proto.RepeatedField( + proto.MESSAGE, + number=8, + message='AspectType.MetadataTemplate.EnumValue', + ) + map_items: 'AspectType.MetadataTemplate' = proto.Field( + proto.MESSAGE, + number=10, + message='AspectType.MetadataTemplate', + ) + array_items: 'AspectType.MetadataTemplate' = proto.Field( + proto.MESSAGE, + number=11, + message='AspectType.MetadataTemplate', + ) + type_id: str = proto.Field( + proto.STRING, + number=12, + ) + type_ref: str = proto.Field( + proto.STRING, + number=13, + ) + constraints: 'AspectType.MetadataTemplate.Constraints' = proto.Field( + proto.MESSAGE, + number=50, + message='AspectType.MetadataTemplate.Constraints', + ) + annotations: 'AspectType.MetadataTemplate.Annotations' = proto.Field( + proto.MESSAGE, + number=51, + message='AspectType.MetadataTemplate.Annotations', + ) + + name: str = proto.Field( + proto.STRING, + number=1, + ) + uid: str = proto.Field( + proto.STRING, + number=2, + ) + create_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=3, + message=timestamp_pb2.Timestamp, + ) + update_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=4, + message=timestamp_pb2.Timestamp, + ) + description: str = proto.Field( + proto.STRING, + number=5, + ) + display_name: str = proto.Field( + proto.STRING, + number=6, + ) + labels: MutableMapping[str, str] = proto.MapField( + proto.STRING, + proto.STRING, + number=7, + ) + etag: str = proto.Field( + proto.STRING, + number=8, + ) + authorization: Authorization = proto.Field( + proto.MESSAGE, + number=52, + message=Authorization, + ) + metadata_template: MetadataTemplate = proto.Field( + proto.MESSAGE, + number=53, + message=MetadataTemplate, + ) + transfer_status: 'TransferStatus' = proto.Field( + proto.ENUM, + number=202, + enum='TransferStatus', + ) + + +class EntryGroup(proto.Message): + r"""An Entry Group represents a logical grouping of one or more + Entries. + + Attributes: + name (str): + Output only. The relative resource name of the EntryGroup, + in the format + projects/{project_id_or_number}/locations/{location_id}/entryGroups/{entry_group_id}. + uid (str): + Output only. System generated globally unique + ID for the EntryGroup. If you delete and + recreate the EntryGroup with the same name, this + ID will be different. + create_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. The time when the EntryGroup was + created. + update_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. The time when the EntryGroup was + last updated. + description (str): + Optional. Description of the EntryGroup. + display_name (str): + Optional. User friendly display name. + labels (MutableMapping[str, str]): + Optional. 
User-defined labels for the + EntryGroup. + etag (str): + This checksum is computed by the service, and + might be sent on update and delete requests to + ensure the client has an up-to-date value before + proceeding. + transfer_status (google.cloud.dataplex_v1.types.TransferStatus): + Output only. Denotes the transfer status of + the Entry Group. It is unspecified for Entry + Group created from Dataplex API. + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + uid: str = proto.Field( + proto.STRING, + number=2, + ) + create_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=3, + message=timestamp_pb2.Timestamp, + ) + update_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=4, + message=timestamp_pb2.Timestamp, + ) + description: str = proto.Field( + proto.STRING, + number=5, + ) + display_name: str = proto.Field( + proto.STRING, + number=6, + ) + labels: MutableMapping[str, str] = proto.MapField( + proto.STRING, + proto.STRING, + number=7, + ) + etag: str = proto.Field( + proto.STRING, + number=8, + ) + transfer_status: 'TransferStatus' = proto.Field( + proto.ENUM, + number=202, + enum='TransferStatus', + ) + + +class EntryType(proto.Message): + r"""Entry Type is a template for creating Entries. + + Attributes: + name (str): + Output only. The relative resource name of the EntryType, of + the form: + projects/{project_number}/locations/{location_id}/entryTypes/{entry_type_id}. + uid (str): + Output only. System generated globally unique + ID for the EntryType. This ID will be different + if the EntryType is deleted and re-created with + the same name. + create_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. The time when the EntryType was + created. + update_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. The time when the EntryType was + last updated. + description (str): + Optional. Description of the EntryType. + display_name (str): + Optional. User friendly display name. + labels (MutableMapping[str, str]): + Optional. User-defined labels for the + EntryType. + etag (str): + Optional. This checksum is computed by the + service, and might be sent on update and delete + requests to ensure the client has an up-to-date + value before proceeding. + type_aliases (MutableSequence[str]): + Optional. Indicates the classes this Entry + Type belongs to, for example, TABLE, DATABASE, + MODEL. + platform (str): + Optional. The platform that Entries of this + type belongs to. + system (str): + Optional. The system that Entries of this + type belongs to. Examples include CloudSQL, + MariaDB etc + required_aspects (MutableSequence[google.cloud.dataplex_v1.types.EntryType.AspectInfo]): + AspectInfo for the entry type. + authorization (google.cloud.dataplex_v1.types.EntryType.Authorization): + Immutable. Authorization defined for this + type. + """ + + class AspectInfo(proto.Message): + r""" + + Attributes: + type_ (str): + Required aspect type for the entry type. + """ + + type_: str = proto.Field( + proto.STRING, + number=1, + ) + + class Authorization(proto.Message): + r"""Authorization for an Entry Type. + + Attributes: + alternate_use_permission (str): + Immutable. The IAM permission grantable on + the Entry Group to allow access to instantiate + Entries of Dataplex owned Entry Types, only + settable for Dataplex owned Types. 
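+# A brief sketch of an EntryType populated with the classification fields
+# described above; the alias, platform, system, and aspect-type name below
+# are illustrative placeholders.
+from google.cloud.dataplex_v1 import types
+
+entry_type = types.EntryType(
+    display_name="Example table type",
+    type_aliases=["TABLE"],
+    platform="GoogleCloud",
+    system="CloudSQL",
+    required_aspects=[
+        types.EntryType.AspectInfo(
+            type_="projects/example-project/locations/us-central1/aspectTypes/example-schema",
+        ),
+    ],
+)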
+ """ + + alternate_use_permission: str = proto.Field( + proto.STRING, + number=1, + ) + + name: str = proto.Field( + proto.STRING, + number=1, + ) + uid: str = proto.Field( + proto.STRING, + number=2, + ) + create_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=3, + message=timestamp_pb2.Timestamp, + ) + update_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=4, + message=timestamp_pb2.Timestamp, + ) + description: str = proto.Field( + proto.STRING, + number=5, + ) + display_name: str = proto.Field( + proto.STRING, + number=6, + ) + labels: MutableMapping[str, str] = proto.MapField( + proto.STRING, + proto.STRING, + number=7, + ) + etag: str = proto.Field( + proto.STRING, + number=8, + ) + type_aliases: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=9, + ) + platform: str = proto.Field( + proto.STRING, + number=10, + ) + system: str = proto.Field( + proto.STRING, + number=11, + ) + required_aspects: MutableSequence[AspectInfo] = proto.RepeatedField( + proto.MESSAGE, + number=50, + message=AspectInfo, + ) + authorization: Authorization = proto.Field( + proto.MESSAGE, + number=51, + message=Authorization, + ) + + +class Aspect(proto.Message): + r"""An aspect is a single piece of metadata describing an entry. + + Attributes: + aspect_type (str): + Output only. The resource name of the type + used to create this Aspect. + path (str): + Output only. The path in the entry under + which the aspect is attached. + create_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. The time when the Aspect was + created. + update_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. The time when the Aspect was + last updated. + data (google.protobuf.struct_pb2.Struct): + Required. The content of the aspect, + according to its aspect type schema. The maximum + size of the field is 120KB (encoded as UTF-8). + aspect_source (google.cloud.dataplex_v1.types.AspectSource): + Optional. Information related to the source + system of the aspect. + """ + + aspect_type: str = proto.Field( + proto.STRING, + number=1, + ) + path: str = proto.Field( + proto.STRING, + number=2, + ) + create_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=3, + message=timestamp_pb2.Timestamp, + ) + update_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=4, + message=timestamp_pb2.Timestamp, + ) + data: struct_pb2.Struct = proto.Field( + proto.MESSAGE, + number=8, + message=struct_pb2.Struct, + ) + aspect_source: 'AspectSource' = proto.Field( + proto.MESSAGE, + number=9, + message='AspectSource', + ) + + +class AspectSource(proto.Message): + r"""Information related to the source system of the aspect. + + Attributes: + create_time (google.protobuf.timestamp_pb2.Timestamp): + The time the aspect was created in the source + system. + update_time (google.protobuf.timestamp_pb2.Timestamp): + The time the aspect was last updated in the + source system. + data_version (str): + The version of the data format used to + produce this data. This field is used to + indicated when the underlying data format + changes (e.g., schema modifications, changes to + the source URL format definition, etc). 
+ """ + + create_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=10, + message=timestamp_pb2.Timestamp, + ) + update_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=11, + message=timestamp_pb2.Timestamp, + ) + data_version: str = proto.Field( + proto.STRING, + number=12, + ) + + +class Entry(proto.Message): + r"""An entry is a representation of a data resource that can be + described by various metadata. + + Attributes: + name (str): + Identifier. The relative resource name of the entry, in the + format + ``projects/{project_id_or_number}/locations/{location_id}/entryGroups/{entry_group_id}/entries/{entry_id}``. + entry_type (str): + Required. Immutable. The relative resource name of the entry + type that was used to create this entry, in the format + ``projects/{project_id_or_number}/locations/{location_id}/entryTypes/{entry_type_id}``. + create_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. The time when the entry was + created in Dataplex. + update_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. The time when the entry was last + updated in Dataplex. + aspects (MutableMapping[str, google.cloud.dataplex_v1.types.Aspect]): + Optional. The aspects that are attached to the entry. + Depending on how the aspect is attached to the entry, the + format of the aspect key can be one of the following: + + - If the aspect is attached directly to the entry: + ``{project_id_or_number}.{location_id}.{aspect_type_id}`` + - If the aspect is attached to an entry's path: + ``{project_id_or_number}.{location_id}.{aspect_type_id}@{path}`` + parent_entry (str): + Optional. Immutable. The resource name of the + parent entry. + fully_qualified_name (str): + Optional. A name for the entry that can be referenced by an + external system. For more information, see `Fully qualified + names `__. + The maximum size of the field is 4000 characters. + entry_source (google.cloud.dataplex_v1.types.EntrySource): + Optional. Information related to the source + system of the data resource that is represented + by the entry. + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + entry_type: str = proto.Field( + proto.STRING, + number=4, + ) + create_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=5, + message=timestamp_pb2.Timestamp, + ) + update_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=6, + message=timestamp_pb2.Timestamp, + ) + aspects: MutableMapping[str, 'Aspect'] = proto.MapField( + proto.STRING, + proto.MESSAGE, + number=9, + message='Aspect', + ) + parent_entry: str = proto.Field( + proto.STRING, + number=10, + ) + fully_qualified_name: str = proto.Field( + proto.STRING, + number=12, + ) + entry_source: 'EntrySource' = proto.Field( + proto.MESSAGE, + number=15, + message='EntrySource', + ) + + +class EntrySource(proto.Message): + r"""Information related to the source system of the data resource + that is represented by the entry. + + Attributes: + resource (str): + The name of the resource in the source + system. Maximum length is 4,000 characters. + system (str): + The name of the source system. + Maximum length is 64 characters. + platform (str): + The platform containing the source system. + Maximum length is 64 characters. + display_name (str): + A user-friendly display name. + Maximum length is 500 characters. + description (str): + A description of the data resource. + Maximum length is 2,000 characters. + labels (MutableMapping[str, str]): + User-defined labels. 
+ The maximum size of keys and values is 128 + characters each. + ancestors (MutableSequence[google.cloud.dataplex_v1.types.EntrySource.Ancestor]): + Immutable. The entries representing the + ancestors of the data resource in the source + system. + create_time (google.protobuf.timestamp_pb2.Timestamp): + The time when the resource was created in the + source system. + update_time (google.protobuf.timestamp_pb2.Timestamp): + The time when the resource was last updated in the source + system. If the entry exists in the system and its + ``EntrySource`` has ``update_time`` populated, further + updates to the ``EntrySource`` of the entry must provide + incremental updates to its ``update_time``. + location (str): + Output only. Location of the resource in the + source system. You can search the entry by this + location. By default, this should match the + location of the entry group containing this + entry. A different value allows capturing the + source location for data external to Google + Cloud. + """ + + class Ancestor(proto.Message): + r"""Information about individual items in the hierarchy that is + associated with the data resource. + + Attributes: + name (str): + Optional. The name of the ancestor resource. + type_ (str): + Optional. The type of the ancestor resource. + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + type_: str = proto.Field( + proto.STRING, + number=2, + ) + + resource: str = proto.Field( + proto.STRING, + number=1, + ) + system: str = proto.Field( + proto.STRING, + number=2, + ) + platform: str = proto.Field( + proto.STRING, + number=3, + ) + display_name: str = proto.Field( + proto.STRING, + number=5, + ) + description: str = proto.Field( + proto.STRING, + number=6, + ) + labels: MutableMapping[str, str] = proto.MapField( + proto.STRING, + proto.STRING, + number=7, + ) + ancestors: MutableSequence[Ancestor] = proto.RepeatedField( + proto.MESSAGE, + number=9, + message=Ancestor, + ) + create_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=10, + message=timestamp_pb2.Timestamp, + ) + update_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=11, + message=timestamp_pb2.Timestamp, + ) + location: str = proto.Field( + proto.STRING, + number=12, + ) + + +class CreateEntryGroupRequest(proto.Message): + r"""Create EntryGroup Request. + + Attributes: + parent (str): + Required. The resource name of the entryGroup, of the form: + projects/{project_number}/locations/{location_id} where + ``location_id`` refers to a GCP region. + entry_group_id (str): + Required. EntryGroup identifier. + entry_group (google.cloud.dataplex_v1.types.EntryGroup): + Required. EntryGroup Resource. + validate_only (bool): + Optional. The service validates the request + without performing any mutations. The default is + false. + """ + + parent: str = proto.Field( + proto.STRING, + number=1, + ) + entry_group_id: str = proto.Field( + proto.STRING, + number=2, + ) + entry_group: 'EntryGroup' = proto.Field( + proto.MESSAGE, + number=3, + message='EntryGroup', + ) + validate_only: bool = proto.Field( + proto.BOOL, + number=4, + ) + + +class UpdateEntryGroupRequest(proto.Message): + r"""Update EntryGroup Request. + + Attributes: + entry_group (google.cloud.dataplex_v1.types.EntryGroup): + Required. EntryGroup Resource. + update_mask (google.protobuf.field_mask_pb2.FieldMask): + Required. Mask of fields to update. + validate_only (bool): + Optional. The service validates the request, + without performing any mutations. The default is + false. 
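+# A small sketch of the validate_only dry run described above, with
+# placeholder names; the service checks the request without mutating state.
+from google.cloud.dataplex_v1 import types
+
+dry_run = types.CreateEntryGroupRequest(
+    parent="projects/example-project/locations/us-central1",
+    entry_group_id="example-group",
+    entry_group=types.EntryGroup(description="Sketch-only entry group."),
+    validate_only=True,
+)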
+ """ + + entry_group: 'EntryGroup' = proto.Field( + proto.MESSAGE, + number=1, + message='EntryGroup', + ) + update_mask: field_mask_pb2.FieldMask = proto.Field( + proto.MESSAGE, + number=2, + message=field_mask_pb2.FieldMask, + ) + validate_only: bool = proto.Field( + proto.BOOL, + number=3, + ) + + +class DeleteEntryGroupRequest(proto.Message): + r"""Delete EntryGroup Request. + + Attributes: + name (str): + Required. The resource name of the EntryGroup: + ``projects/{project_number}/locations/{location_id}/entryGroups/{entry_group_id}``. + etag (str): + Optional. If the client provided etag value + does not match the current etag value, the + DeleteEntryGroupRequest method returns an + ABORTED error response. + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + etag: str = proto.Field( + proto.STRING, + number=2, + ) + + +class ListEntryGroupsRequest(proto.Message): + r"""List entryGroups request. + + Attributes: + parent (str): + Required. The resource name of the entryGroup location, of + the form: + ``projects/{project_number}/locations/{location_id}`` where + ``location_id`` refers to a Google Cloud region. + page_size (int): + Optional. Maximum number of EntryGroups to + return. The service may return fewer than this + value. If unspecified, the service returns at + most 10 EntryGroups. The maximum value is 1000; + values above 1000 will be coerced to 1000. + page_token (str): + Optional. Page token received from a previous + ``ListEntryGroups`` call. Provide this to retrieve the + subsequent page. When paginating, all other parameters you + provide to ``ListEntryGroups`` must match the call that + provided the page token. + filter (str): + Optional. Filter request. + order_by (str): + Optional. Order by fields for the result. + """ + + parent: str = proto.Field( + proto.STRING, + number=1, + ) + page_size: int = proto.Field( + proto.INT32, + number=2, + ) + page_token: str = proto.Field( + proto.STRING, + number=3, + ) + filter: str = proto.Field( + proto.STRING, + number=4, + ) + order_by: str = proto.Field( + proto.STRING, + number=5, + ) + + +class ListEntryGroupsResponse(proto.Message): + r"""List entry groups response. + + Attributes: + entry_groups (MutableSequence[google.cloud.dataplex_v1.types.EntryGroup]): + Entry groups under the given parent location. + next_page_token (str): + Token to retrieve the next page of results, + or empty if there are no more results in the + list. + unreachable_locations (MutableSequence[str]): + Locations that the service couldn't reach. + """ + + @property + def raw_page(self): + return self + + entry_groups: MutableSequence['EntryGroup'] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message='EntryGroup', + ) + next_page_token: str = proto.Field( + proto.STRING, + number=2, + ) + unreachable_locations: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=3, + ) + + +class GetEntryGroupRequest(proto.Message): + r"""Get EntryGroup request. + + Attributes: + name (str): + Required. The resource name of the EntryGroup: + ``projects/{project_number}/locations/{location_id}/entryGroups/{entry_group_id}``. + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + + +class CreateEntryTypeRequest(proto.Message): + r"""Create EntryType Request. + + Attributes: + parent (str): + Required. The resource name of the EntryType, of the form: + projects/{project_number}/locations/{location_id} where + ``location_id`` refers to a Google Cloud region. + entry_type_id (str): + Required. EntryType identifier. 
+ entry_type (google.cloud.dataplex_v1.types.EntryType): + Required. EntryType Resource. + validate_only (bool): + Optional. The service validates the request + without performing any mutations. The default is + false. + """ + + parent: str = proto.Field( + proto.STRING, + number=1, + ) + entry_type_id: str = proto.Field( + proto.STRING, + number=2, + ) + entry_type: 'EntryType' = proto.Field( + proto.MESSAGE, + number=3, + message='EntryType', + ) + validate_only: bool = proto.Field( + proto.BOOL, + number=4, + ) + + +class UpdateEntryTypeRequest(proto.Message): + r"""Update EntryType Request. + + Attributes: + entry_type (google.cloud.dataplex_v1.types.EntryType): + Required. EntryType Resource. + update_mask (google.protobuf.field_mask_pb2.FieldMask): + Required. Mask of fields to update. + validate_only (bool): + Optional. The service validates the request + without performing any mutations. The default is + false. + """ + + entry_type: 'EntryType' = proto.Field( + proto.MESSAGE, + number=1, + message='EntryType', + ) + update_mask: field_mask_pb2.FieldMask = proto.Field( + proto.MESSAGE, + number=2, + message=field_mask_pb2.FieldMask, + ) + validate_only: bool = proto.Field( + proto.BOOL, + number=3, + ) + + +class DeleteEntryTypeRequest(proto.Message): + r"""Delele EntryType Request. + + Attributes: + name (str): + Required. The resource name of the EntryType: + ``projects/{project_number}/locations/{location_id}/entryTypes/{entry_type_id}``. + etag (str): + Optional. If the client provided etag value + does not match the current etag value, the + DeleteEntryTypeRequest method returns an ABORTED + error response. + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + etag: str = proto.Field( + proto.STRING, + number=2, + ) + + +class ListEntryTypesRequest(proto.Message): + r"""List EntryTypes request + + Attributes: + parent (str): + Required. The resource name of the EntryType location, of + the form: + ``projects/{project_number}/locations/{location_id}`` where + ``location_id`` refers to a Google Cloud region. + page_size (int): + Optional. Maximum number of EntryTypes to + return. The service may return fewer than this + value. If unspecified, the service returns at + most 10 EntryTypes. The maximum value is 1000; + values above 1000 will be coerced to 1000. + page_token (str): + Optional. Page token received from a previous + ``ListEntryTypes`` call. Provide this to retrieve the + subsequent page. When paginating, all other parameters you + provided to ``ListEntryTypes`` must match the call that + provided the page token. + filter (str): + Optional. Filter request. Filters are case-sensitive. The + service supports the following formats: + + - labels.key1 = "value1" + - labels:key1 + - name = "value" + + These restrictions can be conjoined with AND, OR, and NOT + conjunctions. + order_by (str): + Optional. Orders the result by ``name`` or ``create_time`` + fields. If not specified, the ordering is undefined. + """ + + parent: str = proto.Field( + proto.STRING, + number=1, + ) + page_size: int = proto.Field( + proto.INT32, + number=2, + ) + page_token: str = proto.Field( + proto.STRING, + number=3, + ) + filter: str = proto.Field( + proto.STRING, + number=4, + ) + order_by: str = proto.Field( + proto.STRING, + number=5, + ) + + +class ListEntryTypesResponse(proto.Message): + r"""List EntryTypes response. + + Attributes: + entry_types (MutableSequence[google.cloud.dataplex_v1.types.EntryType]): + EntryTypes under the given parent location. 
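+# A brief sketch of the filter grammar listed above, with placeholder label
+# keys and values; restrictions combine with AND, OR, and NOT, and matching
+# is case-sensitive.
+from google.cloud.dataplex_v1 import types
+
+list_request = types.ListEntryTypesRequest(
+    parent="projects/example-project/locations/us-central1",
+    filter='labels:env AND labels.env = "prod" AND NOT name = "deprecated"',
+    order_by="create_time",
+    page_size=50,
+)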
+ next_page_token (str): + Token to retrieve the next page of results, + or empty if there are no more results in the + list. + unreachable_locations (MutableSequence[str]): + Locations that the service couldn't reach. + """ + + @property + def raw_page(self): + return self + + entry_types: MutableSequence['EntryType'] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message='EntryType', + ) + next_page_token: str = proto.Field( + proto.STRING, + number=2, + ) + unreachable_locations: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=3, + ) + + +class GetEntryTypeRequest(proto.Message): + r"""Get EntryType request. + + Attributes: + name (str): + Required. The resource name of the EntryType: + ``projects/{project_number}/locations/{location_id}/entryTypes/{entry_type_id}``. + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + + +class CreateAspectTypeRequest(proto.Message): + r"""Create AspectType Request. + + Attributes: + parent (str): + Required. The resource name of the AspectType, of the form: + projects/{project_number}/locations/{location_id} where + ``location_id`` refers to a Google Cloud region. + aspect_type_id (str): + Required. AspectType identifier. + aspect_type (google.cloud.dataplex_v1.types.AspectType): + Required. AspectType Resource. + validate_only (bool): + Optional. The service validates the request + without performing any mutations. The default is + false. + """ + + parent: str = proto.Field( + proto.STRING, + number=1, + ) + aspect_type_id: str = proto.Field( + proto.STRING, + number=2, + ) + aspect_type: 'AspectType' = proto.Field( + proto.MESSAGE, + number=3, + message='AspectType', + ) + validate_only: bool = proto.Field( + proto.BOOL, + number=4, + ) + + +class UpdateAspectTypeRequest(proto.Message): + r"""Update AspectType Request + + Attributes: + aspect_type (google.cloud.dataplex_v1.types.AspectType): + Required. AspectType Resource + update_mask (google.protobuf.field_mask_pb2.FieldMask): + Required. Mask of fields to update. + validate_only (bool): + Optional. Only validate the request, but do + not perform mutations. The default is false. + """ + + aspect_type: 'AspectType' = proto.Field( + proto.MESSAGE, + number=1, + message='AspectType', + ) + update_mask: field_mask_pb2.FieldMask = proto.Field( + proto.MESSAGE, + number=2, + message=field_mask_pb2.FieldMask, + ) + validate_only: bool = proto.Field( + proto.BOOL, + number=3, + ) + + +class DeleteAspectTypeRequest(proto.Message): + r"""Delele AspectType Request. + + Attributes: + name (str): + Required. The resource name of the AspectType: + ``projects/{project_number}/locations/{location_id}/aspectTypes/{aspect_type_id}``. + etag (str): + Optional. If the client provided etag value + does not match the current etag value, the + DeleteAspectTypeRequest method returns an + ABORTED error response. + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + etag: str = proto.Field( + proto.STRING, + number=2, + ) + + +class ListAspectTypesRequest(proto.Message): + r"""List AspectTypes request. + + Attributes: + parent (str): + Required. The resource name of the AspectType location, of + the form: + ``projects/{project_number}/locations/{location_id}`` where + ``location_id`` refers to a Google Cloud region. + page_size (int): + Optional. Maximum number of AspectTypes to + return. The service may return fewer than this + value. If unspecified, the service returns at + most 10 AspectTypes. 
The maximum value is 1000; + values above 1000 will be coerced to 1000. + page_token (str): + Optional. Page token received from a previous + ``ListAspectTypes`` call. Provide this to retrieve the + subsequent page. When paginating, all other parameters you + provide to ``ListAspectTypes`` must match the call that + provided the page token. + filter (str): + Optional. Filter request. Filters are case-sensitive. The + service supports the following formats: + + - labels.key1 = "value1" + - labels:key1 + - name = "value" + + These restrictions can be conjoined with AND, OR, and NOT + conjunctions. + order_by (str): + Optional. Orders the result by ``name`` or ``create_time`` + fields. If not specified, the ordering is undefined. + """ + + parent: str = proto.Field( + proto.STRING, + number=1, + ) + page_size: int = proto.Field( + proto.INT32, + number=2, + ) + page_token: str = proto.Field( + proto.STRING, + number=3, + ) + filter: str = proto.Field( + proto.STRING, + number=4, + ) + order_by: str = proto.Field( + proto.STRING, + number=5, + ) + + +class ListAspectTypesResponse(proto.Message): + r"""List AspectTypes response. + + Attributes: + aspect_types (MutableSequence[google.cloud.dataplex_v1.types.AspectType]): + AspectTypes under the given parent location. + next_page_token (str): + Token to retrieve the next page of results, + or empty if there are no more results in the + list. + unreachable_locations (MutableSequence[str]): + Locations that the service couldn't reach. + """ + + @property + def raw_page(self): + return self + + aspect_types: MutableSequence['AspectType'] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message='AspectType', + ) + next_page_token: str = proto.Field( + proto.STRING, + number=2, + ) + unreachable_locations: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=3, + ) + + +class GetAspectTypeRequest(proto.Message): + r"""Get AspectType request. + + Attributes: + name (str): + Required. The resource name of the AspectType: + ``projects/{project_number}/locations/{location_id}/aspectTypes/{aspect_type_id}``. + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + + +class CreateEntryRequest(proto.Message): + r"""Create Entry request. + + Attributes: + parent (str): + Required. The resource name of the parent Entry Group: + ``projects/{project}/locations/{location}/entryGroups/{entry_group}``. + entry_id (str): + Required. Entry identifier. It has to be unique within an + Entry Group. + + Entries corresponding to Google Cloud resources use an Entry + ID format based on `full resource + names `__. + The format is a full resource name of the resource without + the prefix double slashes in the API service name part of + the full resource name. This allows retrieval of entries + using their associated resource name. + + For example, if the full resource name of a resource is + ``//library.googleapis.com/shelves/shelf1/books/book2``, + then the suggested entry_id is + ``library.googleapis.com/shelves/shelf1/books/book2``. + + It is also suggested to follow the same convention for + entries corresponding to resources from providers or systems + other than Google Cloud. + + The maximum size of the field is 4000 characters. + entry (google.cloud.dataplex_v1.types.Entry): + Required. Entry resource. 
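+# A sketch of the entry_id convention described above: take the full resource
+# name of a Google Cloud resource and drop the leading double slashes. The
+# book resource is the docstring's own example; the parent and entry type
+# names are placeholders.
+from google.cloud.dataplex_v1 import types
+
+full_resource_name = "//library.googleapis.com/shelves/shelf1/books/book2"
+
+create_request = types.CreateEntryRequest(
+    parent="projects/example-project/locations/us-central1/entryGroups/example-group",
+    entry_id=full_resource_name.lstrip("/"),  # library.googleapis.com/shelves/shelf1/books/book2
+    entry=types.Entry(
+        entry_type="projects/example-project/locations/us-central1/entryTypes/example-type",
+    ),
+)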
+ """ + + parent: str = proto.Field( + proto.STRING, + number=1, + ) + entry_id: str = proto.Field( + proto.STRING, + number=2, + ) + entry: 'Entry' = proto.Field( + proto.MESSAGE, + number=3, + message='Entry', + ) + + +class UpdateEntryRequest(proto.Message): + r"""Update Entry request. + + Attributes: + entry (google.cloud.dataplex_v1.types.Entry): + Required. Entry resource. + update_mask (google.protobuf.field_mask_pb2.FieldMask): + Optional. Mask of fields to update. To update Aspects, the + update_mask must contain the value "aspects". + + If the update_mask is empty, the service will update all + modifiable fields present in the request. + allow_missing (bool): + Optional. If set to true and the entry + doesn't exist, the service will create it. + delete_missing_aspects (bool): + Optional. If set to true and the aspect_keys specify aspect + ranges, the service deletes any existing aspects from that + range that weren't provided in the request. + aspect_keys (MutableSequence[str]): + Optional. The map keys of the Aspects which the service + should modify. It supports the following syntaxes: + + - ```` - matches an aspect of the + given type and empty path. + - ``@path`` - matches an aspect of + the given type and specified path. For example, to attach + an aspect to a field that is specified by the ``schema`` + aspect, the path should have the format + ``Schema.``. + - ``*`` - matches aspects of the + given type for all paths. + - ``*@path`` - matches aspects of all types on the given + path. + + The service will not remove existing aspects matching the + syntax unless ``delete_missing_aspects`` is set to true. + + If this field is left empty, the service treats it as + specifying exactly those Aspects present in the request. + """ + + entry: 'Entry' = proto.Field( + proto.MESSAGE, + number=1, + message='Entry', + ) + update_mask: field_mask_pb2.FieldMask = proto.Field( + proto.MESSAGE, + number=2, + message=field_mask_pb2.FieldMask, + ) + allow_missing: bool = proto.Field( + proto.BOOL, + number=3, + ) + delete_missing_aspects: bool = proto.Field( + proto.BOOL, + number=4, + ) + aspect_keys: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=5, + ) + + +class DeleteEntryRequest(proto.Message): + r"""Delete Entry request. + + Attributes: + name (str): + Required. The resource name of the Entry: + ``projects/{project}/locations/{location}/entryGroups/{entry_group}/entries/{entry}``. + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + + +class ListEntriesRequest(proto.Message): + r"""List Entries request. + + Attributes: + parent (str): + Required. The resource name of the parent Entry Group: + ``projects/{project}/locations/{location}/entryGroups/{entry_group}``. + page_size (int): + Optional. Number of items to return per page. If there are + remaining results, the service returns a next_page_token. If + unspecified, the service returns at most 10 Entries. The + maximum value is 100; values above 100 will be coerced to + 100. + page_token (str): + Optional. Page token received from a previous + ``ListEntries`` call. Provide this to retrieve the + subsequent page. + filter (str): + Optional. A filter on the entries to return. Filters are + case-sensitive. You can filter the request by the following + fields: + + - entry_type + - entry_source.display_name + + The comparison operators are =, !=, <, >, <=, >=. The + service compares strings according to lexical order. + + You can use the logical operators AND, OR, NOT in the + filter. 
+ + You can use Wildcard "*", but for entry_type you need to + provide the full project id or number. + + Example filter expressions: + + - "entry_source.display_name=AnExampleDisplayName" + - "entry_type=projects/example-project/locations/global/entryTypes/example-entry_type" + - "entry_type=projects/example-project/locations/us/entryTypes/a\* + OR entry_type=projects/another-project/locations/*" + - "NOT entry_source.display_name=AnotherExampleDisplayName". + """ + + parent: str = proto.Field( + proto.STRING, + number=1, + ) + page_size: int = proto.Field( + proto.INT32, + number=2, + ) + page_token: str = proto.Field( + proto.STRING, + number=3, + ) + filter: str = proto.Field( + proto.STRING, + number=4, + ) + + +class ListEntriesResponse(proto.Message): + r"""List Entries response. + + Attributes: + entries (MutableSequence[google.cloud.dataplex_v1.types.Entry]): + The list of entries under the given parent + location. + next_page_token (str): + Token to retrieve the next page of results, + or empty if there are no more results in the + list. + """ + + @property + def raw_page(self): + return self + + entries: MutableSequence['Entry'] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message='Entry', + ) + next_page_token: str = proto.Field( + proto.STRING, + number=2, + ) + + +class GetEntryRequest(proto.Message): + r"""Get Entry request. + + Attributes: + name (str): + Required. The resource name of the Entry: + ``projects/{project}/locations/{location}/entryGroups/{entry_group}/entries/{entry}``. + view (google.cloud.dataplex_v1.types.EntryView): + Optional. View to control which parts of an + entry the service should return. + aspect_types (MutableSequence[str]): + Optional. Limits the aspects returned to the + provided aspect types. It only works for CUSTOM + view. + paths (MutableSequence[str]): + Optional. Limits the aspects returned to + those associated with the provided paths within + the Entry. It only works for CUSTOM view. + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + view: 'EntryView' = proto.Field( + proto.ENUM, + number=2, + enum='EntryView', + ) + aspect_types: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=3, + ) + paths: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=4, + ) + + +class LookupEntryRequest(proto.Message): + r"""Lookup Entry request using permissions in the source system. + + Attributes: + name (str): + Required. The project to which the request should be + attributed in the following form: + ``projects/{project}/locations/{location}``. + view (google.cloud.dataplex_v1.types.EntryView): + Optional. View to control which parts of an + entry the service should return. + aspect_types (MutableSequence[str]): + Optional. Limits the aspects returned to the + provided aspect types. It only works for CUSTOM + view. + paths (MutableSequence[str]): + Optional. Limits the aspects returned to + those associated with the provided paths within + the Entry. It only works for CUSTOM view. + entry (str): + Required. The resource name of the Entry: + ``projects/{project}/locations/{location}/entryGroups/{entry_group}/entries/{entry}``. 
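+
+        A minimal lookup sketch (the resource names are placeholders):
+
+        .. code-block:: python
+
+            from google.cloud import dataplex_v1
+
+            client = dataplex_v1.CatalogServiceClient()
+
+            request = dataplex_v1.LookupEntryRequest(
+                # Project and location that the request is attributed to.
+                name="projects/my-project/locations/us-central1",
+                # The entry to look up, using source-system permissions.
+                entry="projects/my-project/locations/us-central1/entryGroups/my-entry-group/entries/my-entry",
+                view=dataplex_v1.EntryView.FULL,
+            )
+            entry = client.lookup_entry(request=request)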
+ """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + view: 'EntryView' = proto.Field( + proto.ENUM, + number=2, + enum='EntryView', + ) + aspect_types: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=3, + ) + paths: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=4, + ) + entry: str = proto.Field( + proto.STRING, + number=5, + ) + + +class SearchEntriesRequest(proto.Message): + r""" + + Attributes: + name (str): + Required. The project to which the request should be + attributed in the following form: + ``projects/{project}/locations/{location}``. + query (str): + Required. The query against which entries in + scope should be matched. + page_size (int): + Optional. Number of results in the search page. If <=0, then + defaults to 10. Max limit for page_size is 1000. Throws an + invalid argument for page_size > 1000. + page_token (str): + Optional. Page token received from a previous + ``SearchEntries`` call. Provide this to retrieve the + subsequent page. + order_by (str): + Optional. Specifies the ordering of results. + scope (str): + Optional. The scope under which the search should be + operating. It must either be ``organizations/`` or + ``projects/``. If it is unspecified, it + defaults to the organization where the project provided in + ``name`` is located. + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + query: str = proto.Field( + proto.STRING, + number=2, + ) + page_size: int = proto.Field( + proto.INT32, + number=3, + ) + page_token: str = proto.Field( + proto.STRING, + number=4, + ) + order_by: str = proto.Field( + proto.STRING, + number=5, + ) + scope: str = proto.Field( + proto.STRING, + number=7, + ) + + +class SearchEntriesResult(proto.Message): + r"""A single result of a SearchEntries request. + + Attributes: + linked_resource (str): + Linked resource name. + dataplex_entry (google.cloud.dataplex_v1.types.Entry): + + snippets (google.cloud.dataplex_v1.types.SearchEntriesResult.Snippets): + Snippets. + """ + + class Snippets(proto.Message): + r"""Snippets for the entry, contains HTML-style highlighting for + matched tokens, will be used in UI. + + Attributes: + dataplex_entry (google.cloud.dataplex_v1.types.Entry): + Entry + """ + + dataplex_entry: 'Entry' = proto.Field( + proto.MESSAGE, + number=1, + message='Entry', + ) + + linked_resource: str = proto.Field( + proto.STRING, + number=8, + ) + dataplex_entry: 'Entry' = proto.Field( + proto.MESSAGE, + number=9, + message='Entry', + ) + snippets: Snippets = proto.Field( + proto.MESSAGE, + number=12, + message=Snippets, + ) + + +class SearchEntriesResponse(proto.Message): + r""" + + Attributes: + results (MutableSequence[google.cloud.dataplex_v1.types.SearchEntriesResult]): + The results matching the search query. + total_size (int): + The estimated total number of matching + entries. This number isn't guaranteed to be + accurate. + next_page_token (str): + Token to retrieve the next page of results, + or empty if there are no more results in the + list. + unreachable (MutableSequence[str]): + Locations that the service couldn't reach. + Search results don't include data from these + locations. 
+ """ + + @property + def raw_page(self): + return self + + results: MutableSequence['SearchEntriesResult'] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message='SearchEntriesResult', + ) + total_size: int = proto.Field( + proto.INT32, + number=2, + ) + next_page_token: str = proto.Field( + proto.STRING, + number=3, + ) + unreachable: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=4, + ) + + +class ImportItem(proto.Message): + r"""An object that describes the values that you want to set for an + entry and its attached aspects when you import metadata. Used when + you run a metadata import job. See + [CreateMetadataJob][google.cloud.dataplex.v1.CatalogService.CreateMetadataJob]. + + You provide a collection of import items in a metadata import file. + For more information about how to create a metadata import file, see + `Metadata import + file `__. + + Attributes: + entry (google.cloud.dataplex_v1.types.Entry): + Information about an entry and its attached + aspects. + update_mask (google.protobuf.field_mask_pb2.FieldMask): + The fields to update, in paths that are relative to the + ``Entry`` resource. Separate each field with a comma. + + In ``FULL`` entry sync mode, Dataplex includes the paths of + all of the fields for an entry that can be modified, + including aspects. This means that Dataplex replaces the + existing entry with the entry in the metadata import file. + All modifiable fields are updated, regardless of the fields + that are listed in the update mask, and regardless of + whether a field is present in the ``entry`` object. + + The ``update_mask`` field is ignored when an entry is + created or re-created. + + Dataplex also determines which entries and aspects to modify + by comparing the values and timestamps that you provide in + the metadata import file with the values and timestamps that + exist in your project. For more information, see `Comparison + logic `__. + aspect_keys (MutableSequence[str]): + The aspects to modify. Supports the following syntaxes: + + - ``{aspect_type_reference}``: matches aspects that belong + to the specified aspect type and are attached directly to + the entry. + - ``{aspect_type_reference}@{path}``: matches aspects that + belong to the specified aspect type and path. + - ``{aspect_type_reference}@*``: matches aspects that + belong to the specified aspect type for all paths. + + Replace ``{aspect_type_reference}`` with a reference to the + aspect type, in the format + ``{project_id_or_number}.{location_id}.{aspect_type_id}``. + + If you leave this field empty, it is treated as specifying + exactly those aspects that are present within the specified + entry. + + In ``FULL`` entry sync mode, Dataplex implicitly adds the + keys for all of the required aspects of an entry. + """ + + entry: 'Entry' = proto.Field( + proto.MESSAGE, + number=1, + message='Entry', + ) + update_mask: field_mask_pb2.FieldMask = proto.Field( + proto.MESSAGE, + number=2, + message=field_mask_pb2.FieldMask, + ) + aspect_keys: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=3, + ) + + +class CreateMetadataJobRequest(proto.Message): + r"""Create metadata job request. + + Attributes: + parent (str): + Required. The resource name of the parent location, in the + format + ``projects/{project_id_or_number}/locations/{location_id}`` + metadata_job (google.cloud.dataplex_v1.types.MetadataJob): + Required. The metadata job resource. + metadata_job_id (str): + Optional. The metadata job ID. 
If not provided, a unique ID + is generated with the prefix ``metadata-job-``. + validate_only (bool): + Optional. The service validates the request + without performing any mutations. The default is + false. + """ + + parent: str = proto.Field( + proto.STRING, + number=1, + ) + metadata_job: 'MetadataJob' = proto.Field( + proto.MESSAGE, + number=2, + message='MetadataJob', + ) + metadata_job_id: str = proto.Field( + proto.STRING, + number=3, + ) + validate_only: bool = proto.Field( + proto.BOOL, + number=4, + ) + + +class GetMetadataJobRequest(proto.Message): + r"""Get metadata job request. + + Attributes: + name (str): + Required. The resource name of the metadata job, in the + format + ``projects/{project_id_or_number}/locations/{location_id}/metadataJobs/{metadata_job_id}``. + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + + +class ListMetadataJobsRequest(proto.Message): + r"""List metadata jobs request. + + Attributes: + parent (str): + Required. The resource name of the parent location, in the + format + ``projects/{project_id_or_number}/locations/{location_id}`` + page_size (int): + Optional. The maximum number of metadata jobs + to return. The service might return fewer jobs + than this value. If unspecified, at most 10 jobs + are returned. The maximum value is 1,000. + page_token (str): + Optional. The page token received from a previous + ``ListMetadataJobs`` call. Provide this token to retrieve + the subsequent page of results. When paginating, all other + parameters that are provided to the ``ListMetadataJobs`` + request must match the call that provided the page token. + filter (str): + Optional. Filter request. Filters are case-sensitive. The + service supports the following formats: + + - ``labels.key1 = "value1"`` + - ``labels:key1`` + - ``name = "value"`` + + You can combine filters with ``AND``, ``OR``, and ``NOT`` + operators. + order_by (str): + Optional. The field to sort the results by, either ``name`` + or ``create_time``. If not specified, the ordering is + undefined. + """ + + parent: str = proto.Field( + proto.STRING, + number=1, + ) + page_size: int = proto.Field( + proto.INT32, + number=2, + ) + page_token: str = proto.Field( + proto.STRING, + number=3, + ) + filter: str = proto.Field( + proto.STRING, + number=4, + ) + order_by: str = proto.Field( + proto.STRING, + number=5, + ) + + +class ListMetadataJobsResponse(proto.Message): + r"""List metadata jobs response. + + Attributes: + metadata_jobs (MutableSequence[google.cloud.dataplex_v1.types.MetadataJob]): + Metadata jobs under the specified parent + location. + next_page_token (str): + A token to retrieve the next page of results. + If there are no more results in the list, the + value is empty. + unreachable_locations (MutableSequence[str]): + Locations that the service couldn't reach. + """ + + @property + def raw_page(self): + return self + + metadata_jobs: MutableSequence['MetadataJob'] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message='MetadataJob', + ) + next_page_token: str = proto.Field( + proto.STRING, + number=2, + ) + unreachable_locations: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=3, + ) + + +class CancelMetadataJobRequest(proto.Message): + r"""Cancel metadata job request. + + Attributes: + name (str): + Required. 
The resource name of the job, in the format + ``projects/{project_id_or_number}/locations/{location_id}/metadataJobs/{metadata_job_id}`` + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + + +class MetadataJob(proto.Message): + r"""A metadata job resource. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + name (str): + Output only. Identifier. The name of the resource that the + configuration is applied to, in the format + ``projects/{project_number}/locations/{location_id}/metadataJobs/{metadata_job_id}``. + uid (str): + Output only. A system-generated, globally + unique ID for the metadata job. If the metadata + job is deleted and then re-created with the same + name, this ID is different. + create_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. The time when the metadata job + was created. + update_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. The time when the metadata job + was updated. + labels (MutableMapping[str, str]): + Optional. User-defined labels. + type_ (google.cloud.dataplex_v1.types.MetadataJob.Type): + Required. Metadata job type. + import_spec (google.cloud.dataplex_v1.types.MetadataJob.ImportJobSpec): + Import job specification. + + This field is a member of `oneof`_ ``spec``. + import_result (google.cloud.dataplex_v1.types.MetadataJob.ImportJobResult): + Output only. Import job result. + + This field is a member of `oneof`_ ``result``. + status (google.cloud.dataplex_v1.types.MetadataJob.Status): + Output only. Metadata job status. + """ + class Type(proto.Enum): + r"""Metadata job type. + + Values: + TYPE_UNSPECIFIED (0): + Unspecified. + IMPORT (1): + Import job. + """ + TYPE_UNSPECIFIED = 0 + IMPORT = 1 + + class ImportJobResult(proto.Message): + r"""Results from a metadata import job. + + Attributes: + deleted_entries (int): + Output only. The total number of entries that + were deleted. + updated_entries (int): + Output only. The total number of entries that + were updated. + created_entries (int): + Output only. The total number of entries that + were created. + unchanged_entries (int): + Output only. The total number of entries that + were unchanged. + recreated_entries (int): + Output only. The total number of entries that + were recreated. + update_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. The time when the status was + updated. + """ + + deleted_entries: int = proto.Field( + proto.INT64, + number=1, + ) + updated_entries: int = proto.Field( + proto.INT64, + number=2, + ) + created_entries: int = proto.Field( + proto.INT64, + number=3, + ) + unchanged_entries: int = proto.Field( + proto.INT64, + number=4, + ) + recreated_entries: int = proto.Field( + proto.INT64, + number=6, + ) + update_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=5, + message=timestamp_pb2.Timestamp, + ) + + class ImportJobSpec(proto.Message): + r"""Job specification for a metadata import job + + Attributes: + source_storage_uri (str): + Optional. The URI of a Cloud Storage bucket or folder + (beginning with ``gs://`` and ending with ``/``) that + contains the metadata import files for this job. + + A metadata import file defines the values to set for each of + the entries and aspects in a metadata job. For more + information about how to create a metadata import file and + the file requirements, see `Metadata import + file `__. + + You can provide multiple metadata import files in the same + metadata job. 
The bucket or folder must contain at least one + metadata import file, in JSON Lines format (either ``.json`` + or ``.jsonl`` file extension). + + In ``FULL`` entry sync mode, don't save the metadata import + file in a folder named ``SOURCE_STORAGE_URI/deletions/``. + + **Caution**: If the metadata import file contains no data, + all entries and aspects that belong to the job's scope are + deleted. + source_create_time (google.protobuf.timestamp_pb2.Timestamp): + Optional. The time when the process that + created the metadata import files began. + scope (google.cloud.dataplex_v1.types.MetadataJob.ImportJobSpec.ImportJobScope): + Required. A boundary on the scope of impact + that the metadata import job can have. + entry_sync_mode (google.cloud.dataplex_v1.types.MetadataJob.ImportJobSpec.SyncMode): + Required. The sync mode for entries. Only ``FULL`` mode is + supported for entries. All entries in the job's scope are + modified. If an entry exists in Dataplex but isn't included + in the metadata import file, the entry is deleted when you + run the metadata job. + aspect_sync_mode (google.cloud.dataplex_v1.types.MetadataJob.ImportJobSpec.SyncMode): + Required. The sync mode for aspects. Only ``INCREMENTAL`` + mode is supported for aspects. An aspect is modified only if + the metadata import file includes a reference to the aspect + in the ``update_mask`` field and the ``aspect_keys`` field. + log_level (google.cloud.dataplex_v1.types.MetadataJob.ImportJobSpec.LogLevel): + Optional. The level of logs to write to Cloud Logging for + this job. + + Debug-level logs provide highly-detailed information for + troubleshooting, but their increased verbosity could incur + `additional + costs `__ that + might not be merited for all jobs. + + If unspecified, defaults to ``INFO``. + """ + class SyncMode(proto.Enum): + r"""Specifies how the entries and aspects in a metadata job are + updated. + + Values: + SYNC_MODE_UNSPECIFIED (0): + Sync mode unspecified. + FULL (1): + All resources in the job's scope are + modified. If a resource exists in Dataplex but + isn't included in the metadata import file, the + resource is deleted when you run the metadata + job. Use this mode to perform a full sync of the + set of entries in the job scope. + INCREMENTAL (2): + Only the entries and aspects that are + explicitly included in the metadata import file + are modified. Use this mode to modify a subset + of resources while leaving unreferenced + resources unchanged. + """ + SYNC_MODE_UNSPECIFIED = 0 + FULL = 1 + INCREMENTAL = 2 + + class LogLevel(proto.Enum): + r"""The level of logs to write to Cloud Logging for this job. + + Values: + LOG_LEVEL_UNSPECIFIED (0): + Log level unspecified. + DEBUG (1): + Debug-level logging. Captures detailed logs for each import + item. Use debug-level logging to troubleshoot issues with + specific import items. For example, use debug-level logging + to identify resources that are missing from the job scope, + entries or aspects that don't conform to the associated + entry type or aspect type, or other misconfigurations with + the metadata import file. + + Depending on the size of your metadata job and the number of + logs that are generated, debug-level logging might incur + `additional + costs `__. + INFO (2): + Info-level logging. Captures logs at the + overall job level. Includes aggregate logs about + import items, but doesn't specify which import + item has an error. 
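+
+            For instance, a job spec that opts into debug-level logging
+            might look like this sketch (the bucket and resource names
+            are placeholders):
+
+            .. code-block:: python
+
+                from google.cloud import dataplex_v1
+
+                spec = dataplex_v1.MetadataJob.ImportJobSpec(
+                    source_storage_uri="gs://my-bucket/import/",
+                    entry_sync_mode=dataplex_v1.MetadataJob.ImportJobSpec.SyncMode.FULL,
+                    aspect_sync_mode=dataplex_v1.MetadataJob.ImportJobSpec.SyncMode.INCREMENTAL,
+                    log_level=dataplex_v1.MetadataJob.ImportJobSpec.LogLevel.DEBUG,
+                    scope=dataplex_v1.MetadataJob.ImportJobSpec.ImportJobScope(
+                        entry_groups=["projects/123/locations/us-central1/entryGroups/my-entry-group"],
+                        entry_types=["projects/123/locations/us-central1/entryTypes/my-entry-type"],
+                    ),
+                )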
+ """ + LOG_LEVEL_UNSPECIFIED = 0 + DEBUG = 1 + INFO = 2 + + class ImportJobScope(proto.Message): + r"""A boundary on the scope of impact that the metadata import + job can have. + + Attributes: + entry_groups (MutableSequence[str]): + Required. The entry group that is in scope for the import + job, specified as a relative resource name in the format + ``projects/{project_number_or_id}/locations/{location_id}/entryGroups/{entry_group_id}``. + Only entries that belong to the specified entry group are + affected by the job. + + Must contain exactly one element. The entry group and the + job must be in the same location. + entry_types (MutableSequence[str]): + Required. The entry types that are in scope for the import + job, specified as relative resource names in the format + ``projects/{project_number_or_id}/locations/{location_id}/entryTypes/{entry_type_id}``. + The job modifies only the entries that belong to these entry + types. + + If the metadata import file attempts to modify an entry + whose type isn't included in this list, the import job is + halted before modifying any entries or aspects. + + The location of an entry type must either match the location + of the job, or the entry type must be global. + aspect_types (MutableSequence[str]): + Optional. The aspect types that are in scope for the import + job, specified as relative resource names in the format + ``projects/{project_number_or_id}/locations/{location_id}/aspectTypes/{aspect_type_id}``. + The job modifies only the aspects that belong to these + aspect types. + + If the metadata import file attempts to modify an aspect + whose type isn't included in this list, the import job is + halted before modifying any entries or aspects. + + The location of an aspect type must either match the + location of the job, or the aspect type must be global. + """ + + entry_groups: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=1, + ) + entry_types: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=2, + ) + aspect_types: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=3, + ) + + source_storage_uri: str = proto.Field( + proto.STRING, + number=1, + ) + source_create_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=5, + message=timestamp_pb2.Timestamp, + ) + scope: 'MetadataJob.ImportJobSpec.ImportJobScope' = proto.Field( + proto.MESSAGE, + number=2, + message='MetadataJob.ImportJobSpec.ImportJobScope', + ) + entry_sync_mode: 'MetadataJob.ImportJobSpec.SyncMode' = proto.Field( + proto.ENUM, + number=3, + enum='MetadataJob.ImportJobSpec.SyncMode', + ) + aspect_sync_mode: 'MetadataJob.ImportJobSpec.SyncMode' = proto.Field( + proto.ENUM, + number=4, + enum='MetadataJob.ImportJobSpec.SyncMode', + ) + log_level: 'MetadataJob.ImportJobSpec.LogLevel' = proto.Field( + proto.ENUM, + number=6, + enum='MetadataJob.ImportJobSpec.LogLevel', + ) + + class Status(proto.Message): + r"""Metadata job status. + + Attributes: + state (google.cloud.dataplex_v1.types.MetadataJob.Status.State): + Output only. State of the metadata job. + message (str): + Output only. Message relating to the + progression of a metadata job. + completion_percent (int): + Output only. Progress tracking. + update_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. The time when the status was + updated. + """ + class State(proto.Enum): + r"""State of a metadata job. + + Values: + STATE_UNSPECIFIED (0): + State unspecified. + QUEUED (1): + The job is queued. + RUNNING (2): + The job is running. 
+ CANCELING (3): + The job is being canceled. + CANCELED (4): + The job is canceled. + SUCCEEDED (5): + The job succeeded. + FAILED (6): + The job failed. + SUCCEEDED_WITH_ERRORS (7): + The job completed with some errors. + """ + STATE_UNSPECIFIED = 0 + QUEUED = 1 + RUNNING = 2 + CANCELING = 3 + CANCELED = 4 + SUCCEEDED = 5 + FAILED = 6 + SUCCEEDED_WITH_ERRORS = 7 + + state: 'MetadataJob.Status.State' = proto.Field( + proto.ENUM, + number=1, + enum='MetadataJob.Status.State', + ) + message: str = proto.Field( + proto.STRING, + number=2, + ) + completion_percent: int = proto.Field( + proto.INT32, + number=3, + ) + update_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=4, + message=timestamp_pb2.Timestamp, + ) + + name: str = proto.Field( + proto.STRING, + number=1, + ) + uid: str = proto.Field( + proto.STRING, + number=2, + ) + create_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=3, + message=timestamp_pb2.Timestamp, + ) + update_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=4, + message=timestamp_pb2.Timestamp, + ) + labels: MutableMapping[str, str] = proto.MapField( + proto.STRING, + proto.STRING, + number=5, + ) + type_: Type = proto.Field( + proto.ENUM, + number=6, + enum=Type, + ) + import_spec: ImportJobSpec = proto.Field( + proto.MESSAGE, + number=100, + oneof='spec', + message=ImportJobSpec, + ) + import_result: ImportJobResult = proto.Field( + proto.MESSAGE, + number=200, + oneof='result', + message=ImportJobResult, + ) + status: Status = proto.Field( + proto.MESSAGE, + number=7, + message=Status, + ) + + +__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/types/content.py b/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/types/content.py new file mode 100644 index 000000000000..d78845393886 --- /dev/null +++ b/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/types/content.py @@ -0,0 +1,227 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from __future__ import annotations + +from typing import MutableMapping, MutableSequence + +import proto # type: ignore + +from google.cloud.dataplex_v1.types import analyze +from google.protobuf import field_mask_pb2 # type: ignore + + +__protobuf__ = proto.module( + package='google.cloud.dataplex.v1', + manifest={ + 'CreateContentRequest', + 'UpdateContentRequest', + 'DeleteContentRequest', + 'ListContentRequest', + 'ListContentResponse', + 'GetContentRequest', + }, +) + + +class CreateContentRequest(proto.Message): + r"""Create content request. + + Attributes: + parent (str): + Required. The resource name of the parent lake: + projects/{project_id}/locations/{location_id}/lakes/{lake_id} + content (google.cloud.dataplex_v1.types.Content): + Required. Content resource. + validate_only (bool): + Optional. Only validate the request, but do + not perform mutations. The default is false. 
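+
+        A dry-run sketch that validates a SQL script without creating it
+        (the lake name and script body are placeholders):
+
+        .. code-block:: python
+
+            from google.cloud import dataplex_v1
+
+            client = dataplex_v1.ContentServiceClient()
+
+            request = dataplex_v1.CreateContentRequest(
+                parent="projects/my-project/locations/us-central1/lakes/my-lake",
+                content=dataplex_v1.Content(
+                    path="queries/example.sql",
+                    data_text="SELECT 1",
+                    sql_script=dataplex_v1.Content.SqlScript(
+                        engine=dataplex_v1.Content.SqlScript.QueryEngine.SPARK,
+                    ),
+                ),
+                validate_only=True,  # validate only; no mutation is performed
+            )
+            client.create_content(request=request)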
+ """ + + parent: str = proto.Field( + proto.STRING, + number=1, + ) + content: analyze.Content = proto.Field( + proto.MESSAGE, + number=2, + message=analyze.Content, + ) + validate_only: bool = proto.Field( + proto.BOOL, + number=3, + ) + + +class UpdateContentRequest(proto.Message): + r"""Update content request. + + Attributes: + update_mask (google.protobuf.field_mask_pb2.FieldMask): + Required. Mask of fields to update. + content (google.cloud.dataplex_v1.types.Content): + Required. Update description. Only fields specified in + ``update_mask`` are updated. + validate_only (bool): + Optional. Only validate the request, but do + not perform mutations. The default is false. + """ + + update_mask: field_mask_pb2.FieldMask = proto.Field( + proto.MESSAGE, + number=1, + message=field_mask_pb2.FieldMask, + ) + content: analyze.Content = proto.Field( + proto.MESSAGE, + number=2, + message=analyze.Content, + ) + validate_only: bool = proto.Field( + proto.BOOL, + number=3, + ) + + +class DeleteContentRequest(proto.Message): + r"""Delete content request. + + Attributes: + name (str): + Required. The resource name of the content: + projects/{project_id}/locations/{location_id}/lakes/{lake_id}/content/{content_id} + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + + +class ListContentRequest(proto.Message): + r"""List content request. Returns the BASIC Content view. + + Attributes: + parent (str): + Required. The resource name of the parent lake: + projects/{project_id}/locations/{location_id}/lakes/{lake_id} + page_size (int): + Optional. Maximum number of content to + return. The service may return fewer than this + value. If unspecified, at most 10 content will + be returned. The maximum value is 1000; values + above 1000 will be coerced to 1000. + page_token (str): + Optional. Page token received from a previous + ``ListContent`` call. Provide this to retrieve the + subsequent page. When paginating, all other parameters + provided to ``ListContent`` must match the call that + provided the page token. + filter (str): + Optional. Filter request. Filters are case-sensitive. The + following formats are supported: + + labels.key1 = "value1" labels:key1 type = "NOTEBOOK" type = + "SQL_SCRIPT" + + These restrictions can be coinjoined with AND, OR and NOT + conjunctions. + """ + + parent: str = proto.Field( + proto.STRING, + number=1, + ) + page_size: int = proto.Field( + proto.INT32, + number=2, + ) + page_token: str = proto.Field( + proto.STRING, + number=3, + ) + filter: str = proto.Field( + proto.STRING, + number=4, + ) + + +class ListContentResponse(proto.Message): + r"""List content response. + + Attributes: + content (MutableSequence[google.cloud.dataplex_v1.types.Content]): + Content under the given parent lake. + next_page_token (str): + Token to retrieve the next page of results, + or empty if there are no more results in the + list. + """ + + @property + def raw_page(self): + return self + + content: MutableSequence[analyze.Content] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message=analyze.Content, + ) + next_page_token: str = proto.Field( + proto.STRING, + number=2, + ) + + +class GetContentRequest(proto.Message): + r"""Get content request. + + Attributes: + name (str): + Required. The resource name of the content: + projects/{project_id}/locations/{location_id}/lakes/{lake_id}/content/{content_id} + view (google.cloud.dataplex_v1.types.GetContentRequest.ContentView): + Optional. Specify content view to make a + partial request. 
+ """ + class ContentView(proto.Enum): + r"""Specifies whether the request should return the full or the + partial representation. + + Values: + CONTENT_VIEW_UNSPECIFIED (0): + Content view not specified. Defaults to + BASIC. The API will default to the BASIC view. + BASIC (1): + Will not return the ``data_text`` field. + FULL (2): + Returns the complete proto. + """ + CONTENT_VIEW_UNSPECIFIED = 0 + BASIC = 1 + FULL = 2 + + name: str = proto.Field( + proto.STRING, + number=1, + ) + view: ContentView = proto.Field( + proto.ENUM, + number=2, + enum=ContentView, + ) + + +__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/types/data_discovery.py b/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/types/data_discovery.py new file mode 100644 index 000000000000..434fb578a40b --- /dev/null +++ b/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/types/data_discovery.py @@ -0,0 +1,250 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from __future__ import annotations + +from typing import MutableMapping, MutableSequence + +import proto # type: ignore + + +__protobuf__ = proto.module( + package='google.cloud.dataplex.v1', + manifest={ + 'DataDiscoverySpec', + 'DataDiscoveryResult', + }, +) + + +class DataDiscoverySpec(proto.Message): + r"""Spec for a data discovery scan. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + bigquery_publishing_config (google.cloud.dataplex_v1.types.DataDiscoverySpec.BigQueryPublishingConfig): + Optional. Configuration for metadata + publishing. + storage_config (google.cloud.dataplex_v1.types.DataDiscoverySpec.StorageConfig): + Cloud Storage related configurations. + + This field is a member of `oneof`_ ``resource_config``. + """ + + class BigQueryPublishingConfig(proto.Message): + r"""Describes BigQuery publishing configurations. + + Attributes: + table_type (google.cloud.dataplex_v1.types.DataDiscoverySpec.BigQueryPublishingConfig.TableType): + Optional. Determines whether to publish + discovered tables as BigLake external tables or + non-BigLake external tables. + connection (str): + Optional. The BigQuery connection used to create BigLake + tables. Must be in the form + ``projects/{project_id}/locations/{location_id}/connections/{connection_id}`` + """ + class TableType(proto.Enum): + r"""Determines how discovered tables are published. + + Values: + TABLE_TYPE_UNSPECIFIED (0): + Table type unspecified. + EXTERNAL (1): + Default. Discovered tables are published as + BigQuery external tables whose data is accessed + using the credentials of the user querying the + table. + BIGLAKE (2): + Discovered tables are published as BigLake + external tables whose data is accessed using the + credentials of the associated BigQuery + connection. 
+ """ + TABLE_TYPE_UNSPECIFIED = 0 + EXTERNAL = 1 + BIGLAKE = 2 + + table_type: 'DataDiscoverySpec.BigQueryPublishingConfig.TableType' = proto.Field( + proto.ENUM, + number=2, + enum='DataDiscoverySpec.BigQueryPublishingConfig.TableType', + ) + connection: str = proto.Field( + proto.STRING, + number=3, + ) + + class StorageConfig(proto.Message): + r"""Configurations related to Cloud Storage as the data source. + + Attributes: + include_patterns (MutableSequence[str]): + Optional. Defines the data to include during + discovery when only a subset of the data should + be considered. Provide a list of patterns that + identify the data to include. For Cloud Storage + bucket assets, these patterns are interpreted as + glob patterns used to match object names. For + BigQuery dataset assets, these patterns are + interpreted as patterns to match table names. + exclude_patterns (MutableSequence[str]): + Optional. Defines the data to exclude during + discovery. Provide a list of patterns that + identify the data to exclude. For Cloud Storage + bucket assets, these patterns are interpreted as + glob patterns used to match object names. For + BigQuery dataset assets, these patterns are + interpreted as patterns to match table names. + csv_options (google.cloud.dataplex_v1.types.DataDiscoverySpec.StorageConfig.CsvOptions): + Optional. Configuration for CSV data. + json_options (google.cloud.dataplex_v1.types.DataDiscoverySpec.StorageConfig.JsonOptions): + Optional. Configuration for JSON data. + """ + + class CsvOptions(proto.Message): + r"""Describes CSV and similar semi-structured data formats. + + Attributes: + header_rows (int): + Optional. The number of rows to interpret as + header rows that should be skipped when reading + data rows. + delimiter (str): + Optional. The delimiter that is used to separate values. The + default is ``,`` (comma). + encoding (str): + Optional. The character encoding of the data. + The default is UTF-8. + type_inference_disabled (bool): + Optional. Whether to disable the inference of + data types for CSV data. If true, all columns + are registered as strings. + quote (str): + Optional. The character used to quote column values. Accepts + ``"`` (double quotation mark) or ``'`` (single quotation + mark). If unspecified, defaults to ``"`` (double quotation + mark). + """ + + header_rows: int = proto.Field( + proto.INT32, + number=1, + ) + delimiter: str = proto.Field( + proto.STRING, + number=2, + ) + encoding: str = proto.Field( + proto.STRING, + number=3, + ) + type_inference_disabled: bool = proto.Field( + proto.BOOL, + number=4, + ) + quote: str = proto.Field( + proto.STRING, + number=5, + ) + + class JsonOptions(proto.Message): + r"""Describes JSON data format. + + Attributes: + encoding (str): + Optional. The character encoding of the data. + The default is UTF-8. + type_inference_disabled (bool): + Optional. Whether to disable the inference of + data types for JSON data. If true, all columns + are registered as their primitive types + (strings, number, or boolean). 
+ """ + + encoding: str = proto.Field( + proto.STRING, + number=1, + ) + type_inference_disabled: bool = proto.Field( + proto.BOOL, + number=2, + ) + + include_patterns: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=1, + ) + exclude_patterns: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=2, + ) + csv_options: 'DataDiscoverySpec.StorageConfig.CsvOptions' = proto.Field( + proto.MESSAGE, + number=3, + message='DataDiscoverySpec.StorageConfig.CsvOptions', + ) + json_options: 'DataDiscoverySpec.StorageConfig.JsonOptions' = proto.Field( + proto.MESSAGE, + number=4, + message='DataDiscoverySpec.StorageConfig.JsonOptions', + ) + + bigquery_publishing_config: BigQueryPublishingConfig = proto.Field( + proto.MESSAGE, + number=1, + message=BigQueryPublishingConfig, + ) + storage_config: StorageConfig = proto.Field( + proto.MESSAGE, + number=100, + oneof='resource_config', + message=StorageConfig, + ) + + +class DataDiscoveryResult(proto.Message): + r"""The output of a data discovery scan. + + Attributes: + bigquery_publishing (google.cloud.dataplex_v1.types.DataDiscoveryResult.BigQueryPublishing): + Output only. Configuration for metadata + publishing. + """ + + class BigQueryPublishing(proto.Message): + r"""Describes BigQuery publishing configurations. + + Attributes: + dataset (str): + Output only. The BigQuery dataset to publish to. It takes + the form ``projects/{project_id}/datasets/{dataset_id}``. If + not set, the service creates a default publishing dataset. + """ + + dataset: str = proto.Field( + proto.STRING, + number=1, + ) + + bigquery_publishing: BigQueryPublishing = proto.Field( + proto.MESSAGE, + number=1, + message=BigQueryPublishing, + ) + + +__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/types/data_profile.py b/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/types/data_profile.py new file mode 100644 index 000000000000..1e5f3aad0d9f --- /dev/null +++ b/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/types/data_profile.py @@ -0,0 +1,540 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from __future__ import annotations + +from typing import MutableMapping, MutableSequence + +import proto # type: ignore + +from google.cloud.dataplex_v1.types import processing + + +__protobuf__ = proto.module( + package='google.cloud.dataplex.v1', + manifest={ + 'DataProfileSpec', + 'DataProfileResult', + }, +) + + +class DataProfileSpec(proto.Message): + r"""DataProfileScan related setting. + + Attributes: + sampling_percent (float): + Optional. The percentage of the records to be selected from + the dataset for DataScan. + + - Value can range between 0.0 and 100.0 with up to 3 + significant decimal digits. + - Sampling is not applied if ``sampling_percent`` is not + specified, 0 or + + 100. + row_filter (str): + Optional. A filter applied to all rows in a + single DataScan job. 
The filter needs to be a + valid SQL expression for a WHERE clause in + BigQuery standard SQL syntax. + Example: col1 >= 0 AND col2 < 10 + post_scan_actions (google.cloud.dataplex_v1.types.DataProfileSpec.PostScanActions): + Optional. Actions to take upon job + completion.. + include_fields (google.cloud.dataplex_v1.types.DataProfileSpec.SelectedFields): + Optional. The fields to include in data profile. + + If not specified, all fields at the time of profile scan job + execution are included, except for ones listed in + ``exclude_fields``. + exclude_fields (google.cloud.dataplex_v1.types.DataProfileSpec.SelectedFields): + Optional. The fields to exclude from data profile. + + If specified, the fields will be excluded from data profile, + regardless of ``include_fields`` value. + """ + + class PostScanActions(proto.Message): + r"""The configuration of post scan actions of DataProfileScan + job. + + Attributes: + bigquery_export (google.cloud.dataplex_v1.types.DataProfileSpec.PostScanActions.BigQueryExport): + Optional. If set, results will be exported to + the provided BigQuery table. + """ + + class BigQueryExport(proto.Message): + r"""The configuration of BigQuery export post scan action. + + Attributes: + results_table (str): + Optional. The BigQuery table to export DataProfileScan + results to. Format: + //bigquery.googleapis.com/projects/PROJECT_ID/datasets/DATASET_ID/tables/TABLE_ID + """ + + results_table: str = proto.Field( + proto.STRING, + number=1, + ) + + bigquery_export: 'DataProfileSpec.PostScanActions.BigQueryExport' = proto.Field( + proto.MESSAGE, + number=1, + message='DataProfileSpec.PostScanActions.BigQueryExport', + ) + + class SelectedFields(proto.Message): + r"""The specification for fields to include or exclude in data + profile scan. + + Attributes: + field_names (MutableSequence[str]): + Optional. Expected input is a list of fully + qualified names of fields as in the schema. + + Only top-level field names for nested fields are + supported. For instance, if 'x' is of nested + field type, listing 'x' is supported but 'x.y.z' + is not supported. Here 'y' and 'y.z' are nested + fields of 'x'. + """ + + field_names: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=1, + ) + + sampling_percent: float = proto.Field( + proto.FLOAT, + number=2, + ) + row_filter: str = proto.Field( + proto.STRING, + number=3, + ) + post_scan_actions: PostScanActions = proto.Field( + proto.MESSAGE, + number=4, + message=PostScanActions, + ) + include_fields: SelectedFields = proto.Field( + proto.MESSAGE, + number=5, + message=SelectedFields, + ) + exclude_fields: SelectedFields = proto.Field( + proto.MESSAGE, + number=6, + message=SelectedFields, + ) + + +class DataProfileResult(proto.Message): + r"""DataProfileResult defines the output of DataProfileScan. Each + field of the table will have field type specific profile result. + + Attributes: + row_count (int): + The count of rows scanned. + profile (google.cloud.dataplex_v1.types.DataProfileResult.Profile): + The profile information per field. + scanned_data (google.cloud.dataplex_v1.types.ScannedData): + The data scanned for this result. + post_scan_actions_result (google.cloud.dataplex_v1.types.DataProfileResult.PostScanActionsResult): + Output only. The result of post scan actions. + """ + + class Profile(proto.Message): + r"""Contains name, type, mode and field type specific profile + information. 
+ + Attributes: + fields (MutableSequence[google.cloud.dataplex_v1.types.DataProfileResult.Profile.Field]): + List of fields with structural and profile + information for each field. + """ + + class Field(proto.Message): + r"""A field within a table. + + Attributes: + name (str): + The name of the field. + type_ (str): + The data type retrieved from the schema of the data source. + For instance, for a BigQuery native table, it is the + `BigQuery Table + Schema `__. + For a Dataplex Entity, it is the `Entity + Schema `__. + mode (str): + The mode of the field. Possible values include: + + - REQUIRED, if it is a required field. + - NULLABLE, if it is an optional field. + - REPEATED, if it is a repeated field. + profile (google.cloud.dataplex_v1.types.DataProfileResult.Profile.Field.ProfileInfo): + Profile information for the corresponding + field. + """ + + class ProfileInfo(proto.Message): + r"""The profile information for each field type. + + This message has `oneof`_ fields (mutually exclusive fields). + For each oneof, at most one member field can be set at the same time. + Setting any member of the oneof automatically clears all other + members. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + null_ratio (float): + Ratio of rows with null value against total + scanned rows. + distinct_ratio (float): + Ratio of rows with distinct values against + total scanned rows. Not available for complex + non-groupable field type, including RECORD, + ARRAY, GEOGRAPHY, and JSON, as well as fields + with REPEATABLE mode. + top_n_values (MutableSequence[google.cloud.dataplex_v1.types.DataProfileResult.Profile.Field.ProfileInfo.TopNValue]): + The list of top N non-null values, frequency + and ratio with which they occur in the scanned + data. N is 10 or equal to the number of distinct + values in the field, whichever is smaller. Not + available for complex non-groupable field type, + including RECORD, ARRAY, GEOGRAPHY, and JSON, as + well as fields with REPEATABLE mode. + string_profile (google.cloud.dataplex_v1.types.DataProfileResult.Profile.Field.ProfileInfo.StringFieldInfo): + String type field information. + + This field is a member of `oneof`_ ``field_info``. + integer_profile (google.cloud.dataplex_v1.types.DataProfileResult.Profile.Field.ProfileInfo.IntegerFieldInfo): + Integer type field information. + + This field is a member of `oneof`_ ``field_info``. + double_profile (google.cloud.dataplex_v1.types.DataProfileResult.Profile.Field.ProfileInfo.DoubleFieldInfo): + Double type field information. + + This field is a member of `oneof`_ ``field_info``. + """ + + class StringFieldInfo(proto.Message): + r"""The profile information for a string type field. + + Attributes: + min_length (int): + Minimum length of non-null values in the + scanned data. + max_length (int): + Maximum length of non-null values in the + scanned data. + average_length (float): + Average length of non-null values in the + scanned data. + """ + + min_length: int = proto.Field( + proto.INT64, + number=1, + ) + max_length: int = proto.Field( + proto.INT64, + number=2, + ) + average_length: float = proto.Field( + proto.DOUBLE, + number=3, + ) + + class IntegerFieldInfo(proto.Message): + r"""The profile information for an integer type field. + + Attributes: + average (float): + Average of non-null values in the scanned + data. NaN, if the field has a NaN. + standard_deviation (float): + Standard deviation of non-null values in the + scanned data. 
NaN, if the field has a NaN. + min_ (int): + Minimum of non-null values in the scanned + data. NaN, if the field has a NaN. + quartiles (MutableSequence[int]): + A quartile divides the number of data points + into four parts, or quarters, of more-or-less + equal size. Three main quartiles used are: The + first quartile (Q1) splits off the lowest 25% of + data from the highest 75%. It is also known as + the lower or 25th empirical quartile, as 25% of + the data is below this point. The second + quartile (Q2) is the median of a data set. So, + 50% of the data lies below this point. The third + quartile (Q3) splits off the highest 25% of data + from the lowest 75%. It is known as the upper or + 75th empirical quartile, as 75% of the data lies + below this point. Here, the quartiles is + provided as an ordered list of approximate + quartile values for the scanned data, occurring + in order Q1, median, Q3. + max_ (int): + Maximum of non-null values in the scanned + data. NaN, if the field has a NaN. + """ + + average: float = proto.Field( + proto.DOUBLE, + number=1, + ) + standard_deviation: float = proto.Field( + proto.DOUBLE, + number=3, + ) + min_: int = proto.Field( + proto.INT64, + number=4, + ) + quartiles: MutableSequence[int] = proto.RepeatedField( + proto.INT64, + number=6, + ) + max_: int = proto.Field( + proto.INT64, + number=5, + ) + + class DoubleFieldInfo(proto.Message): + r"""The profile information for a double type field. + + Attributes: + average (float): + Average of non-null values in the scanned + data. NaN, if the field has a NaN. + standard_deviation (float): + Standard deviation of non-null values in the + scanned data. NaN, if the field has a NaN. + min_ (float): + Minimum of non-null values in the scanned + data. NaN, if the field has a NaN. + quartiles (MutableSequence[float]): + A quartile divides the number of data points + into four parts, or quarters, of more-or-less + equal size. Three main quartiles used are: The + first quartile (Q1) splits off the lowest 25% of + data from the highest 75%. It is also known as + the lower or 25th empirical quartile, as 25% of + the data is below this point. The second + quartile (Q2) is the median of a data set. So, + 50% of the data lies below this point. The third + quartile (Q3) splits off the highest 25% of data + from the lowest 75%. It is known as the upper or + 75th empirical quartile, as 75% of the data lies + below this point. Here, the quartiles is + provided as an ordered list of quartile values + for the scanned data, occurring in order Q1, + median, Q3. + max_ (float): + Maximum of non-null values in the scanned + data. NaN, if the field has a NaN. + """ + + average: float = proto.Field( + proto.DOUBLE, + number=1, + ) + standard_deviation: float = proto.Field( + proto.DOUBLE, + number=3, + ) + min_: float = proto.Field( + proto.DOUBLE, + number=4, + ) + quartiles: MutableSequence[float] = proto.RepeatedField( + proto.DOUBLE, + number=6, + ) + max_: float = proto.Field( + proto.DOUBLE, + number=5, + ) + + class TopNValue(proto.Message): + r"""Top N non-null values in the scanned data. + + Attributes: + value (str): + String value of a top N non-null value. + count (int): + Count of the corresponding value in the + scanned data. + ratio (float): + Ratio of the corresponding value in the field + against the total number of rows in the scanned + data. 
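+
+            A sketch of reading these values from a profile result
+            (``result`` is assumed to be a ``DataProfileResult`` obtained
+            from a completed scan job):
+
+            .. code-block:: python
+
+                for field in result.profile.fields:
+                    info = field.profile
+                    for top in info.top_n_values:
+                        print(f"{field.name}: {top.value} x{top.count} ({top.ratio:.2%})")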
+ """ + + value: str = proto.Field( + proto.STRING, + number=1, + ) + count: int = proto.Field( + proto.INT64, + number=2, + ) + ratio: float = proto.Field( + proto.DOUBLE, + number=3, + ) + + null_ratio: float = proto.Field( + proto.DOUBLE, + number=2, + ) + distinct_ratio: float = proto.Field( + proto.DOUBLE, + number=3, + ) + top_n_values: MutableSequence['DataProfileResult.Profile.Field.ProfileInfo.TopNValue'] = proto.RepeatedField( + proto.MESSAGE, + number=4, + message='DataProfileResult.Profile.Field.ProfileInfo.TopNValue', + ) + string_profile: 'DataProfileResult.Profile.Field.ProfileInfo.StringFieldInfo' = proto.Field( + proto.MESSAGE, + number=101, + oneof='field_info', + message='DataProfileResult.Profile.Field.ProfileInfo.StringFieldInfo', + ) + integer_profile: 'DataProfileResult.Profile.Field.ProfileInfo.IntegerFieldInfo' = proto.Field( + proto.MESSAGE, + number=102, + oneof='field_info', + message='DataProfileResult.Profile.Field.ProfileInfo.IntegerFieldInfo', + ) + double_profile: 'DataProfileResult.Profile.Field.ProfileInfo.DoubleFieldInfo' = proto.Field( + proto.MESSAGE, + number=103, + oneof='field_info', + message='DataProfileResult.Profile.Field.ProfileInfo.DoubleFieldInfo', + ) + + name: str = proto.Field( + proto.STRING, + number=1, + ) + type_: str = proto.Field( + proto.STRING, + number=2, + ) + mode: str = proto.Field( + proto.STRING, + number=3, + ) + profile: 'DataProfileResult.Profile.Field.ProfileInfo' = proto.Field( + proto.MESSAGE, + number=4, + message='DataProfileResult.Profile.Field.ProfileInfo', + ) + + fields: MutableSequence['DataProfileResult.Profile.Field'] = proto.RepeatedField( + proto.MESSAGE, + number=2, + message='DataProfileResult.Profile.Field', + ) + + class PostScanActionsResult(proto.Message): + r"""The result of post scan actions of DataProfileScan job. + + Attributes: + bigquery_export_result (google.cloud.dataplex_v1.types.DataProfileResult.PostScanActionsResult.BigQueryExportResult): + Output only. The result of BigQuery export + post scan action. + """ + + class BigQueryExportResult(proto.Message): + r"""The result of BigQuery export post scan action. + + Attributes: + state (google.cloud.dataplex_v1.types.DataProfileResult.PostScanActionsResult.BigQueryExportResult.State): + Output only. Execution state for the BigQuery + exporting. + message (str): + Output only. Additional information about the + BigQuery exporting. + """ + class State(proto.Enum): + r"""Execution state for the exporting. + + Values: + STATE_UNSPECIFIED (0): + The exporting state is unspecified. + SUCCEEDED (1): + The exporting completed successfully. + FAILED (2): + The exporting is no longer running due to an + error. + SKIPPED (3): + The exporting is skipped due to no valid scan + result to export (usually caused by scan + failed). 
+ """ + STATE_UNSPECIFIED = 0 + SUCCEEDED = 1 + FAILED = 2 + SKIPPED = 3 + + state: 'DataProfileResult.PostScanActionsResult.BigQueryExportResult.State' = proto.Field( + proto.ENUM, + number=1, + enum='DataProfileResult.PostScanActionsResult.BigQueryExportResult.State', + ) + message: str = proto.Field( + proto.STRING, + number=2, + ) + + bigquery_export_result: 'DataProfileResult.PostScanActionsResult.BigQueryExportResult' = proto.Field( + proto.MESSAGE, + number=1, + message='DataProfileResult.PostScanActionsResult.BigQueryExportResult', + ) + + row_count: int = proto.Field( + proto.INT64, + number=3, + ) + profile: Profile = proto.Field( + proto.MESSAGE, + number=4, + message=Profile, + ) + scanned_data: processing.ScannedData = proto.Field( + proto.MESSAGE, + number=5, + message=processing.ScannedData, + ) + post_scan_actions_result: PostScanActionsResult = proto.Field( + proto.MESSAGE, + number=6, + message=PostScanActionsResult, + ) + + +__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/types/data_quality.py b/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/types/data_quality.py new file mode 100644 index 000000000000..4f5adfd361ad --- /dev/null +++ b/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/types/data_quality.py @@ -0,0 +1,912 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from __future__ import annotations + +from typing import MutableMapping, MutableSequence + +import proto # type: ignore + +from google.cloud.dataplex_v1.types import processing + + +__protobuf__ = proto.module( + package='google.cloud.dataplex.v1', + manifest={ + 'DataQualitySpec', + 'DataQualityResult', + 'DataQualityRuleResult', + 'DataQualityDimensionResult', + 'DataQualityDimension', + 'DataQualityRule', + 'DataQualityColumnResult', + }, +) + + +class DataQualitySpec(proto.Message): + r"""DataQualityScan related setting. + + Attributes: + rules (MutableSequence[google.cloud.dataplex_v1.types.DataQualityRule]): + Required. The list of rules to evaluate + against a data source. At least one rule is + required. + sampling_percent (float): + Optional. The percentage of the records to be selected from + the dataset for DataScan. + + - Value can range between 0.0 and 100.0 with up to 3 + significant decimal digits. + - Sampling is not applied if ``sampling_percent`` is not + specified, 0 or + + 100. + row_filter (str): + Optional. A filter applied to all rows in a + single DataScan job. The filter needs to be a + valid SQL expression for a WHERE clause in + BigQuery standard SQL syntax. + Example: col1 >= 0 AND col2 < 10 + post_scan_actions (google.cloud.dataplex_v1.types.DataQualitySpec.PostScanActions): + Optional. Actions to take upon job + completion. + """ + + class PostScanActions(proto.Message): + r"""The configuration of post scan actions of DataQualityScan. 
+
+        Attributes:
+            bigquery_export (google.cloud.dataplex_v1.types.DataQualitySpec.PostScanActions.BigQueryExport):
+                Optional. If set, results will be exported to
+                the provided BigQuery table.
+            notification_report (google.cloud.dataplex_v1.types.DataQualitySpec.PostScanActions.NotificationReport):
+                Optional. If set, results will be sent to the
+                provided notification recipients upon triggers.
+        """
+
+        class BigQueryExport(proto.Message):
+            r"""The configuration of BigQuery export post scan action.
+
+            Attributes:
+                results_table (str):
+                    Optional. The BigQuery table to export DataQualityScan
+                    results to. Format:
+                    //bigquery.googleapis.com/projects/PROJECT_ID/datasets/DATASET_ID/tables/TABLE_ID
+            """
+
+            results_table: str = proto.Field(
+                proto.STRING,
+                number=1,
+            )
+
+        class Recipients(proto.Message):
+            r"""The individuals or groups who are designated to receive
+            notifications upon triggers.
+
+            Attributes:
+                emails (MutableSequence[str]):
+                    Optional. The email recipients who will
+                    receive the DataQualityScan results report.
+            """
+
+            emails: MutableSequence[str] = proto.RepeatedField(
+                proto.STRING,
+                number=1,
+            )
+
+        class ScoreThresholdTrigger(proto.Message):
+            r"""This trigger is triggered when the DQ score in the job result
+            is less than a specified input score.
+
+            Attributes:
+                score_threshold (float):
+                    Optional. The score range is in [0,100].
+            """
+
+            score_threshold: float = proto.Field(
+                proto.FLOAT,
+                number=2,
+            )
+
+        class JobFailureTrigger(proto.Message):
+            r"""This trigger is triggered when the scan job itself fails,
+            regardless of the result.
+
+            """
+
+        class JobEndTrigger(proto.Message):
+            r"""This trigger is triggered whenever a scan job run ends,
+            regardless of the result.
+
+            """
+
+        class NotificationReport(proto.Message):
+            r"""The configuration of notification report post scan action.
+
+            Attributes:
+                recipients (google.cloud.dataplex_v1.types.DataQualitySpec.PostScanActions.Recipients):
+                    Required. The recipients who will receive the
+                    notification report.
+                score_threshold_trigger (google.cloud.dataplex_v1.types.DataQualitySpec.PostScanActions.ScoreThresholdTrigger):
+                    Optional. If set, report will be sent when
+                    score threshold is met.
+                job_failure_trigger (google.cloud.dataplex_v1.types.DataQualitySpec.PostScanActions.JobFailureTrigger):
+                    Optional. If set, report will be sent when a
+                    scan job fails.
+                job_end_trigger (google.cloud.dataplex_v1.types.DataQualitySpec.PostScanActions.JobEndTrigger):
+                    Optional. If set, report will be sent when a
+                    scan job ends.
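+
+            A minimal construction sketch (the email address and
+            threshold value are illustrative)::
+
+                from google.cloud import dataplex_v1
+
+                actions = dataplex_v1.DataQualitySpec.PostScanActions(
+                    notification_report=dataplex_v1.DataQualitySpec.PostScanActions.NotificationReport(
+                        recipients=dataplex_v1.DataQualitySpec.PostScanActions.Recipients(
+                            emails=['data-team@example.com'],
+                        ),
+                        score_threshold_trigger=dataplex_v1.DataQualitySpec.PostScanActions.ScoreThresholdTrigger(
+                            score_threshold=80.0,
+                        ),
+                    ),
+                )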
+ """ + + recipients: 'DataQualitySpec.PostScanActions.Recipients' = proto.Field( + proto.MESSAGE, + number=1, + message='DataQualitySpec.PostScanActions.Recipients', + ) + score_threshold_trigger: 'DataQualitySpec.PostScanActions.ScoreThresholdTrigger' = proto.Field( + proto.MESSAGE, + number=2, + message='DataQualitySpec.PostScanActions.ScoreThresholdTrigger', + ) + job_failure_trigger: 'DataQualitySpec.PostScanActions.JobFailureTrigger' = proto.Field( + proto.MESSAGE, + number=4, + message='DataQualitySpec.PostScanActions.JobFailureTrigger', + ) + job_end_trigger: 'DataQualitySpec.PostScanActions.JobEndTrigger' = proto.Field( + proto.MESSAGE, + number=5, + message='DataQualitySpec.PostScanActions.JobEndTrigger', + ) + + bigquery_export: 'DataQualitySpec.PostScanActions.BigQueryExport' = proto.Field( + proto.MESSAGE, + number=1, + message='DataQualitySpec.PostScanActions.BigQueryExport', + ) + notification_report: 'DataQualitySpec.PostScanActions.NotificationReport' = proto.Field( + proto.MESSAGE, + number=2, + message='DataQualitySpec.PostScanActions.NotificationReport', + ) + + rules: MutableSequence['DataQualityRule'] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message='DataQualityRule', + ) + sampling_percent: float = proto.Field( + proto.FLOAT, + number=4, + ) + row_filter: str = proto.Field( + proto.STRING, + number=5, + ) + post_scan_actions: PostScanActions = proto.Field( + proto.MESSAGE, + number=6, + message=PostScanActions, + ) + + +class DataQualityResult(proto.Message): + r"""The output of a DataQualityScan. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + passed (bool): + Overall data quality result -- ``true`` if all rules passed. + score (float): + Output only. The overall data quality score. + + The score ranges between [0, 100] (up to two decimal + points). + + This field is a member of `oneof`_ ``_score``. + dimensions (MutableSequence[google.cloud.dataplex_v1.types.DataQualityDimensionResult]): + A list of results at the dimension level. + + A dimension will have a corresponding + ``DataQualityDimensionResult`` if and only if there is at + least one rule with the 'dimension' field set to it. + columns (MutableSequence[google.cloud.dataplex_v1.types.DataQualityColumnResult]): + Output only. A list of results at the column level. + + A column will have a corresponding + ``DataQualityColumnResult`` if and only if there is at least + one rule with the 'column' field set to it. + rules (MutableSequence[google.cloud.dataplex_v1.types.DataQualityRuleResult]): + A list of all the rules in a job, and their + results. + row_count (int): + The count of rows processed. + scanned_data (google.cloud.dataplex_v1.types.ScannedData): + The data scanned for this result. + post_scan_actions_result (google.cloud.dataplex_v1.types.DataQualityResult.PostScanActionsResult): + Output only. The result of post scan actions. + """ + + class PostScanActionsResult(proto.Message): + r"""The result of post scan actions of DataQualityScan job. + + Attributes: + bigquery_export_result (google.cloud.dataplex_v1.types.DataQualityResult.PostScanActionsResult.BigQueryExportResult): + Output only. The result of BigQuery export + post scan action. + """ + + class BigQueryExportResult(proto.Message): + r"""The result of BigQuery export post scan action. + + Attributes: + state (google.cloud.dataplex_v1.types.DataQualityResult.PostScanActionsResult.BigQueryExportResult.State): + Output only. 
Execution state for the BigQuery + exporting. + message (str): + Output only. Additional information about the + BigQuery exporting. + """ + class State(proto.Enum): + r"""Execution state for the exporting. + + Values: + STATE_UNSPECIFIED (0): + The exporting state is unspecified. + SUCCEEDED (1): + The exporting completed successfully. + FAILED (2): + The exporting is no longer running due to an + error. + SKIPPED (3): + The exporting is skipped due to no valid scan + result to export (usually caused by scan + failed). + """ + STATE_UNSPECIFIED = 0 + SUCCEEDED = 1 + FAILED = 2 + SKIPPED = 3 + + state: 'DataQualityResult.PostScanActionsResult.BigQueryExportResult.State' = proto.Field( + proto.ENUM, + number=1, + enum='DataQualityResult.PostScanActionsResult.BigQueryExportResult.State', + ) + message: str = proto.Field( + proto.STRING, + number=2, + ) + + bigquery_export_result: 'DataQualityResult.PostScanActionsResult.BigQueryExportResult' = proto.Field( + proto.MESSAGE, + number=1, + message='DataQualityResult.PostScanActionsResult.BigQueryExportResult', + ) + + passed: bool = proto.Field( + proto.BOOL, + number=5, + ) + score: float = proto.Field( + proto.FLOAT, + number=9, + optional=True, + ) + dimensions: MutableSequence['DataQualityDimensionResult'] = proto.RepeatedField( + proto.MESSAGE, + number=2, + message='DataQualityDimensionResult', + ) + columns: MutableSequence['DataQualityColumnResult'] = proto.RepeatedField( + proto.MESSAGE, + number=10, + message='DataQualityColumnResult', + ) + rules: MutableSequence['DataQualityRuleResult'] = proto.RepeatedField( + proto.MESSAGE, + number=3, + message='DataQualityRuleResult', + ) + row_count: int = proto.Field( + proto.INT64, + number=4, + ) + scanned_data: processing.ScannedData = proto.Field( + proto.MESSAGE, + number=7, + message=processing.ScannedData, + ) + post_scan_actions_result: PostScanActionsResult = proto.Field( + proto.MESSAGE, + number=8, + message=PostScanActionsResult, + ) + + +class DataQualityRuleResult(proto.Message): + r"""DataQualityRuleResult provides a more detailed, per-rule view + of the results. + + Attributes: + rule (google.cloud.dataplex_v1.types.DataQualityRule): + The rule specified in the DataQualitySpec, as + is. + passed (bool): + Whether the rule passed or failed. + evaluated_count (int): + The number of rows a rule was evaluated against. + + This field is only valid for row-level type rules. + + Evaluated count can be configured to either + + - include all rows (default) - with ``null`` rows + automatically failing rule evaluation, or + - exclude ``null`` rows from the ``evaluated_count``, by + setting ``ignore_nulls = true``. + passed_count (int): + The number of rows which passed a rule + evaluation. + This field is only valid for row-level type + rules. + null_count (int): + The number of rows with null values in the + specified column. + pass_ratio (float): + The ratio of **passed_count / evaluated_count**. + + This field is only valid for row-level type rules. + failing_rows_query (str): + The query to find rows that did not pass this + rule. + This field is only valid for row-level type + rules. + assertion_row_count (int): + Output only. The number of rows returned by + the SQL statement in a SQL assertion rule. + + This field is only valid for SQL assertion + rules. 
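+
+    For example, a row-level rule evaluated against 1,000 rows of
+    which 990 passed yields ``evaluated_count = 1000``,
+    ``passed_count = 990`` and ``pass_ratio = 0.99``.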
+ """ + + rule: 'DataQualityRule' = proto.Field( + proto.MESSAGE, + number=1, + message='DataQualityRule', + ) + passed: bool = proto.Field( + proto.BOOL, + number=7, + ) + evaluated_count: int = proto.Field( + proto.INT64, + number=9, + ) + passed_count: int = proto.Field( + proto.INT64, + number=8, + ) + null_count: int = proto.Field( + proto.INT64, + number=5, + ) + pass_ratio: float = proto.Field( + proto.DOUBLE, + number=6, + ) + failing_rows_query: str = proto.Field( + proto.STRING, + number=10, + ) + assertion_row_count: int = proto.Field( + proto.INT64, + number=11, + ) + + +class DataQualityDimensionResult(proto.Message): + r"""DataQualityDimensionResult provides a more detailed, + per-dimension view of the results. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + dimension (google.cloud.dataplex_v1.types.DataQualityDimension): + Output only. The dimension config specified + in the DataQualitySpec, as is. + passed (bool): + Whether the dimension passed or failed. + score (float): + Output only. The dimension-level data quality score for this + data scan job if and only if the 'dimension' field is set. + + The score ranges between [0, 100] (up to two decimal + points). + + This field is a member of `oneof`_ ``_score``. + """ + + dimension: 'DataQualityDimension' = proto.Field( + proto.MESSAGE, + number=1, + message='DataQualityDimension', + ) + passed: bool = proto.Field( + proto.BOOL, + number=3, + ) + score: float = proto.Field( + proto.FLOAT, + number=4, + optional=True, + ) + + +class DataQualityDimension(proto.Message): + r"""A dimension captures data quality intent about a defined + subset of the rules specified. + + Attributes: + name (str): + The dimension name a rule belongs to. Supported dimensions + are ["COMPLETENESS", "ACCURACY", "CONSISTENCY", "VALIDITY", + "UNIQUENESS", "FRESHNESS", "VOLUME"] + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + + +class DataQualityRule(proto.Message): + r"""A rule captures data quality intent about a data source. + + This message has `oneof`_ fields (mutually exclusive fields). + For each oneof, at most one member field can be set at the same time. + Setting any member of the oneof automatically clears all other + members. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + range_expectation (google.cloud.dataplex_v1.types.DataQualityRule.RangeExpectation): + Row-level rule which evaluates whether each + column value lies between a specified range. + + This field is a member of `oneof`_ ``rule_type``. + non_null_expectation (google.cloud.dataplex_v1.types.DataQualityRule.NonNullExpectation): + Row-level rule which evaluates whether each + column value is null. + + This field is a member of `oneof`_ ``rule_type``. + set_expectation (google.cloud.dataplex_v1.types.DataQualityRule.SetExpectation): + Row-level rule which evaluates whether each + column value is contained by a specified set. + + This field is a member of `oneof`_ ``rule_type``. + regex_expectation (google.cloud.dataplex_v1.types.DataQualityRule.RegexExpectation): + Row-level rule which evaluates whether each + column value matches a specified regex. + + This field is a member of `oneof`_ ``rule_type``. + uniqueness_expectation (google.cloud.dataplex_v1.types.DataQualityRule.UniquenessExpectation): + Row-level rule which evaluates whether each + column value is unique. 
+ + This field is a member of `oneof`_ ``rule_type``. + statistic_range_expectation (google.cloud.dataplex_v1.types.DataQualityRule.StatisticRangeExpectation): + Aggregate rule which evaluates whether the + column aggregate statistic lies between a + specified range. + + This field is a member of `oneof`_ ``rule_type``. + row_condition_expectation (google.cloud.dataplex_v1.types.DataQualityRule.RowConditionExpectation): + Row-level rule which evaluates whether each + row in a table passes the specified condition. + + This field is a member of `oneof`_ ``rule_type``. + table_condition_expectation (google.cloud.dataplex_v1.types.DataQualityRule.TableConditionExpectation): + Aggregate rule which evaluates whether the + provided expression is true for a table. + + This field is a member of `oneof`_ ``rule_type``. + sql_assertion (google.cloud.dataplex_v1.types.DataQualityRule.SqlAssertion): + Aggregate rule which evaluates the number of + rows returned for the provided statement. If any + rows are returned, this rule fails. + + This field is a member of `oneof`_ ``rule_type``. + column (str): + Optional. The unnested column which this rule + is evaluated against. + ignore_null (bool): + Optional. Rows with ``null`` values will automatically fail + a rule, unless ``ignore_null`` is ``true``. In that case, + such ``null`` rows are trivially considered passing. + + This field is only valid for the following type of rules: + + - RangeExpectation + - RegexExpectation + - SetExpectation + - UniquenessExpectation + dimension (str): + Required. The dimension a rule belongs to. Results are also + aggregated at the dimension level. Supported dimensions are + **["COMPLETENESS", "ACCURACY", "CONSISTENCY", "VALIDITY", + "UNIQUENESS", "FRESHNESS", "VOLUME"]** + threshold (float): + Optional. The minimum ratio of **passing_rows / total_rows** + required to pass this rule, with a range of [0.0, 1.0]. + + 0 indicates default value (i.e. 1.0). + + This field is only valid for row-level type rules. + name (str): + Optional. A mutable name for the rule. + + - The name must contain only letters (a-z, A-Z), numbers + (0-9), or hyphens (-). + - The maximum length is 63 characters. + - Must start with a letter. + - Must end with a number or a letter. + description (str): + Optional. Description of the rule. + + - The maximum length is 1,024 characters. + suspended (bool): + Optional. Whether the Rule is active or + suspended. Default is false. + """ + + class RangeExpectation(proto.Message): + r"""Evaluates whether each column value lies between a specified + range. + + Attributes: + min_value (str): + Optional. The minimum column value allowed for a row to pass + this validation. At least one of ``min_value`` and + ``max_value`` need to be provided. + max_value (str): + Optional. The maximum column value allowed for a row to pass + this validation. At least one of ``min_value`` and + ``max_value`` need to be provided. + strict_min_enabled (bool): + Optional. Whether each value needs to be strictly greater + than ('>') the minimum, or if equality is allowed. + + Only relevant if a ``min_value`` has been defined. Default = + false. + strict_max_enabled (bool): + Optional. Whether each value needs to be strictly lesser + than ('<') the maximum, or if equality is allowed. + + Only relevant if a ``max_value`` has been defined. Default = + false. 
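+
+        For example, with ``min_value = "0"``, ``max_value = "100"``
+        and both strict flags left unset, any value in the closed
+        interval [0, 100] passes this rule.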
+ """ + + min_value: str = proto.Field( + proto.STRING, + number=1, + ) + max_value: str = proto.Field( + proto.STRING, + number=2, + ) + strict_min_enabled: bool = proto.Field( + proto.BOOL, + number=3, + ) + strict_max_enabled: bool = proto.Field( + proto.BOOL, + number=4, + ) + + class NonNullExpectation(proto.Message): + r"""Evaluates whether each column value is null. + """ + + class SetExpectation(proto.Message): + r"""Evaluates whether each column value is contained by a + specified set. + + Attributes: + values (MutableSequence[str]): + Optional. Expected values for the column + value. + """ + + values: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=1, + ) + + class RegexExpectation(proto.Message): + r"""Evaluates whether each column value matches a specified + regex. + + Attributes: + regex (str): + Optional. A regular expression the column + value is expected to match. + """ + + regex: str = proto.Field( + proto.STRING, + number=1, + ) + + class UniquenessExpectation(proto.Message): + r"""Evaluates whether the column has duplicates. + """ + + class StatisticRangeExpectation(proto.Message): + r"""Evaluates whether the column aggregate statistic lies between + a specified range. + + Attributes: + statistic (google.cloud.dataplex_v1.types.DataQualityRule.StatisticRangeExpectation.ColumnStatistic): + Optional. The aggregate metric to evaluate. + min_value (str): + Optional. The minimum column statistic value allowed for a + row to pass this validation. + + At least one of ``min_value`` and ``max_value`` need to be + provided. + max_value (str): + Optional. The maximum column statistic value allowed for a + row to pass this validation. + + At least one of ``min_value`` and ``max_value`` need to be + provided. + strict_min_enabled (bool): + Optional. Whether column statistic needs to be strictly + greater than ('>') the minimum, or if equality is allowed. + + Only relevant if a ``min_value`` has been defined. Default = + false. + strict_max_enabled (bool): + Optional. Whether column statistic needs to be strictly + lesser than ('<') the maximum, or if equality is allowed. + + Only relevant if a ``max_value`` has been defined. Default = + false. + """ + class ColumnStatistic(proto.Enum): + r"""The list of aggregate metrics a rule can be evaluated + against. + + Values: + STATISTIC_UNDEFINED (0): + Unspecified statistic type + MEAN (1): + Evaluate the column mean + MIN (2): + Evaluate the column min + MAX (3): + Evaluate the column max + """ + STATISTIC_UNDEFINED = 0 + MEAN = 1 + MIN = 2 + MAX = 3 + + statistic: 'DataQualityRule.StatisticRangeExpectation.ColumnStatistic' = proto.Field( + proto.ENUM, + number=1, + enum='DataQualityRule.StatisticRangeExpectation.ColumnStatistic', + ) + min_value: str = proto.Field( + proto.STRING, + number=2, + ) + max_value: str = proto.Field( + proto.STRING, + number=3, + ) + strict_min_enabled: bool = proto.Field( + proto.BOOL, + number=4, + ) + strict_max_enabled: bool = proto.Field( + proto.BOOL, + number=5, + ) + + class RowConditionExpectation(proto.Message): + r"""Evaluates whether each row passes the specified condition. + + The SQL expression needs to use BigQuery standard SQL syntax and + should produce a boolean value per row as the result. + + Example: col1 >= 0 AND col2 < 10 + + Attributes: + sql_expression (str): + Optional. The SQL expression. 
+ """ + + sql_expression: str = proto.Field( + proto.STRING, + number=1, + ) + + class TableConditionExpectation(proto.Message): + r"""Evaluates whether the provided expression is true. + + The SQL expression needs to use BigQuery standard SQL syntax and + should produce a scalar boolean result. + + Example: MIN(col1) >= 0 + + Attributes: + sql_expression (str): + Optional. The SQL expression. + """ + + sql_expression: str = proto.Field( + proto.STRING, + number=1, + ) + + class SqlAssertion(proto.Message): + r"""A SQL statement that is evaluated to return rows that match an + invalid state. If any rows are are returned, this rule fails. + + The SQL statement must use BigQuery standard SQL syntax, and must + not contain any semicolons. + + You can use the data reference parameter ``${data()}`` to reference + the source table with all of its precondition filters applied. + Examples of precondition filters include row filters, incremental + data filters, and sampling. For more information, see `Data + reference + parameter `__. + + Example: ``SELECT * FROM ${data()} WHERE price < 0`` + + Attributes: + sql_statement (str): + Optional. The SQL statement. + """ + + sql_statement: str = proto.Field( + proto.STRING, + number=1, + ) + + range_expectation: RangeExpectation = proto.Field( + proto.MESSAGE, + number=1, + oneof='rule_type', + message=RangeExpectation, + ) + non_null_expectation: NonNullExpectation = proto.Field( + proto.MESSAGE, + number=2, + oneof='rule_type', + message=NonNullExpectation, + ) + set_expectation: SetExpectation = proto.Field( + proto.MESSAGE, + number=3, + oneof='rule_type', + message=SetExpectation, + ) + regex_expectation: RegexExpectation = proto.Field( + proto.MESSAGE, + number=4, + oneof='rule_type', + message=RegexExpectation, + ) + uniqueness_expectation: UniquenessExpectation = proto.Field( + proto.MESSAGE, + number=100, + oneof='rule_type', + message=UniquenessExpectation, + ) + statistic_range_expectation: StatisticRangeExpectation = proto.Field( + proto.MESSAGE, + number=101, + oneof='rule_type', + message=StatisticRangeExpectation, + ) + row_condition_expectation: RowConditionExpectation = proto.Field( + proto.MESSAGE, + number=200, + oneof='rule_type', + message=RowConditionExpectation, + ) + table_condition_expectation: TableConditionExpectation = proto.Field( + proto.MESSAGE, + number=201, + oneof='rule_type', + message=TableConditionExpectation, + ) + sql_assertion: SqlAssertion = proto.Field( + proto.MESSAGE, + number=202, + oneof='rule_type', + message=SqlAssertion, + ) + column: str = proto.Field( + proto.STRING, + number=500, + ) + ignore_null: bool = proto.Field( + proto.BOOL, + number=501, + ) + dimension: str = proto.Field( + proto.STRING, + number=502, + ) + threshold: float = proto.Field( + proto.DOUBLE, + number=503, + ) + name: str = proto.Field( + proto.STRING, + number=504, + ) + description: str = proto.Field( + proto.STRING, + number=505, + ) + suspended: bool = proto.Field( + proto.BOOL, + number=506, + ) + + +class DataQualityColumnResult(proto.Message): + r"""DataQualityColumnResult provides a more detailed, per-column + view of the results. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + column (str): + Output only. The column specified in the + DataQualityRule. + score (float): + Output only. The column-level data quality score for this + data scan job if and only if the 'column' field is set. 
+
+            The score ranges between [0, 100] (up to two decimal
+            points).
+
+            This field is a member of `oneof`_ ``_score``.
+    """
+
+    column: str = proto.Field(
+        proto.STRING,
+        number=1,
+    )
+    score: float = proto.Field(
+        proto.FLOAT,
+        number=2,
+        optional=True,
+    )
+
+
+__all__ = tuple(sorted(__protobuf__.manifest))
diff --git a/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/types/data_taxonomy.py b/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/types/data_taxonomy.py
new file mode 100644
index 000000000000..e837b4b01d37
--- /dev/null
+++ b/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/types/data_taxonomy.py
@@ -0,0 +1,976 @@
+# -*- coding: utf-8 -*-
+# Copyright 2024 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+from __future__ import annotations
+
+from typing import MutableMapping, MutableSequence
+
+import proto  # type: ignore
+
+from google.cloud.dataplex_v1.types import security
+from google.protobuf import field_mask_pb2  # type: ignore
+from google.protobuf import timestamp_pb2  # type: ignore
+
+
+__protobuf__ = proto.module(
+    package='google.cloud.dataplex.v1',
+    manifest={
+        'DataTaxonomy',
+        'DataAttribute',
+        'DataAttributeBinding',
+        'CreateDataTaxonomyRequest',
+        'UpdateDataTaxonomyRequest',
+        'GetDataTaxonomyRequest',
+        'ListDataTaxonomiesRequest',
+        'ListDataTaxonomiesResponse',
+        'DeleteDataTaxonomyRequest',
+        'CreateDataAttributeRequest',
+        'UpdateDataAttributeRequest',
+        'GetDataAttributeRequest',
+        'ListDataAttributesRequest',
+        'ListDataAttributesResponse',
+        'DeleteDataAttributeRequest',
+        'CreateDataAttributeBindingRequest',
+        'UpdateDataAttributeBindingRequest',
+        'GetDataAttributeBindingRequest',
+        'ListDataAttributeBindingsRequest',
+        'ListDataAttributeBindingsResponse',
+        'DeleteDataAttributeBindingRequest',
+    },
+)
+
+
+class DataTaxonomy(proto.Message):
+    r"""DataTaxonomy represents a set of hierarchical DataAttributes
+    resources, grouped with a common theme, e.g.
+    'SensitiveDataTaxonomy' can have attributes to manage PII data.
+    It is defined at the project level.
+
+    Attributes:
+        name (str):
+            Output only. The relative resource name of the DataTaxonomy,
+            of the form:
+            projects/{project_number}/locations/{location_id}/dataTaxonomies/{data_taxonomy_id}.
+        uid (str):
+            Output only. System generated globally unique
+            ID for the dataTaxonomy. This ID will be
+            different if the DataTaxonomy is deleted and
+            re-created with the same name.
+        create_time (google.protobuf.timestamp_pb2.Timestamp):
+            Output only. The time when the DataTaxonomy
+            was created.
+        update_time (google.protobuf.timestamp_pb2.Timestamp):
+            Output only. The time when the DataTaxonomy
+            was last updated.
+        description (str):
+            Optional. Description of the DataTaxonomy.
+        display_name (str):
+            Optional. User friendly display name.
+        labels (MutableMapping[str, str]):
+            Optional. User-defined labels for the
+            DataTaxonomy.
+        attribute_count (int):
+            Output only. The number of attributes in the
+            DataTaxonomy.
+ etag (str): + This checksum is computed by the server based + on the value of other fields, and may be sent on + update and delete requests to ensure the client + has an up-to-date value before proceeding. + class_count (int): + Output only. The number of classes in the + DataTaxonomy. + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + uid: str = proto.Field( + proto.STRING, + number=2, + ) + create_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=3, + message=timestamp_pb2.Timestamp, + ) + update_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=4, + message=timestamp_pb2.Timestamp, + ) + description: str = proto.Field( + proto.STRING, + number=5, + ) + display_name: str = proto.Field( + proto.STRING, + number=6, + ) + labels: MutableMapping[str, str] = proto.MapField( + proto.STRING, + proto.STRING, + number=8, + ) + attribute_count: int = proto.Field( + proto.INT32, + number=9, + ) + etag: str = proto.Field( + proto.STRING, + number=10, + ) + class_count: int = proto.Field( + proto.INT32, + number=11, + ) + + +class DataAttribute(proto.Message): + r"""Denotes one dataAttribute in a dataTaxonomy, for example, PII. + DataAttribute resources can be defined in a hierarchy. A single + dataAttribute resource can contain specs of multiple types + + :: + + PII + - ResourceAccessSpec : + - readers :foo@bar.com + - DataAccessSpec : + - readers :bar@foo.com + + Attributes: + name (str): + Output only. The relative resource name of the + dataAttribute, of the form: + projects/{project_number}/locations/{location_id}/dataTaxonomies/{dataTaxonomy}/attributes/{data_attribute_id}. + uid (str): + Output only. System generated globally unique + ID for the DataAttribute. This ID will be + different if the DataAttribute is deleted and + re-created with the same name. + create_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. The time when the DataAttribute + was created. + update_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. The time when the DataAttribute + was last updated. + description (str): + Optional. Description of the DataAttribute. + display_name (str): + Optional. User friendly display name. + labels (MutableMapping[str, str]): + Optional. User-defined labels for the + DataAttribute. + parent_id (str): + Optional. The ID of the parent DataAttribute resource, + should belong to the same data taxonomy. Circular dependency + in parent chain is not valid. Maximum depth of the hierarchy + allowed is 4. [a -> b -> c -> d -> e, depth = 4] + attribute_count (int): + Output only. The number of child attributes + present for this attribute. + etag (str): + This checksum is computed by the server based + on the value of other fields, and may be sent on + update and delete requests to ensure the client + has an up-to-date value before proceeding. + resource_access_spec (google.cloud.dataplex_v1.types.ResourceAccessSpec): + Optional. Specified when applied to a + resource (eg: Cloud Storage bucket, BigQuery + dataset, BigQuery table). + data_access_spec (google.cloud.dataplex_v1.types.DataAccessSpec): + Optional. Specified when applied to data + stored on the resource (eg: rows, columns in + BigQuery Tables). 
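+
+    A minimal construction sketch (the description and reader
+    identity are illustrative)::
+
+        from google.cloud import dataplex_v1
+
+        attribute = dataplex_v1.DataAttribute(
+            description='Personally identifiable information',
+            resource_access_spec=dataplex_v1.ResourceAccessSpec(
+                readers=['user:foo@bar.com'],
+            ),
+        )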
+ """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + uid: str = proto.Field( + proto.STRING, + number=2, + ) + create_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=3, + message=timestamp_pb2.Timestamp, + ) + update_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=4, + message=timestamp_pb2.Timestamp, + ) + description: str = proto.Field( + proto.STRING, + number=5, + ) + display_name: str = proto.Field( + proto.STRING, + number=6, + ) + labels: MutableMapping[str, str] = proto.MapField( + proto.STRING, + proto.STRING, + number=7, + ) + parent_id: str = proto.Field( + proto.STRING, + number=8, + ) + attribute_count: int = proto.Field( + proto.INT32, + number=9, + ) + etag: str = proto.Field( + proto.STRING, + number=10, + ) + resource_access_spec: security.ResourceAccessSpec = proto.Field( + proto.MESSAGE, + number=100, + message=security.ResourceAccessSpec, + ) + data_access_spec: security.DataAccessSpec = proto.Field( + proto.MESSAGE, + number=101, + message=security.DataAccessSpec, + ) + + +class DataAttributeBinding(proto.Message): + r"""DataAttributeBinding represents binding of attributes to + resources. Eg: Bind 'CustomerInfo' entity with 'PII' attribute. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + name (str): + Output only. The relative resource name of the Data + Attribute Binding, of the form: + projects/{project_number}/locations/{location}/dataAttributeBindings/{data_attribute_binding_id} + uid (str): + Output only. System generated globally unique + ID for the DataAttributeBinding. This ID will be + different if the DataAttributeBinding is deleted + and re-created with the same name. + create_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. The time when the + DataAttributeBinding was created. + update_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. The time when the + DataAttributeBinding was last updated. + description (str): + Optional. Description of the + DataAttributeBinding. + display_name (str): + Optional. User friendly display name. + labels (MutableMapping[str, str]): + Optional. User-defined labels for the + DataAttributeBinding. + etag (str): + This checksum is computed by the server based + on the value of other fields, and may be sent on + update and delete requests to ensure the client + has an up-to-date value before proceeding. Etags + must be used when calling the + DeleteDataAttributeBinding and the + UpdateDataAttributeBinding method. + resource (str): + Optional. Immutable. The resource name of the resource that + is associated to attributes. Presently, only entity resource + is supported in the form: + projects/{project}/locations/{location}/lakes/{lake}/zones/{zone}/entities/{entity_id} + Must belong in the same project and region as the attribute + binding, and there can only exist one active binding for a + resource. + + This field is a member of `oneof`_ ``resource_reference``. + attributes (MutableSequence[str]): + Optional. List of attributes to be associated with the + resource, provided in the form: + projects/{project}/locations/{location}/dataTaxonomies/{dataTaxonomy}/attributes/{data_attribute_id} + paths (MutableSequence[google.cloud.dataplex_v1.types.DataAttributeBinding.Path]): + Optional. The list of paths for items within + the associated resource (eg. columns and + partitions within a table) along with attribute + bindings. 
+ """ + + class Path(proto.Message): + r"""Represents a subresource of the given resource, and + associated bindings with it. Currently supported subresources + are column and partition schema fields within a table. + + Attributes: + name (str): + Required. The name identifier of the path. + Nested columns should be of the form: + 'address.city'. + attributes (MutableSequence[str]): + Optional. List of attributes to be associated with the path + of the resource, provided in the form: + projects/{project}/locations/{location}/dataTaxonomies/{dataTaxonomy}/attributes/{data_attribute_id} + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + attributes: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=2, + ) + + name: str = proto.Field( + proto.STRING, + number=1, + ) + uid: str = proto.Field( + proto.STRING, + number=2, + ) + create_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=3, + message=timestamp_pb2.Timestamp, + ) + update_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=4, + message=timestamp_pb2.Timestamp, + ) + description: str = proto.Field( + proto.STRING, + number=5, + ) + display_name: str = proto.Field( + proto.STRING, + number=6, + ) + labels: MutableMapping[str, str] = proto.MapField( + proto.STRING, + proto.STRING, + number=7, + ) + etag: str = proto.Field( + proto.STRING, + number=8, + ) + resource: str = proto.Field( + proto.STRING, + number=100, + oneof='resource_reference', + ) + attributes: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=110, + ) + paths: MutableSequence[Path] = proto.RepeatedField( + proto.MESSAGE, + number=120, + message=Path, + ) + + +class CreateDataTaxonomyRequest(proto.Message): + r"""Create DataTaxonomy request. + + Attributes: + parent (str): + Required. The resource name of the data taxonomy location, + of the form: + projects/{project_number}/locations/{location_id} where + ``location_id`` refers to a GCP region. + data_taxonomy_id (str): + Required. DataTaxonomy identifier. + + - Must contain only lowercase letters, numbers and hyphens. + - Must start with a letter. + - Must be between 1-63 characters. + - Must end with a number or a letter. + - Must be unique within the Project. + data_taxonomy (google.cloud.dataplex_v1.types.DataTaxonomy): + Required. DataTaxonomy resource. + validate_only (bool): + Optional. Only validate the request, but do + not perform mutations. The default is false. + """ + + parent: str = proto.Field( + proto.STRING, + number=1, + ) + data_taxonomy_id: str = proto.Field( + proto.STRING, + number=2, + ) + data_taxonomy: 'DataTaxonomy' = proto.Field( + proto.MESSAGE, + number=3, + message='DataTaxonomy', + ) + validate_only: bool = proto.Field( + proto.BOOL, + number=4, + ) + + +class UpdateDataTaxonomyRequest(proto.Message): + r"""Update DataTaxonomy request. + + Attributes: + update_mask (google.protobuf.field_mask_pb2.FieldMask): + Required. Mask of fields to update. + data_taxonomy (google.cloud.dataplex_v1.types.DataTaxonomy): + Required. Only fields specified in ``update_mask`` are + updated. + validate_only (bool): + Optional. Only validate the request, but do + not perform mutations. The default is false. 
+ """ + + update_mask: field_mask_pb2.FieldMask = proto.Field( + proto.MESSAGE, + number=1, + message=field_mask_pb2.FieldMask, + ) + data_taxonomy: 'DataTaxonomy' = proto.Field( + proto.MESSAGE, + number=2, + message='DataTaxonomy', + ) + validate_only: bool = proto.Field( + proto.BOOL, + number=3, + ) + + +class GetDataTaxonomyRequest(proto.Message): + r"""Get DataTaxonomy request. + + Attributes: + name (str): + Required. The resource name of the DataTaxonomy: + projects/{project_number}/locations/{location_id}/dataTaxonomies/{data_taxonomy_id} + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + + +class ListDataTaxonomiesRequest(proto.Message): + r"""List DataTaxonomies request. + + Attributes: + parent (str): + Required. The resource name of the DataTaxonomy location, of + the form: projects/{project_number}/locations/{location_id} + where ``location_id`` refers to a GCP region. + page_size (int): + Optional. Maximum number of DataTaxonomies to + return. The service may return fewer than this + value. If unspecified, at most 10 DataTaxonomies + will be returned. The maximum value is 1000; + values above 1000 will be coerced to 1000. + page_token (str): + Optional. Page token received from a previous + ``ListDataTaxonomies`` call. Provide this to retrieve the + subsequent page. When paginating, all other parameters + provided to ``ListDataTaxonomies`` must match the call that + provided the page token. + filter (str): + Optional. Filter request. + order_by (str): + Optional. Order by fields for the result. + """ + + parent: str = proto.Field( + proto.STRING, + number=1, + ) + page_size: int = proto.Field( + proto.INT32, + number=2, + ) + page_token: str = proto.Field( + proto.STRING, + number=3, + ) + filter: str = proto.Field( + proto.STRING, + number=4, + ) + order_by: str = proto.Field( + proto.STRING, + number=5, + ) + + +class ListDataTaxonomiesResponse(proto.Message): + r"""List DataTaxonomies response. + + Attributes: + data_taxonomies (MutableSequence[google.cloud.dataplex_v1.types.DataTaxonomy]): + DataTaxonomies under the given parent + location. + next_page_token (str): + Token to retrieve the next page of results, + or empty if there are no more results in the + list. + unreachable_locations (MutableSequence[str]): + Locations that could not be reached. + """ + + @property + def raw_page(self): + return self + + data_taxonomies: MutableSequence['DataTaxonomy'] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message='DataTaxonomy', + ) + next_page_token: str = proto.Field( + proto.STRING, + number=2, + ) + unreachable_locations: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=3, + ) + + +class DeleteDataTaxonomyRequest(proto.Message): + r"""Delete DataTaxonomy request. + + Attributes: + name (str): + Required. The resource name of the DataTaxonomy: + projects/{project_number}/locations/{location_id}/dataTaxonomies/{data_taxonomy_id} + etag (str): + Optional. If the client provided etag value + does not match the current etag value,the + DeleteDataTaxonomy method returns an ABORTED + error. + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + etag: str = proto.Field( + proto.STRING, + number=2, + ) + + +class CreateDataAttributeRequest(proto.Message): + r"""Create DataAttribute request. + + Attributes: + parent (str): + Required. The resource name of the parent data taxonomy + projects/{project_number}/locations/{location_id}/dataTaxonomies/{data_taxonomy_id} + data_attribute_id (str): + Required. 
DataAttribute identifier. + + - Must contain only lowercase letters, numbers and hyphens. + - Must start with a letter. + - Must be between 1-63 characters. + - Must end with a number or a letter. + - Must be unique within the DataTaxonomy. + data_attribute (google.cloud.dataplex_v1.types.DataAttribute): + Required. DataAttribute resource. + validate_only (bool): + Optional. Only validate the request, but do + not perform mutations. The default is false. + """ + + parent: str = proto.Field( + proto.STRING, + number=1, + ) + data_attribute_id: str = proto.Field( + proto.STRING, + number=2, + ) + data_attribute: 'DataAttribute' = proto.Field( + proto.MESSAGE, + number=3, + message='DataAttribute', + ) + validate_only: bool = proto.Field( + proto.BOOL, + number=4, + ) + + +class UpdateDataAttributeRequest(proto.Message): + r"""Update DataAttribute request. + + Attributes: + update_mask (google.protobuf.field_mask_pb2.FieldMask): + Required. Mask of fields to update. + data_attribute (google.cloud.dataplex_v1.types.DataAttribute): + Required. Only fields specified in ``update_mask`` are + updated. + validate_only (bool): + Optional. Only validate the request, but do + not perform mutations. The default is false. + """ + + update_mask: field_mask_pb2.FieldMask = proto.Field( + proto.MESSAGE, + number=1, + message=field_mask_pb2.FieldMask, + ) + data_attribute: 'DataAttribute' = proto.Field( + proto.MESSAGE, + number=2, + message='DataAttribute', + ) + validate_only: bool = proto.Field( + proto.BOOL, + number=3, + ) + + +class GetDataAttributeRequest(proto.Message): + r"""Get DataAttribute request. + + Attributes: + name (str): + Required. The resource name of the dataAttribute: + projects/{project_number}/locations/{location_id}/dataTaxonomies/{dataTaxonomy}/attributes/{data_attribute_id} + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + + +class ListDataAttributesRequest(proto.Message): + r"""List DataAttributes request. + + Attributes: + parent (str): + Required. The resource name of the DataTaxonomy: + projects/{project_number}/locations/{location_id}/dataTaxonomies/{data_taxonomy_id} + page_size (int): + Optional. Maximum number of DataAttributes to + return. The service may return fewer than this + value. If unspecified, at most 10 dataAttributes + will be returned. The maximum value is 1000; + values above 1000 will be coerced to 1000. + page_token (str): + Optional. Page token received from a previous + ``ListDataAttributes`` call. Provide this to retrieve the + subsequent page. When paginating, all other parameters + provided to ``ListDataAttributes`` must match the call that + provided the page token. + filter (str): + Optional. Filter request. + order_by (str): + Optional. Order by fields for the result. + """ + + parent: str = proto.Field( + proto.STRING, + number=1, + ) + page_size: int = proto.Field( + proto.INT32, + number=2, + ) + page_token: str = proto.Field( + proto.STRING, + number=3, + ) + filter: str = proto.Field( + proto.STRING, + number=4, + ) + order_by: str = proto.Field( + proto.STRING, + number=5, + ) + + +class ListDataAttributesResponse(proto.Message): + r"""List DataAttributes response. + + Attributes: + data_attributes (MutableSequence[google.cloud.dataplex_v1.types.DataAttribute]): + DataAttributes under the given parent + DataTaxonomy. + next_page_token (str): + Token to retrieve the next page of results, + or empty if there are no more results in the + list. 
+ unreachable_locations (MutableSequence[str]): + Locations that could not be reached. + """ + + @property + def raw_page(self): + return self + + data_attributes: MutableSequence['DataAttribute'] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message='DataAttribute', + ) + next_page_token: str = proto.Field( + proto.STRING, + number=2, + ) + unreachable_locations: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=3, + ) + + +class DeleteDataAttributeRequest(proto.Message): + r"""Delete DataAttribute request. + + Attributes: + name (str): + Required. The resource name of the DataAttribute: + projects/{project_number}/locations/{location_id}/dataTaxonomies/{dataTaxonomy}/attributes/{data_attribute_id} + etag (str): + Optional. If the client provided etag value + does not match the current etag value, the + DeleteDataAttribute method returns an ABORTED + error response. + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + etag: str = proto.Field( + proto.STRING, + number=2, + ) + + +class CreateDataAttributeBindingRequest(proto.Message): + r"""Create DataAttributeBinding request. + + Attributes: + parent (str): + Required. The resource name of the parent data taxonomy + projects/{project_number}/locations/{location_id} + data_attribute_binding_id (str): + Required. DataAttributeBinding identifier. + + - Must contain only lowercase letters, numbers and hyphens. + - Must start with a letter. + - Must be between 1-63 characters. + - Must end with a number or a letter. + - Must be unique within the Location. + data_attribute_binding (google.cloud.dataplex_v1.types.DataAttributeBinding): + Required. DataAttributeBinding resource. + validate_only (bool): + Optional. Only validate the request, but do + not perform mutations. The default is false. + """ + + parent: str = proto.Field( + proto.STRING, + number=1, + ) + data_attribute_binding_id: str = proto.Field( + proto.STRING, + number=2, + ) + data_attribute_binding: 'DataAttributeBinding' = proto.Field( + proto.MESSAGE, + number=3, + message='DataAttributeBinding', + ) + validate_only: bool = proto.Field( + proto.BOOL, + number=4, + ) + + +class UpdateDataAttributeBindingRequest(proto.Message): + r"""Update DataAttributeBinding request. + + Attributes: + update_mask (google.protobuf.field_mask_pb2.FieldMask): + Required. Mask of fields to update. + data_attribute_binding (google.cloud.dataplex_v1.types.DataAttributeBinding): + Required. Only fields specified in ``update_mask`` are + updated. + validate_only (bool): + Optional. Only validate the request, but do + not perform mutations. The default is false. + """ + + update_mask: field_mask_pb2.FieldMask = proto.Field( + proto.MESSAGE, + number=1, + message=field_mask_pb2.FieldMask, + ) + data_attribute_binding: 'DataAttributeBinding' = proto.Field( + proto.MESSAGE, + number=2, + message='DataAttributeBinding', + ) + validate_only: bool = proto.Field( + proto.BOOL, + number=3, + ) + + +class GetDataAttributeBindingRequest(proto.Message): + r"""Get DataAttributeBinding request. + + Attributes: + name (str): + Required. The resource name of the DataAttributeBinding: + projects/{project_number}/locations/{location_id}/dataAttributeBindings/{data_attribute_binding_id} + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + + +class ListDataAttributeBindingsRequest(proto.Message): + r"""List DataAttributeBindings request. + + Attributes: + parent (str): + Required. 
The resource name of the Location: + projects/{project_number}/locations/{location_id} + page_size (int): + Optional. Maximum number of + DataAttributeBindings to return. The service may + return fewer than this value. If unspecified, at + most 10 DataAttributeBindings will be returned. + The maximum value is 1000; values above 1000 + will be coerced to 1000. + page_token (str): + Optional. Page token received from a previous + ``ListDataAttributeBindings`` call. Provide this to retrieve + the subsequent page. When paginating, all other parameters + provided to ``ListDataAttributeBindings`` must match the + call that provided the page token. + filter (str): + Optional. Filter request. + Filter using resource: + filter=resource:"resource-name" Filter using + attribute: filter=attributes:"attribute-name" + Filter using attribute in paths list: + + filter=paths.attributes:"attribute-name". + order_by (str): + Optional. Order by fields for the result. + """ + + parent: str = proto.Field( + proto.STRING, + number=1, + ) + page_size: int = proto.Field( + proto.INT32, + number=2, + ) + page_token: str = proto.Field( + proto.STRING, + number=3, + ) + filter: str = proto.Field( + proto.STRING, + number=4, + ) + order_by: str = proto.Field( + proto.STRING, + number=5, + ) + + +class ListDataAttributeBindingsResponse(proto.Message): + r"""List DataAttributeBindings response. + + Attributes: + data_attribute_bindings (MutableSequence[google.cloud.dataplex_v1.types.DataAttributeBinding]): + DataAttributeBindings under the given parent + Location. + next_page_token (str): + Token to retrieve the next page of results, + or empty if there are no more results in the + list. + unreachable_locations (MutableSequence[str]): + Locations that could not be reached. + """ + + @property + def raw_page(self): + return self + + data_attribute_bindings: MutableSequence['DataAttributeBinding'] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message='DataAttributeBinding', + ) + next_page_token: str = proto.Field( + proto.STRING, + number=2, + ) + unreachable_locations: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=3, + ) + + +class DeleteDataAttributeBindingRequest(proto.Message): + r"""Delete DataAttributeBinding request. + + Attributes: + name (str): + Required. The resource name of the DataAttributeBinding: + projects/{project_number}/locations/{location_id}/dataAttributeBindings/{data_attribute_binding_id} + etag (str): + Required. If the client provided etag value + does not match the current etag value, the + DeleteDataAttributeBindingRequest method returns + an ABORTED error response. Etags must be used + when calling the DeleteDataAttributeBinding. + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + etag: str = proto.Field( + proto.STRING, + number=2, + ) + + +__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/types/datascans.py b/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/types/datascans.py new file mode 100644 index 000000000000..5dc0236e91fd --- /dev/null +++ b/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/types/datascans.py @@ -0,0 +1,905 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from __future__ import annotations + +from typing import MutableMapping, MutableSequence + +import proto # type: ignore + +from google.cloud.dataplex_v1.types import data_discovery +from google.cloud.dataplex_v1.types import data_profile +from google.cloud.dataplex_v1.types import data_quality +from google.cloud.dataplex_v1.types import processing +from google.cloud.dataplex_v1.types import resources +from google.protobuf import field_mask_pb2 # type: ignore +from google.protobuf import timestamp_pb2 # type: ignore + + +__protobuf__ = proto.module( + package='google.cloud.dataplex.v1', + manifest={ + 'DataScanType', + 'CreateDataScanRequest', + 'UpdateDataScanRequest', + 'DeleteDataScanRequest', + 'GetDataScanRequest', + 'ListDataScansRequest', + 'ListDataScansResponse', + 'RunDataScanRequest', + 'RunDataScanResponse', + 'GetDataScanJobRequest', + 'ListDataScanJobsRequest', + 'ListDataScanJobsResponse', + 'GenerateDataQualityRulesRequest', + 'GenerateDataQualityRulesResponse', + 'DataScan', + 'DataScanJob', + }, +) + + +class DataScanType(proto.Enum): + r"""The type of data scan. + + Values: + DATA_SCAN_TYPE_UNSPECIFIED (0): + The data scan type is unspecified. + DATA_QUALITY (1): + Data quality scan. + DATA_PROFILE (2): + Data profile scan. + DATA_DISCOVERY (3): + Data discovery scan. + """ + DATA_SCAN_TYPE_UNSPECIFIED = 0 + DATA_QUALITY = 1 + DATA_PROFILE = 2 + DATA_DISCOVERY = 3 + + +class CreateDataScanRequest(proto.Message): + r"""Create dataScan request. + + Attributes: + parent (str): + Required. The resource name of the parent location: + ``projects/{project}/locations/{location_id}`` where + ``project`` refers to a *project_id* or *project_number* and + ``location_id`` refers to a GCP region. + data_scan (google.cloud.dataplex_v1.types.DataScan): + Required. DataScan resource. + data_scan_id (str): + Required. DataScan identifier. + + - Must contain only lowercase letters, numbers and hyphens. + - Must start with a letter. + - Must end with a number or a letter. + - Must be between 1-63 characters. + - Must be unique within the customer project / location. + validate_only (bool): + Optional. Only validate the request, but do not perform + mutations. The default is ``false``. + """ + + parent: str = proto.Field( + proto.STRING, + number=1, + ) + data_scan: 'DataScan' = proto.Field( + proto.MESSAGE, + number=2, + message='DataScan', + ) + data_scan_id: str = proto.Field( + proto.STRING, + number=3, + ) + validate_only: bool = proto.Field( + proto.BOOL, + number=4, + ) + + +class UpdateDataScanRequest(proto.Message): + r"""Update dataScan request. + + Attributes: + data_scan (google.cloud.dataplex_v1.types.DataScan): + Required. DataScan resource to be updated. + + Only fields specified in ``update_mask`` are updated. + update_mask (google.protobuf.field_mask_pb2.FieldMask): + Required. Mask of fields to update. + validate_only (bool): + Optional. Only validate the request, but do not perform + mutations. The default is ``false``. 
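+
+    A minimal request sketch (the resource name and description are
+    illustrative); setting ``validate_only=True`` checks the request
+    without mutating the scan::
+
+        from google.cloud import dataplex_v1
+        from google.protobuf import field_mask_pb2
+
+        request = dataplex_v1.UpdateDataScanRequest(
+            data_scan=dataplex_v1.DataScan(
+                name='projects/123/locations/us-central1/dataScans/my-scan',
+                description='Nightly data quality checks',
+            ),
+            update_mask=field_mask_pb2.FieldMask(paths=['description']),
+            validate_only=True,
+        )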
+ """ + + data_scan: 'DataScan' = proto.Field( + proto.MESSAGE, + number=1, + message='DataScan', + ) + update_mask: field_mask_pb2.FieldMask = proto.Field( + proto.MESSAGE, + number=2, + message=field_mask_pb2.FieldMask, + ) + validate_only: bool = proto.Field( + proto.BOOL, + number=3, + ) + + +class DeleteDataScanRequest(proto.Message): + r"""Delete dataScan request. + + Attributes: + name (str): + Required. The resource name of the dataScan: + ``projects/{project}/locations/{location_id}/dataScans/{data_scan_id}`` + where ``project`` refers to a *project_id* or + *project_number* and ``location_id`` refers to a GCP region. + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + + +class GetDataScanRequest(proto.Message): + r"""Get dataScan request. + + Attributes: + name (str): + Required. The resource name of the dataScan: + ``projects/{project}/locations/{location_id}/dataScans/{data_scan_id}`` + where ``project`` refers to a *project_id* or + *project_number* and ``location_id`` refers to a GCP region. + view (google.cloud.dataplex_v1.types.GetDataScanRequest.DataScanView): + Optional. Select the DataScan view to return. Defaults to + ``BASIC``. + """ + class DataScanView(proto.Enum): + r"""DataScan view options. + + Values: + DATA_SCAN_VIEW_UNSPECIFIED (0): + The API will default to the ``BASIC`` view. + BASIC (1): + Basic view that does not include *spec* and *result*. + FULL (10): + Include everything. + """ + DATA_SCAN_VIEW_UNSPECIFIED = 0 + BASIC = 1 + FULL = 10 + + name: str = proto.Field( + proto.STRING, + number=1, + ) + view: DataScanView = proto.Field( + proto.ENUM, + number=2, + enum=DataScanView, + ) + + +class ListDataScansRequest(proto.Message): + r"""List dataScans request. + + Attributes: + parent (str): + Required. The resource name of the parent location: + ``projects/{project}/locations/{location_id}`` where + ``project`` refers to a *project_id* or *project_number* and + ``location_id`` refers to a GCP region. + page_size (int): + Optional. Maximum number of dataScans to + return. The service may return fewer than this + value. If unspecified, at most 500 scans will be + returned. The maximum value is 1000; values + above 1000 will be coerced to 1000. + page_token (str): + Optional. Page token received from a previous + ``ListDataScans`` call. Provide this to retrieve the + subsequent page. When paginating, all other parameters + provided to ``ListDataScans`` must match the call that + provided the page token. + filter (str): + Optional. Filter request. + order_by (str): + Optional. Order by fields (``name`` or ``create_time``) for + the result. If not specified, the ordering is undefined. + """ + + parent: str = proto.Field( + proto.STRING, + number=1, + ) + page_size: int = proto.Field( + proto.INT32, + number=2, + ) + page_token: str = proto.Field( + proto.STRING, + number=3, + ) + filter: str = proto.Field( + proto.STRING, + number=4, + ) + order_by: str = proto.Field( + proto.STRING, + number=5, + ) + + +class ListDataScansResponse(proto.Message): + r"""List dataScans response. + + Attributes: + data_scans (MutableSequence[google.cloud.dataplex_v1.types.DataScan]): + DataScans (``BASIC`` view only) under the given parent + location. + next_page_token (str): + Token to retrieve the next page of results, + or empty if there are no more results in the + list. + unreachable (MutableSequence[str]): + Locations that could not be reached. 
+    """
+
+    @property
+    def raw_page(self):
+        return self
+
+    data_scans: MutableSequence['DataScan'] = proto.RepeatedField(
+        proto.MESSAGE,
+        number=1,
+        message='DataScan',
+    )
+    next_page_token: str = proto.Field(
+        proto.STRING,
+        number=2,
+    )
+    unreachable: MutableSequence[str] = proto.RepeatedField(
+        proto.STRING,
+        number=3,
+    )
+
+
+class RunDataScanRequest(proto.Message):
+    r"""Run DataScan request.
+
+    Attributes:
+        name (str):
+            Required. The resource name of the DataScan:
+            ``projects/{project}/locations/{location_id}/dataScans/{data_scan_id}``,
+            where ``project`` refers to a *project_id* or
+            *project_number* and ``location_id`` refers to a GCP region.
+
+            Only **OnDemand** data scans are allowed.
+    """
+
+    name: str = proto.Field(
+        proto.STRING,
+        number=1,
+    )
+
+
+class RunDataScanResponse(proto.Message):
+    r"""Run DataScan response.
+
+    Attributes:
+        job (google.cloud.dataplex_v1.types.DataScanJob):
+            DataScanJob created by RunDataScan request.
+    """
+
+    job: 'DataScanJob' = proto.Field(
+        proto.MESSAGE,
+        number=1,
+        message='DataScanJob',
+    )
+
+
+class GetDataScanJobRequest(proto.Message):
+    r"""Get DataScanJob request.
+
+    Attributes:
+        name (str):
+            Required. The resource name of the DataScanJob:
+            ``projects/{project}/locations/{location_id}/dataScans/{data_scan_id}/jobs/{data_scan_job_id}``
+            where ``project`` refers to a *project_id* or
+            *project_number* and ``location_id`` refers to a GCP region.
+        view (google.cloud.dataplex_v1.types.GetDataScanJobRequest.DataScanJobView):
+            Optional. Select the DataScanJob view to return. Defaults to
+            ``BASIC``.
+    """
+    class DataScanJobView(proto.Enum):
+        r"""DataScanJob view options.
+
+        Values:
+            DATA_SCAN_JOB_VIEW_UNSPECIFIED (0):
+                The API will default to the ``BASIC`` view.
+            BASIC (1):
+                Basic view that does not include *spec* and *result*.
+            FULL (10):
+                Include everything.
+        """
+        DATA_SCAN_JOB_VIEW_UNSPECIFIED = 0
+        BASIC = 1
+        FULL = 10
+
+    name: str = proto.Field(
+        proto.STRING,
+        number=1,
+    )
+    view: DataScanJobView = proto.Field(
+        proto.ENUM,
+        number=2,
+        enum=DataScanJobView,
+    )
+
+
+class ListDataScanJobsRequest(proto.Message):
+    r"""List DataScanJobs request.
+
+    Attributes:
+        parent (str):
+            Required. The resource name of the parent environment:
+            ``projects/{project}/locations/{location_id}/dataScans/{data_scan_id}``
+            where ``project`` refers to a *project_id* or
+            *project_number* and ``location_id`` refers to a GCP region.
+        page_size (int):
+            Optional. Maximum number of DataScanJobs to
+            return. The service may return fewer than this
+            value. If unspecified, at most 10 DataScanJobs
+            will be returned. The maximum value is 1000;
+            values above 1000 will be coerced to 1000.
+        page_token (str):
+            Optional. Page token received from a previous
+            ``ListDataScanJobs`` call. Provide this to retrieve the
+            subsequent page. When paginating, all other parameters
+            provided to ``ListDataScanJobs`` must match the call that
+            provided the page token.
+        filter (str):
+            Optional. An expression for filtering the results of the
+            ListDataScanJobs request.
+
+            If unspecified, all datascan jobs will be returned. Multiple
+            filters can be applied (with ``AND``, ``OR`` logical
+            operators). Filters are case-sensitive.
+
+            Allowed fields are:
+
+            -  ``start_time``
+            -  ``end_time``
+
+            ``start_time`` and ``end_time`` expect RFC-3339 formatted
+            strings (e.g. 2018-10-08T18:30:00-07:00).
+ + For instance, 'start_time > 2018-10-08T00:00:00.123456789Z + AND end_time < 2018-10-09T00:00:00.123456789Z' limits + results to DataScanJobs between specified start and end + times. + """ + + parent: str = proto.Field( + proto.STRING, + number=1, + ) + page_size: int = proto.Field( + proto.INT32, + number=2, + ) + page_token: str = proto.Field( + proto.STRING, + number=3, + ) + filter: str = proto.Field( + proto.STRING, + number=4, + ) + + +class ListDataScanJobsResponse(proto.Message): + r"""List DataScanJobs response. + + Attributes: + data_scan_jobs (MutableSequence[google.cloud.dataplex_v1.types.DataScanJob]): + DataScanJobs (``BASIC`` view only) under a given dataScan. + next_page_token (str): + Token to retrieve the next page of results, + or empty if there are no more results in the + list. + """ + + @property + def raw_page(self): + return self + + data_scan_jobs: MutableSequence['DataScanJob'] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message='DataScanJob', + ) + next_page_token: str = proto.Field( + proto.STRING, + number=2, + ) + + +class GenerateDataQualityRulesRequest(proto.Message): + r"""Request details for generating data quality rule + recommendations. + + Attributes: + name (str): + Required. The name must be one of the following: + + - The name of a data scan with at least one successful, + completed data profiling job + - The name of a successful, completed data profiling job (a + data scan job where the job type is data profiling) + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + + +class GenerateDataQualityRulesResponse(proto.Message): + r"""Response details for data quality rule recommendations. + + Attributes: + rule (MutableSequence[google.cloud.dataplex_v1.types.DataQualityRule]): + The data quality rules that Dataplex + generates based on the results of a data + profiling scan. + """ + + rule: MutableSequence[data_quality.DataQualityRule] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message=data_quality.DataQualityRule, + ) + + +class DataScan(proto.Message): + r"""Represents a user-visible job which provides the insights for the + related data source. + + For example: + + - Data Quality: generates queries based on the rules and runs + against the data to get data quality check results. + - Data Profile: analyzes the data in table(s) and generates + insights about the structure, content and relationships (such as + null percent, cardinality, min/max/mean, etc). + + This message has `oneof`_ fields (mutually exclusive fields). + For each oneof, at most one member field can be set at the same time. + Setting any member of the oneof automatically clears all other + members. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + name (str): + Output only. The relative resource name of the scan, of the + form: + ``projects/{project}/locations/{location_id}/dataScans/{datascan_id}``, + where ``project`` refers to a *project_id* or + *project_number* and ``location_id`` refers to a GCP region. + uid (str): + Output only. System generated globally unique + ID for the scan. This ID will be different if + the scan is deleted and re-created with the same + name. + description (str): + Optional. Description of the scan. + + - Must be between 1-1024 characters. + display_name (str): + Optional. User friendly display name. + + - Must be between 1-256 characters. + labels (MutableMapping[str, str]): + Optional. User-defined labels for the scan. 
+        state (google.cloud.dataplex_v1.types.State):
+            Output only. Current state of the DataScan.
+        create_time (google.protobuf.timestamp_pb2.Timestamp):
+            Output only. The time when the scan was
+            created.
+        update_time (google.protobuf.timestamp_pb2.Timestamp):
+            Output only. The time when the scan was last
+            updated.
+        data (google.cloud.dataplex_v1.types.DataSource):
+            Required. The data source for DataScan.
+        execution_spec (google.cloud.dataplex_v1.types.DataScan.ExecutionSpec):
+            Optional. DataScan execution settings.
+
+            If not specified, the fields in it will use
+            their default values.
+        execution_status (google.cloud.dataplex_v1.types.DataScan.ExecutionStatus):
+            Output only. Status of the data scan
+            execution.
+        type_ (google.cloud.dataplex_v1.types.DataScanType):
+            Output only. The type of DataScan.
+        data_quality_spec (google.cloud.dataplex_v1.types.DataQualitySpec):
+            Settings for a data quality scan.
+
+            This field is a member of `oneof`_ ``spec``.
+        data_profile_spec (google.cloud.dataplex_v1.types.DataProfileSpec):
+            Settings for a data profile scan.
+
+            This field is a member of `oneof`_ ``spec``.
+        data_discovery_spec (google.cloud.dataplex_v1.types.DataDiscoverySpec):
+            Settings for a data discovery scan.
+
+            This field is a member of `oneof`_ ``spec``.
+        data_quality_result (google.cloud.dataplex_v1.types.DataQualityResult):
+            Output only. The result of a data quality
+            scan.
+
+            This field is a member of `oneof`_ ``result``.
+        data_profile_result (google.cloud.dataplex_v1.types.DataProfileResult):
+            Output only. The result of a data profile
+            scan.
+
+            This field is a member of `oneof`_ ``result``.
+        data_discovery_result (google.cloud.dataplex_v1.types.DataDiscoveryResult):
+            Output only. The result of a data discovery
+            scan.
+
+            This field is a member of `oneof`_ ``result``.
+    """
+
+    class ExecutionSpec(proto.Message):
+        r"""DataScan execution settings.
+
+        .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields
+
+        Attributes:
+            trigger (google.cloud.dataplex_v1.types.Trigger):
+                Optional. Spec related to how often and when a scan should
+                be triggered.
+
+                If not specified, the default is ``OnDemand``, which means
+                the scan will not run until the user calls the
+                ``RunDataScan`` API.
+            field (str):
+                Immutable. The unnested field (of type *Date* or
+                *Timestamp*) that contains values which monotonically
+                increase over time.
+
+                If not specified, a data scan will run for all data in the
+                table.
+
+                This field is a member of `oneof`_ ``incremental``.
+        """
+
+        trigger: processing.Trigger = proto.Field(
+            proto.MESSAGE,
+            number=1,
+            message=processing.Trigger,
+        )
+        field: str = proto.Field(
+            proto.STRING,
+            number=100,
+            oneof='incremental',
+        )
+
+    class ExecutionStatus(proto.Message):
+        r"""Status of the data scan execution.
+
+        Attributes:
+            latest_job_start_time (google.protobuf.timestamp_pb2.Timestamp):
+                The time when the latest DataScanJob started.
+            latest_job_end_time (google.protobuf.timestamp_pb2.Timestamp):
+                The time when the latest DataScanJob ended.
+            latest_job_create_time (google.protobuf.timestamp_pb2.Timestamp):
+                Optional. The time when the DataScanJob
+                execution was created.
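+
+        A read-only sketch (illustrative; the scan name is a placeholder):
+
+        .. code-block:: python
+
+            from google.cloud import dataplex_v1
+
+            client = dataplex_v1.DataScanServiceClient()
+            scan = client.get_data_scan(
+                name="projects/my-project/locations/us-central1/dataScans/my-scan",
+            )
+
+            # latest_job_end_time stays unset while the latest job is
+            # still running.
+            status = scan.execution_status
+            print(status.latest_job_start_time, status.latest_job_end_time)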
+ """ + + latest_job_start_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=4, + message=timestamp_pb2.Timestamp, + ) + latest_job_end_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=5, + message=timestamp_pb2.Timestamp, + ) + latest_job_create_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=6, + message=timestamp_pb2.Timestamp, + ) + + name: str = proto.Field( + proto.STRING, + number=1, + ) + uid: str = proto.Field( + proto.STRING, + number=2, + ) + description: str = proto.Field( + proto.STRING, + number=3, + ) + display_name: str = proto.Field( + proto.STRING, + number=4, + ) + labels: MutableMapping[str, str] = proto.MapField( + proto.STRING, + proto.STRING, + number=5, + ) + state: resources.State = proto.Field( + proto.ENUM, + number=6, + enum=resources.State, + ) + create_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=7, + message=timestamp_pb2.Timestamp, + ) + update_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=8, + message=timestamp_pb2.Timestamp, + ) + data: processing.DataSource = proto.Field( + proto.MESSAGE, + number=9, + message=processing.DataSource, + ) + execution_spec: ExecutionSpec = proto.Field( + proto.MESSAGE, + number=10, + message=ExecutionSpec, + ) + execution_status: ExecutionStatus = proto.Field( + proto.MESSAGE, + number=11, + message=ExecutionStatus, + ) + type_: 'DataScanType' = proto.Field( + proto.ENUM, + number=12, + enum='DataScanType', + ) + data_quality_spec: data_quality.DataQualitySpec = proto.Field( + proto.MESSAGE, + number=100, + oneof='spec', + message=data_quality.DataQualitySpec, + ) + data_profile_spec: data_profile.DataProfileSpec = proto.Field( + proto.MESSAGE, + number=101, + oneof='spec', + message=data_profile.DataProfileSpec, + ) + data_discovery_spec: data_discovery.DataDiscoverySpec = proto.Field( + proto.MESSAGE, + number=102, + oneof='spec', + message=data_discovery.DataDiscoverySpec, + ) + data_quality_result: data_quality.DataQualityResult = proto.Field( + proto.MESSAGE, + number=200, + oneof='result', + message=data_quality.DataQualityResult, + ) + data_profile_result: data_profile.DataProfileResult = proto.Field( + proto.MESSAGE, + number=201, + oneof='result', + message=data_profile.DataProfileResult, + ) + data_discovery_result: data_discovery.DataDiscoveryResult = proto.Field( + proto.MESSAGE, + number=202, + oneof='result', + message=data_discovery.DataDiscoveryResult, + ) + + +class DataScanJob(proto.Message): + r"""A DataScanJob represents an instance of DataScan execution. + + This message has `oneof`_ fields (mutually exclusive fields). + For each oneof, at most one member field can be set at the same time. + Setting any member of the oneof automatically clears all other + members. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + name (str): + Output only. The relative resource name of the DataScanJob, + of the form: + ``projects/{project}/locations/{location_id}/dataScans/{datascan_id}/jobs/{job_id}``, + where ``project`` refers to a *project_id* or + *project_number* and ``location_id`` refers to a GCP region. + uid (str): + Output only. System generated globally unique + ID for the DataScanJob. + create_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. The time when the DataScanJob + was created. + start_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. The time when the DataScanJob + was started. 
+ end_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. The time when the DataScanJob + ended. + state (google.cloud.dataplex_v1.types.DataScanJob.State): + Output only. Execution state for the + DataScanJob. + message (str): + Output only. Additional information about the + current state. + type_ (google.cloud.dataplex_v1.types.DataScanType): + Output only. The type of the parent DataScan. + data_quality_spec (google.cloud.dataplex_v1.types.DataQualitySpec): + Output only. Settings for a data quality + scan. + + This field is a member of `oneof`_ ``spec``. + data_profile_spec (google.cloud.dataplex_v1.types.DataProfileSpec): + Output only. Settings for a data profile + scan. + + This field is a member of `oneof`_ ``spec``. + data_discovery_spec (google.cloud.dataplex_v1.types.DataDiscoverySpec): + Output only. Settings for a data discovery + scan. + + This field is a member of `oneof`_ ``spec``. + data_quality_result (google.cloud.dataplex_v1.types.DataQualityResult): + Output only. The result of a data quality + scan. + + This field is a member of `oneof`_ ``result``. + data_profile_result (google.cloud.dataplex_v1.types.DataProfileResult): + Output only. The result of a data profile + scan. + + This field is a member of `oneof`_ ``result``. + data_discovery_result (google.cloud.dataplex_v1.types.DataDiscoveryResult): + Output only. The result of a data discovery + scan. + + This field is a member of `oneof`_ ``result``. + """ + class State(proto.Enum): + r"""Execution state for the DataScanJob. + + Values: + STATE_UNSPECIFIED (0): + The DataScanJob state is unspecified. + RUNNING (1): + The DataScanJob is running. + CANCELING (2): + The DataScanJob is canceling. + CANCELLED (3): + The DataScanJob cancellation was successful. + SUCCEEDED (4): + The DataScanJob completed successfully. + FAILED (5): + The DataScanJob is no longer running due to + an error. + PENDING (7): + The DataScanJob has been created but not + started to run yet. 
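+
+        A polling sketch (illustrative; the job name is a placeholder and
+        the poll interval is arbitrary):
+
+        .. code-block:: python
+
+            import time
+
+            from google.cloud import dataplex_v1
+
+            client = dataplex_v1.DataScanServiceClient()
+            job_name = (
+                "projects/my-project/locations/us-central1"
+                "/dataScans/my-scan/jobs/my-job"
+            )
+
+            # CANCELLED, SUCCEEDED and FAILED are the terminal states.
+            terminal = {
+                dataplex_v1.DataScanJob.State.SUCCEEDED,
+                dataplex_v1.DataScanJob.State.FAILED,
+                dataplex_v1.DataScanJob.State.CANCELLED,
+            }
+            job = client.get_data_scan_job(name=job_name)
+            while job.state not in terminal:
+                time.sleep(30)
+                job = client.get_data_scan_job(name=job_name)
+            print(job.state, job.message)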
+ """ + STATE_UNSPECIFIED = 0 + RUNNING = 1 + CANCELING = 2 + CANCELLED = 3 + SUCCEEDED = 4 + FAILED = 5 + PENDING = 7 + + name: str = proto.Field( + proto.STRING, + number=1, + ) + uid: str = proto.Field( + proto.STRING, + number=2, + ) + create_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=8, + message=timestamp_pb2.Timestamp, + ) + start_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=3, + message=timestamp_pb2.Timestamp, + ) + end_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=4, + message=timestamp_pb2.Timestamp, + ) + state: State = proto.Field( + proto.ENUM, + number=5, + enum=State, + ) + message: str = proto.Field( + proto.STRING, + number=6, + ) + type_: 'DataScanType' = proto.Field( + proto.ENUM, + number=7, + enum='DataScanType', + ) + data_quality_spec: data_quality.DataQualitySpec = proto.Field( + proto.MESSAGE, + number=100, + oneof='spec', + message=data_quality.DataQualitySpec, + ) + data_profile_spec: data_profile.DataProfileSpec = proto.Field( + proto.MESSAGE, + number=101, + oneof='spec', + message=data_profile.DataProfileSpec, + ) + data_discovery_spec: data_discovery.DataDiscoverySpec = proto.Field( + proto.MESSAGE, + number=102, + oneof='spec', + message=data_discovery.DataDiscoverySpec, + ) + data_quality_result: data_quality.DataQualityResult = proto.Field( + proto.MESSAGE, + number=200, + oneof='result', + message=data_quality.DataQualityResult, + ) + data_profile_result: data_profile.DataProfileResult = proto.Field( + proto.MESSAGE, + number=201, + oneof='result', + message=data_profile.DataProfileResult, + ) + data_discovery_result: data_discovery.DataDiscoveryResult = proto.Field( + proto.MESSAGE, + number=202, + oneof='result', + message=data_discovery.DataDiscoveryResult, + ) + + +__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/types/logs.py b/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/types/logs.py new file mode 100644 index 000000000000..6e4c2bbb0ddb --- /dev/null +++ b/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/types/logs.py @@ -0,0 +1,1352 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from __future__ import annotations + +from typing import MutableMapping, MutableSequence + +import proto # type: ignore + +from google.protobuf import duration_pb2 # type: ignore +from google.protobuf import timestamp_pb2 # type: ignore + + +__protobuf__ = proto.module( + package='google.cloud.dataplex.v1', + manifest={ + 'DiscoveryEvent', + 'JobEvent', + 'SessionEvent', + 'GovernanceEvent', + 'DataScanEvent', + 'DataQualityScanRuleResult', + }, +) + + +class DiscoveryEvent(proto.Message): + r"""The payload associated with Discovery data processing. + + This message has `oneof`_ fields (mutually exclusive fields). + For each oneof, at most one member field can be set at the same time. 
+    Setting any member of the oneof automatically clears all other
+    members.
+
+    .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields
+
+    Attributes:
+        message (str):
+            The log message.
+        lake_id (str):
+            The id of the associated lake.
+        zone_id (str):
+            The id of the associated zone.
+        asset_id (str):
+            The id of the associated asset.
+        data_location (str):
+            The data location associated with the event.
+        datascan_id (str):
+            The id of the associated datascan for
+            standalone discovery.
+        type_ (google.cloud.dataplex_v1.types.DiscoveryEvent.EventType):
+            The type of the event being logged.
+        config (google.cloud.dataplex_v1.types.DiscoveryEvent.ConfigDetails):
+            Details about discovery configuration in
+            effect.
+
+            This field is a member of `oneof`_ ``details``.
+        entity (google.cloud.dataplex_v1.types.DiscoveryEvent.EntityDetails):
+            Details about the entity associated with the
+            event.
+
+            This field is a member of `oneof`_ ``details``.
+        partition (google.cloud.dataplex_v1.types.DiscoveryEvent.PartitionDetails):
+            Details about the partition associated with
+            the event.
+
+            This field is a member of `oneof`_ ``details``.
+        action (google.cloud.dataplex_v1.types.DiscoveryEvent.ActionDetails):
+            Details about the action associated with the
+            event.
+
+            This field is a member of `oneof`_ ``details``.
+        table (google.cloud.dataplex_v1.types.DiscoveryEvent.TableDetails):
+            Details about the BigQuery table publishing
+            associated with the event.
+
+            This field is a member of `oneof`_ ``details``.
+    """
+    class EventType(proto.Enum):
+        r"""The type of the event.
+
+        Values:
+            EVENT_TYPE_UNSPECIFIED (0):
+                An unspecified event type.
+            CONFIG (1):
+                An event representing discovery configuration
+                in effect.
+            ENTITY_CREATED (2):
+                An event representing a metadata entity being
+                created.
+            ENTITY_UPDATED (3):
+                An event representing a metadata entity being
+                updated.
+            ENTITY_DELETED (4):
+                An event representing a metadata entity being
+                deleted.
+            PARTITION_CREATED (5):
+                An event representing a partition being
+                created.
+            PARTITION_UPDATED (6):
+                An event representing a partition being
+                updated.
+            PARTITION_DELETED (7):
+                An event representing a partition being
+                deleted.
+            TABLE_PUBLISHED (10):
+                An event representing a table being
+                published.
+            TABLE_UPDATED (11):
+                An event representing a table being updated.
+            TABLE_IGNORED (12):
+                An event representing a table being skipped
+                in publishing.
+            TABLE_DELETED (13):
+                An event representing a table being deleted.
+        """
+        EVENT_TYPE_UNSPECIFIED = 0
+        CONFIG = 1
+        ENTITY_CREATED = 2
+        ENTITY_UPDATED = 3
+        ENTITY_DELETED = 4
+        PARTITION_CREATED = 5
+        PARTITION_UPDATED = 6
+        PARTITION_DELETED = 7
+        TABLE_PUBLISHED = 10
+        TABLE_UPDATED = 11
+        TABLE_IGNORED = 12
+        TABLE_DELETED = 13
+
+    class EntityType(proto.Enum):
+        r"""The type of the entity.
+
+        Values:
+            ENTITY_TYPE_UNSPECIFIED (0):
+                An unspecified entity type.
+            TABLE (1):
+                Entities representing structured data.
+            FILESET (2):
+                Entities representing unstructured data.
+        """
+        ENTITY_TYPE_UNSPECIFIED = 0
+        TABLE = 1
+        FILESET = 2
+
+    class TableType(proto.Enum):
+        r"""The type of the published table.
+
+        Values:
+            TABLE_TYPE_UNSPECIFIED (0):
+                An unspecified table type.
+            EXTERNAL_TABLE (1):
+                External table type.
+            BIGLAKE_TABLE (2):
+                BigLake table type.
+            OBJECT_TABLE (3):
+                Object table type for unstructured data.
+        """
+        TABLE_TYPE_UNSPECIFIED = 0
+        EXTERNAL_TABLE = 1
+        BIGLAKE_TABLE = 2
+        OBJECT_TABLE = 3
+
+    class ConfigDetails(proto.Message):
+        r"""Details about configuration events.
+
+        Attributes:
+            parameters (MutableMapping[str, str]):
+                A list of discovery configuration parameters
+                in effect. The keys are the field paths within
+                DiscoverySpec. E.g. includePatterns,
+                excludePatterns,
+                csvOptions.disableTypeInference, etc.
+        """
+
+        parameters: MutableMapping[str, str] = proto.MapField(
+            proto.STRING,
+            proto.STRING,
+            number=1,
+        )
+
+    class EntityDetails(proto.Message):
+        r"""Details about the entity.
+
+        Attributes:
+            entity (str):
+                The name of the entity resource.
+                The name is the fully-qualified resource name.
+            type_ (google.cloud.dataplex_v1.types.DiscoveryEvent.EntityType):
+                The type of the entity resource.
+        """
+
+        entity: str = proto.Field(
+            proto.STRING,
+            number=1,
+        )
+        type_: 'DiscoveryEvent.EntityType' = proto.Field(
+            proto.ENUM,
+            number=2,
+            enum='DiscoveryEvent.EntityType',
+        )
+
+    class TableDetails(proto.Message):
+        r"""Details about the published table.
+
+        Attributes:
+            table (str):
+                The fully-qualified resource name of the
+                table resource.
+            type_ (google.cloud.dataplex_v1.types.DiscoveryEvent.TableType):
+                The type of the table resource.
+        """
+
+        table: str = proto.Field(
+            proto.STRING,
+            number=1,
+        )
+        type_: 'DiscoveryEvent.TableType' = proto.Field(
+            proto.ENUM,
+            number=2,
+            enum='DiscoveryEvent.TableType',
+        )
+
+    class PartitionDetails(proto.Message):
+        r"""Details about the partition.
+
+        Attributes:
+            partition (str):
+                The name of the partition resource.
+                The name is the fully-qualified resource name.
+            entity (str):
+                The name of the containing entity resource.
+                The name is the fully-qualified resource name.
+            type_ (google.cloud.dataplex_v1.types.DiscoveryEvent.EntityType):
+                The type of the containing entity resource.
+            sampled_data_locations (MutableSequence[str]):
+                The locations of the data items (e.g., Cloud
+                Storage objects) sampled for metadata
+                inference.
+        """
+
+        partition: str = proto.Field(
+            proto.STRING,
+            number=1,
+        )
+        entity: str = proto.Field(
+            proto.STRING,
+            number=2,
+        )
+        type_: 'DiscoveryEvent.EntityType' = proto.Field(
+            proto.ENUM,
+            number=3,
+            enum='DiscoveryEvent.EntityType',
+        )
+        sampled_data_locations: MutableSequence[str] = proto.RepeatedField(
+            proto.STRING,
+            number=4,
+        )
+
+    class ActionDetails(proto.Message):
+        r"""Details about the action.
+
+        Attributes:
+            type_ (str):
+                The type of action.
+                E.g. IncompatibleDataSchema, InvalidDataFormat.
+            issue (str):
+                The human-readable issue associated with the
+                action.
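+
+        A dispatch sketch over the ``details`` oneof (illustrative;
+        ``event`` is assumed to be an already-deserialized DiscoveryEvent):
+
+        .. code-block:: python
+
+            from google.cloud import dataplex_v1
+
+            def describe(event: dataplex_v1.DiscoveryEvent) -> str:
+                # The underlying protobuf message reports which oneof
+                # member is set by its field name.
+                detail = dataplex_v1.DiscoveryEvent.pb(event).WhichOneof("details")
+                if detail == "action":
+                    return f"{event.action.type_}: {event.action.issue}"
+                if detail == "table":
+                    return f"{event.table.type_!s} {event.table.table}"
+                return f"{event.type_!s} at {event.data_location}"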
+ """ + + type_: str = proto.Field( + proto.STRING, + number=1, + ) + issue: str = proto.Field( + proto.STRING, + number=2, + ) + + message: str = proto.Field( + proto.STRING, + number=1, + ) + lake_id: str = proto.Field( + proto.STRING, + number=2, + ) + zone_id: str = proto.Field( + proto.STRING, + number=3, + ) + asset_id: str = proto.Field( + proto.STRING, + number=4, + ) + data_location: str = proto.Field( + proto.STRING, + number=5, + ) + datascan_id: str = proto.Field( + proto.STRING, + number=6, + ) + type_: EventType = proto.Field( + proto.ENUM, + number=10, + enum=EventType, + ) + config: ConfigDetails = proto.Field( + proto.MESSAGE, + number=20, + oneof='details', + message=ConfigDetails, + ) + entity: EntityDetails = proto.Field( + proto.MESSAGE, + number=21, + oneof='details', + message=EntityDetails, + ) + partition: PartitionDetails = proto.Field( + proto.MESSAGE, + number=22, + oneof='details', + message=PartitionDetails, + ) + action: ActionDetails = proto.Field( + proto.MESSAGE, + number=23, + oneof='details', + message=ActionDetails, + ) + table: TableDetails = proto.Field( + proto.MESSAGE, + number=24, + oneof='details', + message=TableDetails, + ) + + +class JobEvent(proto.Message): + r"""The payload associated with Job logs that contains events + describing jobs that have run within a Lake. + + Attributes: + message (str): + The log message. + job_id (str): + The unique id identifying the job. + start_time (google.protobuf.timestamp_pb2.Timestamp): + The time when the job started running. + end_time (google.protobuf.timestamp_pb2.Timestamp): + The time when the job ended running. + state (google.cloud.dataplex_v1.types.JobEvent.State): + The job state on completion. + retries (int): + The number of retries. + type_ (google.cloud.dataplex_v1.types.JobEvent.Type): + The type of the job. + service (google.cloud.dataplex_v1.types.JobEvent.Service): + The service used to execute the job. + service_job (str): + The reference to the job within the service. + execution_trigger (google.cloud.dataplex_v1.types.JobEvent.ExecutionTrigger): + Job execution trigger. + """ + class Type(proto.Enum): + r"""The type of the job. + + Values: + TYPE_UNSPECIFIED (0): + Unspecified job type. + SPARK (1): + Spark jobs. + NOTEBOOK (2): + Notebook jobs. + """ + TYPE_UNSPECIFIED = 0 + SPARK = 1 + NOTEBOOK = 2 + + class State(proto.Enum): + r"""The completion status of the job. + + Values: + STATE_UNSPECIFIED (0): + Unspecified job state. + SUCCEEDED (1): + Job successfully completed. + FAILED (2): + Job was unsuccessful. + CANCELLED (3): + Job was cancelled by the user. + ABORTED (4): + Job was cancelled or aborted via the service + executing the job. + """ + STATE_UNSPECIFIED = 0 + SUCCEEDED = 1 + FAILED = 2 + CANCELLED = 3 + ABORTED = 4 + + class Service(proto.Enum): + r"""The service used to execute the job. + + Values: + SERVICE_UNSPECIFIED (0): + Unspecified service. + DATAPROC (1): + Cloud Dataproc. + """ + SERVICE_UNSPECIFIED = 0 + DATAPROC = 1 + + class ExecutionTrigger(proto.Enum): + r"""Job Execution trigger. + + Values: + EXECUTION_TRIGGER_UNSPECIFIED (0): + The job execution trigger is unspecified. + TASK_CONFIG (1): + The job was triggered by Dataplex based on + trigger spec from task definition. + RUN_REQUEST (2): + The job was triggered by the explicit call of + Task API. 
+        """
+        EXECUTION_TRIGGER_UNSPECIFIED = 0
+        TASK_CONFIG = 1
+        RUN_REQUEST = 2
+
+    message: str = proto.Field(
+        proto.STRING,
+        number=1,
+    )
+    job_id: str = proto.Field(
+        proto.STRING,
+        number=2,
+    )
+    start_time: timestamp_pb2.Timestamp = proto.Field(
+        proto.MESSAGE,
+        number=3,
+        message=timestamp_pb2.Timestamp,
+    )
+    end_time: timestamp_pb2.Timestamp = proto.Field(
+        proto.MESSAGE,
+        number=4,
+        message=timestamp_pb2.Timestamp,
+    )
+    state: State = proto.Field(
+        proto.ENUM,
+        number=5,
+        enum=State,
+    )
+    retries: int = proto.Field(
+        proto.INT32,
+        number=6,
+    )
+    type_: Type = proto.Field(
+        proto.ENUM,
+        number=7,
+        enum=Type,
+    )
+    service: Service = proto.Field(
+        proto.ENUM,
+        number=8,
+        enum=Service,
+    )
+    service_job: str = proto.Field(
+        proto.STRING,
+        number=9,
+    )
+    execution_trigger: ExecutionTrigger = proto.Field(
+        proto.ENUM,
+        number=11,
+        enum=ExecutionTrigger,
+    )
+
+
+class SessionEvent(proto.Message):
+    r"""These messages contain information about sessions within an
+    environment. The monitored resource is 'Environment'.
+
+
+    .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields
+
+    Attributes:
+        message (str):
+            The log message.
+        user_id (str):
+            The information about the user that created
+            the session. It will be the email address of the
+            user.
+        session_id (str):
+            Unique identifier for the session.
+        type_ (google.cloud.dataplex_v1.types.SessionEvent.EventType):
+            The type of the event.
+        query (google.cloud.dataplex_v1.types.SessionEvent.QueryDetail):
+            The execution details of the query.
+
+            This field is a member of `oneof`_ ``detail``.
+        event_succeeded (bool):
+            The status of the event.
+        fast_startup_enabled (bool):
+            If the session is associated with an
+            environment with fast startup enabled, and was
+            created before being assigned to a user.
+        unassigned_duration (google.protobuf.duration_pb2.Duration):
+            The idle duration of a warm pooled session
+            before it is assigned to a user.
+    """
+    class EventType(proto.Enum):
+        r"""The type of the event.
+
+        Values:
+            EVENT_TYPE_UNSPECIFIED (0):
+                An unspecified event type.
+            START (1):
+                Event when the session is assigned to a user.
+            STOP (2):
+                Event for stop of a session.
+            QUERY (3):
+                Query events in the session.
+            CREATE (4):
+                Event for creation of a cluster. It is not
+                yet assigned to a user. This comes before START
+                in the sequence.
+        """
+        EVENT_TYPE_UNSPECIFIED = 0
+        START = 1
+        STOP = 2
+        QUERY = 3
+        CREATE = 4
+
+    class QueryDetail(proto.Message):
+        r"""Execution details of the query.
+
+        Attributes:
+            query_id (str):
+                The unique Query id identifying the query.
+            query_text (str):
+                The query text executed.
+            engine (google.cloud.dataplex_v1.types.SessionEvent.QueryDetail.Engine):
+                Query Execution engine.
+            duration (google.protobuf.duration_pb2.Duration):
+                Time taken for execution of the query.
+            result_size_bytes (int):
+                The size of results the query produced.
+            data_processed_bytes (int):
+                The data processed by the query.
+        """
+        class Engine(proto.Enum):
+            r"""Query Execution engine.
+
+            Values:
+                ENGINE_UNSPECIFIED (0):
+                    An unspecified Engine type.
+                SPARK_SQL (1):
+                    Spark-sql engine is specified in Query.
+                BIGQUERY (2):
+                    BigQuery engine is specified in Query.
+ """ + ENGINE_UNSPECIFIED = 0 + SPARK_SQL = 1 + BIGQUERY = 2 + + query_id: str = proto.Field( + proto.STRING, + number=1, + ) + query_text: str = proto.Field( + proto.STRING, + number=2, + ) + engine: 'SessionEvent.QueryDetail.Engine' = proto.Field( + proto.ENUM, + number=3, + enum='SessionEvent.QueryDetail.Engine', + ) + duration: duration_pb2.Duration = proto.Field( + proto.MESSAGE, + number=4, + message=duration_pb2.Duration, + ) + result_size_bytes: int = proto.Field( + proto.INT64, + number=5, + ) + data_processed_bytes: int = proto.Field( + proto.INT64, + number=6, + ) + + message: str = proto.Field( + proto.STRING, + number=1, + ) + user_id: str = proto.Field( + proto.STRING, + number=2, + ) + session_id: str = proto.Field( + proto.STRING, + number=3, + ) + type_: EventType = proto.Field( + proto.ENUM, + number=4, + enum=EventType, + ) + query: QueryDetail = proto.Field( + proto.MESSAGE, + number=5, + oneof='detail', + message=QueryDetail, + ) + event_succeeded: bool = proto.Field( + proto.BOOL, + number=6, + ) + fast_startup_enabled: bool = proto.Field( + proto.BOOL, + number=7, + ) + unassigned_duration: duration_pb2.Duration = proto.Field( + proto.MESSAGE, + number=8, + message=duration_pb2.Duration, + ) + + +class GovernanceEvent(proto.Message): + r"""Payload associated with Governance related log events. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + message (str): + The log message. + event_type (google.cloud.dataplex_v1.types.GovernanceEvent.EventType): + The type of the event. + entity (google.cloud.dataplex_v1.types.GovernanceEvent.Entity): + Entity resource information if the log event + is associated with a specific entity. + + This field is a member of `oneof`_ ``_entity``. + """ + class EventType(proto.Enum): + r"""Type of governance log event. + + Values: + EVENT_TYPE_UNSPECIFIED (0): + An unspecified event type. + RESOURCE_IAM_POLICY_UPDATE (1): + Resource IAM policy update event. + BIGQUERY_TABLE_CREATE (2): + BigQuery table create event. + BIGQUERY_TABLE_UPDATE (3): + BigQuery table update event. + BIGQUERY_TABLE_DELETE (4): + BigQuery table delete event. + BIGQUERY_CONNECTION_CREATE (5): + BigQuery connection create event. + BIGQUERY_CONNECTION_UPDATE (6): + BigQuery connection update event. + BIGQUERY_CONNECTION_DELETE (7): + BigQuery connection delete event. + BIGQUERY_TAXONOMY_CREATE (10): + BigQuery taxonomy created. + BIGQUERY_POLICY_TAG_CREATE (11): + BigQuery policy tag created. + BIGQUERY_POLICY_TAG_DELETE (12): + BigQuery policy tag deleted. + BIGQUERY_POLICY_TAG_SET_IAM_POLICY (13): + BigQuery set iam policy for policy tag. + ACCESS_POLICY_UPDATE (14): + Access policy update event. + GOVERNANCE_RULE_MATCHED_RESOURCES (15): + Number of resources matched with particular + Query. + GOVERNANCE_RULE_SEARCH_LIMIT_EXCEEDS (16): + Rule processing exceeds the allowed limit. + GOVERNANCE_RULE_ERRORS (17): + Rule processing errors. + GOVERNANCE_RULE_PROCESSING (18): + Governance rule processing Event. 
+ """ + EVENT_TYPE_UNSPECIFIED = 0 + RESOURCE_IAM_POLICY_UPDATE = 1 + BIGQUERY_TABLE_CREATE = 2 + BIGQUERY_TABLE_UPDATE = 3 + BIGQUERY_TABLE_DELETE = 4 + BIGQUERY_CONNECTION_CREATE = 5 + BIGQUERY_CONNECTION_UPDATE = 6 + BIGQUERY_CONNECTION_DELETE = 7 + BIGQUERY_TAXONOMY_CREATE = 10 + BIGQUERY_POLICY_TAG_CREATE = 11 + BIGQUERY_POLICY_TAG_DELETE = 12 + BIGQUERY_POLICY_TAG_SET_IAM_POLICY = 13 + ACCESS_POLICY_UPDATE = 14 + GOVERNANCE_RULE_MATCHED_RESOURCES = 15 + GOVERNANCE_RULE_SEARCH_LIMIT_EXCEEDS = 16 + GOVERNANCE_RULE_ERRORS = 17 + GOVERNANCE_RULE_PROCESSING = 18 + + class Entity(proto.Message): + r"""Information about Entity resource that the log event is + associated with. + + Attributes: + entity (str): + The Entity resource the log event is associated with. + Format: + ``projects/{project_number}/locations/{location_id}/lakes/{lake_id}/zones/{zone_id}/entities/{entity_id}`` + entity_type (google.cloud.dataplex_v1.types.GovernanceEvent.Entity.EntityType): + Type of entity. + """ + class EntityType(proto.Enum): + r"""Type of entity. + + Values: + ENTITY_TYPE_UNSPECIFIED (0): + An unspecified Entity type. + TABLE (1): + Table entity type. + FILESET (2): + Fileset entity type. + """ + ENTITY_TYPE_UNSPECIFIED = 0 + TABLE = 1 + FILESET = 2 + + entity: str = proto.Field( + proto.STRING, + number=1, + ) + entity_type: 'GovernanceEvent.Entity.EntityType' = proto.Field( + proto.ENUM, + number=2, + enum='GovernanceEvent.Entity.EntityType', + ) + + message: str = proto.Field( + proto.STRING, + number=1, + ) + event_type: EventType = proto.Field( + proto.ENUM, + number=2, + enum=EventType, + ) + entity: Entity = proto.Field( + proto.MESSAGE, + number=3, + optional=True, + message=Entity, + ) + + +class DataScanEvent(proto.Message): + r"""These messages contain information about the execution of a + datascan. The monitored resource is 'DataScan' + + This message has `oneof`_ fields (mutually exclusive fields). + For each oneof, at most one member field can be set at the same time. + Setting any member of the oneof automatically clears all other + members. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + data_source (str): + The data source of the data scan + job_id (str): + The identifier of the specific data scan job + this log entry is for. + create_time (google.protobuf.timestamp_pb2.Timestamp): + The time when the data scan job was created. + start_time (google.protobuf.timestamp_pb2.Timestamp): + The time when the data scan job started to + run. + end_time (google.protobuf.timestamp_pb2.Timestamp): + The time when the data scan job finished. + type_ (google.cloud.dataplex_v1.types.DataScanEvent.ScanType): + The type of the data scan. + state (google.cloud.dataplex_v1.types.DataScanEvent.State): + The status of the data scan job. + message (str): + The message describing the data scan job + event. + spec_version (str): + A version identifier of the spec which was + used to execute this job. + trigger (google.cloud.dataplex_v1.types.DataScanEvent.Trigger): + The trigger type of the data scan job. + scope (google.cloud.dataplex_v1.types.DataScanEvent.Scope): + The scope of the data scan (e.g. full, + incremental). + data_profile (google.cloud.dataplex_v1.types.DataScanEvent.DataProfileResult): + Data profile result for data profile type + data scan. + + This field is a member of `oneof`_ ``result``. 
+        data_quality (google.cloud.dataplex_v1.types.DataScanEvent.DataQualityResult):
+            Data quality result for data quality type
+            data scan.
+
+            This field is a member of `oneof`_ ``result``.
+        data_profile_configs (google.cloud.dataplex_v1.types.DataScanEvent.DataProfileAppliedConfigs):
+            Applied configs for data profile type data
+            scan.
+
+            This field is a member of `oneof`_ ``appliedConfigs``.
+        data_quality_configs (google.cloud.dataplex_v1.types.DataScanEvent.DataQualityAppliedConfigs):
+            Applied configs for data quality type data
+            scan.
+
+            This field is a member of `oneof`_ ``appliedConfigs``.
+        post_scan_actions_result (google.cloud.dataplex_v1.types.DataScanEvent.PostScanActionsResult):
+            The result of post scan actions.
+    """
+    class ScanType(proto.Enum):
+        r"""The type of the data scan.
+
+        Values:
+            SCAN_TYPE_UNSPECIFIED (0):
+                An unspecified data scan type.
+            DATA_PROFILE (1):
+                Data scan for data profile.
+            DATA_QUALITY (2):
+                Data scan for data quality.
+            DATA_DISCOVERY (4):
+                Data scan for data discovery.
+        """
+        SCAN_TYPE_UNSPECIFIED = 0
+        DATA_PROFILE = 1
+        DATA_QUALITY = 2
+        DATA_DISCOVERY = 4
+
+    class State(proto.Enum):
+        r"""The job state of the data scan.
+
+        Values:
+            STATE_UNSPECIFIED (0):
+                Unspecified job state.
+            STARTED (1):
+                Data scan job started.
+            SUCCEEDED (2):
+                Data scan job successfully completed.
+            FAILED (3):
+                Data scan job was unsuccessful.
+            CANCELLED (4):
+                Data scan job was cancelled.
+            CREATED (5):
+                Data scan job was created.
+        """
+        STATE_UNSPECIFIED = 0
+        STARTED = 1
+        SUCCEEDED = 2
+        FAILED = 3
+        CANCELLED = 4
+        CREATED = 5
+
+    class Trigger(proto.Enum):
+        r"""The trigger type for the data scan.
+
+        Values:
+            TRIGGER_UNSPECIFIED (0):
+                An unspecified trigger type.
+            ON_DEMAND (1):
+                Data scan triggers on demand.
+            SCHEDULE (2):
+                Data scan triggers as per schedule.
+        """
+        TRIGGER_UNSPECIFIED = 0
+        ON_DEMAND = 1
+        SCHEDULE = 2
+
+    class Scope(proto.Enum):
+        r"""The scope of job for the data scan.
+
+        Values:
+            SCOPE_UNSPECIFIED (0):
+                An unspecified scope type.
+            FULL (1):
+                Data scan runs on all of the data.
+            INCREMENTAL (2):
+                Data scan runs on incremental data.
+        """
+        SCOPE_UNSPECIFIED = 0
+        FULL = 1
+        INCREMENTAL = 2
+
+    class DataProfileResult(proto.Message):
+        r"""Data profile result for data scan job.
+
+        Attributes:
+            row_count (int):
+                The count of rows processed in the data scan
+                job.
+        """
+
+        row_count: int = proto.Field(
+            proto.INT64,
+            number=1,
+        )
+
+    class DataQualityResult(proto.Message):
+        r"""Data quality result for data scan job.
+
+        Attributes:
+            row_count (int):
+                The count of rows processed in the data scan
+                job.
+            passed (bool):
+                Whether the data quality result was ``pass`` or not.
+            dimension_passed (MutableMapping[str, bool]):
+                The result of each dimension for data quality result. The
+                key of the map is the name of the dimension. The value is
+                the bool value depicting whether the dimension result was
+                ``pass`` or not.
+            score (float):
+                The table-level data quality score for the data scan job.
+
+                The data quality score ranges between [0, 100] (up to two
+                decimal points).
+            dimension_score (MutableMapping[str, float]):
+                The score of each dimension for data quality result. The key
+                of the map is the name of the dimension. The value is the
+                data quality score for the dimension.
+
+                The score ranges between [0, 100] (up to two decimal
+                points).
+            column_score (MutableMapping[str, float]):
+                The score of each column scanned in the data scan job. The
+                key of the map is the name of the column.
The value is the
+                data quality score for the column.
+
+                The score ranges between [0, 100] (up to two decimal
+                points).
+        """
+
+        row_count: int = proto.Field(
+            proto.INT64,
+            number=1,
+        )
+        passed: bool = proto.Field(
+            proto.BOOL,
+            number=2,
+        )
+        dimension_passed: MutableMapping[str, bool] = proto.MapField(
+            proto.STRING,
+            proto.BOOL,
+            number=3,
+        )
+        score: float = proto.Field(
+            proto.FLOAT,
+            number=4,
+        )
+        dimension_score: MutableMapping[str, float] = proto.MapField(
+            proto.STRING,
+            proto.FLOAT,
+            number=5,
+        )
+        column_score: MutableMapping[str, float] = proto.MapField(
+            proto.STRING,
+            proto.FLOAT,
+            number=6,
+        )
+
+    class DataProfileAppliedConfigs(proto.Message):
+        r"""Applied configs for data profile type data scan job.
+
+        Attributes:
+            sampling_percent (float):
+                The percentage of the records selected from the dataset for
+                DataScan.
+
+                -  Value ranges between 0.0 and 100.0.
+                -  A value of 0.0 or 100.0 implies that sampling was not
+                   applied.
+            row_filter_applied (bool):
+                Boolean indicating whether a row filter was
+                applied in the DataScan job.
+            column_filter_applied (bool):
+                Boolean indicating whether a column filter
+                was applied in the DataScan job.
+        """
+
+        sampling_percent: float = proto.Field(
+            proto.FLOAT,
+            number=1,
+        )
+        row_filter_applied: bool = proto.Field(
+            proto.BOOL,
+            number=2,
+        )
+        column_filter_applied: bool = proto.Field(
+            proto.BOOL,
+            number=3,
+        )
+
+    class DataQualityAppliedConfigs(proto.Message):
+        r"""Applied configs for data quality type data scan job.
+
+        Attributes:
+            sampling_percent (float):
+                The percentage of the records selected from the dataset for
+                DataScan.
+
+                -  Value ranges between 0.0 and 100.0.
+                -  A value of 0.0 or 100.0 implies that sampling was not
+                   applied.
+            row_filter_applied (bool):
+                Boolean indicating whether a row filter was
+                applied in the DataScan job.
+        """
+
+        sampling_percent: float = proto.Field(
+            proto.FLOAT,
+            number=1,
+        )
+        row_filter_applied: bool = proto.Field(
+            proto.BOOL,
+            number=2,
+        )
+
+    class PostScanActionsResult(proto.Message):
+        r"""Post scan actions result for data scan job.
+
+        Attributes:
+            bigquery_export_result (google.cloud.dataplex_v1.types.DataScanEvent.PostScanActionsResult.BigQueryExportResult):
+                The result of BigQuery export post scan
+                action.
+        """
+
+        class BigQueryExportResult(proto.Message):
+            r"""The result of BigQuery export post scan action.
+
+            Attributes:
+                state (google.cloud.dataplex_v1.types.DataScanEvent.PostScanActionsResult.BigQueryExportResult.State):
+                    Execution state for the BigQuery exporting.
+                message (str):
+                    Additional information about the BigQuery
+                    exporting.
+            """
+            class State(proto.Enum):
+                r"""Execution state for the exporting.
+
+                Values:
+                    STATE_UNSPECIFIED (0):
+                        The exporting state is unspecified.
+                    SUCCEEDED (1):
+                        The exporting completed successfully.
+                    FAILED (2):
+                        The exporting is no longer running due to an
+                        error.
+                    SKIPPED (3):
+                        The exporting is skipped due to no valid scan
+                        result to export (usually caused by a failed
+                        scan).
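+
+                A check sketch (illustrative; ``event`` is assumed to
+                be a deserialized DataScanEvent):
+
+                .. code-block:: python
+
+                    from google.cloud import dataplex_v1
+
+                    Result = dataplex_v1.DataScanEvent.PostScanActionsResult
+                    State = Result.BigQueryExportResult.State
+
+                    export = event.post_scan_actions_result.bigquery_export_result
+                    if export.state == State.FAILED:
+                        # message carries detail about the failure.
+                        print("BigQuery export failed:", export.message)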
+ """ + STATE_UNSPECIFIED = 0 + SUCCEEDED = 1 + FAILED = 2 + SKIPPED = 3 + + state: 'DataScanEvent.PostScanActionsResult.BigQueryExportResult.State' = proto.Field( + proto.ENUM, + number=1, + enum='DataScanEvent.PostScanActionsResult.BigQueryExportResult.State', + ) + message: str = proto.Field( + proto.STRING, + number=2, + ) + + bigquery_export_result: 'DataScanEvent.PostScanActionsResult.BigQueryExportResult' = proto.Field( + proto.MESSAGE, + number=1, + message='DataScanEvent.PostScanActionsResult.BigQueryExportResult', + ) + + data_source: str = proto.Field( + proto.STRING, + number=1, + ) + job_id: str = proto.Field( + proto.STRING, + number=2, + ) + create_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=12, + message=timestamp_pb2.Timestamp, + ) + start_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=3, + message=timestamp_pb2.Timestamp, + ) + end_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=4, + message=timestamp_pb2.Timestamp, + ) + type_: ScanType = proto.Field( + proto.ENUM, + number=5, + enum=ScanType, + ) + state: State = proto.Field( + proto.ENUM, + number=6, + enum=State, + ) + message: str = proto.Field( + proto.STRING, + number=7, + ) + spec_version: str = proto.Field( + proto.STRING, + number=8, + ) + trigger: Trigger = proto.Field( + proto.ENUM, + number=9, + enum=Trigger, + ) + scope: Scope = proto.Field( + proto.ENUM, + number=10, + enum=Scope, + ) + data_profile: DataProfileResult = proto.Field( + proto.MESSAGE, + number=101, + oneof='result', + message=DataProfileResult, + ) + data_quality: DataQualityResult = proto.Field( + proto.MESSAGE, + number=102, + oneof='result', + message=DataQualityResult, + ) + data_profile_configs: DataProfileAppliedConfigs = proto.Field( + proto.MESSAGE, + number=201, + oneof='appliedConfigs', + message=DataProfileAppliedConfigs, + ) + data_quality_configs: DataQualityAppliedConfigs = proto.Field( + proto.MESSAGE, + number=202, + oneof='appliedConfigs', + message=DataQualityAppliedConfigs, + ) + post_scan_actions_result: PostScanActionsResult = proto.Field( + proto.MESSAGE, + number=11, + message=PostScanActionsResult, + ) + + +class DataQualityScanRuleResult(proto.Message): + r"""Information about the result of a data quality rule for data + quality scan. The monitored resource is 'DataScan'. + + Attributes: + job_id (str): + Identifier of the specific data scan job this + log entry is for. + data_source (str): + The data source of the data scan (e.g. + BigQuery table name). + column (str): + The column which this rule is evaluated + against. + rule_name (str): + The name of the data quality rule. + rule_type (google.cloud.dataplex_v1.types.DataQualityScanRuleResult.RuleType): + The type of the data quality rule. + evalution_type (google.cloud.dataplex_v1.types.DataQualityScanRuleResult.EvaluationType): + The evaluation type of the data quality rule. + rule_dimension (str): + The dimension of the data quality rule. + threshold_percent (float): + The passing threshold ([0.0, 100.0]) of the data quality + rule. + result (google.cloud.dataplex_v1.types.DataQualityScanRuleResult.Result): + The result of the data quality rule. + evaluated_row_count (int): + The number of rows evaluated against the data quality rule. + This field is only valid for rules of PER_ROW evaluation + type. + passed_row_count (int): + The number of rows which passed a rule evaluation. This + field is only valid for rules of PER_ROW evaluation type. 
+ null_row_count (int): + The number of rows with null values in the + specified column. + assertion_row_count (int): + The number of rows returned by the SQL + statement in a SQL assertion rule. This field is + only valid for SQL assertion rules. + """ + class RuleType(proto.Enum): + r"""The type of the data quality rule. + + Values: + RULE_TYPE_UNSPECIFIED (0): + An unspecified rule type. + NON_NULL_EXPECTATION (1): + See + [DataQualityRule.NonNullExpectation][google.cloud.dataplex.v1.DataQualityRule.NonNullExpectation]. + RANGE_EXPECTATION (2): + See + [DataQualityRule.RangeExpectation][google.cloud.dataplex.v1.DataQualityRule.RangeExpectation]. + REGEX_EXPECTATION (3): + See + [DataQualityRule.RegexExpectation][google.cloud.dataplex.v1.DataQualityRule.RegexExpectation]. + ROW_CONDITION_EXPECTATION (4): + See + [DataQualityRule.RowConditionExpectation][google.cloud.dataplex.v1.DataQualityRule.RowConditionExpectation]. + SET_EXPECTATION (5): + See + [DataQualityRule.SetExpectation][google.cloud.dataplex.v1.DataQualityRule.SetExpectation]. + STATISTIC_RANGE_EXPECTATION (6): + See + [DataQualityRule.StatisticRangeExpectation][google.cloud.dataplex.v1.DataQualityRule.StatisticRangeExpectation]. + TABLE_CONDITION_EXPECTATION (7): + See + [DataQualityRule.TableConditionExpectation][google.cloud.dataplex.v1.DataQualityRule.TableConditionExpectation]. + UNIQUENESS_EXPECTATION (8): + See + [DataQualityRule.UniquenessExpectation][google.cloud.dataplex.v1.DataQualityRule.UniquenessExpectation]. + SQL_ASSERTION (9): + See + [DataQualityRule.SqlAssertion][google.cloud.dataplex.v1.DataQualityRule.SqlAssertion]. + """ + RULE_TYPE_UNSPECIFIED = 0 + NON_NULL_EXPECTATION = 1 + RANGE_EXPECTATION = 2 + REGEX_EXPECTATION = 3 + ROW_CONDITION_EXPECTATION = 4 + SET_EXPECTATION = 5 + STATISTIC_RANGE_EXPECTATION = 6 + TABLE_CONDITION_EXPECTATION = 7 + UNIQUENESS_EXPECTATION = 8 + SQL_ASSERTION = 9 + + class EvaluationType(proto.Enum): + r"""The evaluation type of the data quality rule. + + Values: + EVALUATION_TYPE_UNSPECIFIED (0): + An unspecified evaluation type. + PER_ROW (1): + The rule evaluation is done at per row level. + AGGREGATE (2): + The rule evaluation is done for an aggregate + of rows. + """ + EVALUATION_TYPE_UNSPECIFIED = 0 + PER_ROW = 1 + AGGREGATE = 2 + + class Result(proto.Enum): + r"""Whether the data quality rule passed or failed. + + Values: + RESULT_UNSPECIFIED (0): + An unspecified result. + PASSED (1): + The data quality rule passed. + FAILED (2): + The data quality rule failed. 
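+
+        A retrieval sketch (illustrative; uses the separate
+        ``google-cloud-logging`` client, and the filter string and job ID
+        below are assumptions, not taken from this module):
+
+        .. code-block:: python
+
+            from google.cloud import logging
+
+            client = logging.Client(project="my-project")
+            log_filter = (
+                'logName:"data_quality_scan_rule_result"'
+                ' AND jsonPayload.jobId="my-job-id"'
+            )
+
+            failed = 0
+            for entry in client.list_entries(filter_=log_filter):
+                # Structured entries expose the payload as a dict-like
+                # object keyed by JSON field names.
+                if entry.payload.get("result") == "FAILED":
+                    failed += 1
+            print(failed, "rules failed")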
+ """ + RESULT_UNSPECIFIED = 0 + PASSED = 1 + FAILED = 2 + + job_id: str = proto.Field( + proto.STRING, + number=1, + ) + data_source: str = proto.Field( + proto.STRING, + number=2, + ) + column: str = proto.Field( + proto.STRING, + number=3, + ) + rule_name: str = proto.Field( + proto.STRING, + number=4, + ) + rule_type: RuleType = proto.Field( + proto.ENUM, + number=5, + enum=RuleType, + ) + evalution_type: EvaluationType = proto.Field( + proto.ENUM, + number=6, + enum=EvaluationType, + ) + rule_dimension: str = proto.Field( + proto.STRING, + number=7, + ) + threshold_percent: float = proto.Field( + proto.DOUBLE, + number=8, + ) + result: Result = proto.Field( + proto.ENUM, + number=9, + enum=Result, + ) + evaluated_row_count: int = proto.Field( + proto.INT64, + number=10, + ) + passed_row_count: int = proto.Field( + proto.INT64, + number=11, + ) + null_row_count: int = proto.Field( + proto.INT64, + number=12, + ) + assertion_row_count: int = proto.Field( + proto.INT64, + number=13, + ) + + +__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/types/metadata_.py b/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/types/metadata_.py new file mode 100644 index 000000000000..8d977d572bf0 --- /dev/null +++ b/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/types/metadata_.py @@ -0,0 +1,1182 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from __future__ import annotations + +from typing import MutableMapping, MutableSequence + +import proto # type: ignore + +from google.protobuf import timestamp_pb2 # type: ignore + + +__protobuf__ = proto.module( + package='google.cloud.dataplex.v1', + manifest={ + 'StorageSystem', + 'CreateEntityRequest', + 'UpdateEntityRequest', + 'DeleteEntityRequest', + 'ListEntitiesRequest', + 'ListEntitiesResponse', + 'GetEntityRequest', + 'ListPartitionsRequest', + 'CreatePartitionRequest', + 'DeletePartitionRequest', + 'ListPartitionsResponse', + 'GetPartitionRequest', + 'Entity', + 'Partition', + 'Schema', + 'StorageFormat', + 'StorageAccess', + }, +) + + +class StorageSystem(proto.Enum): + r"""Identifies the cloud system that manages the data storage. + + Values: + STORAGE_SYSTEM_UNSPECIFIED (0): + Storage system unspecified. + CLOUD_STORAGE (1): + The entity data is contained within a Cloud + Storage bucket. + BIGQUERY (2): + The entity data is contained within a + BigQuery dataset. + """ + STORAGE_SYSTEM_UNSPECIFIED = 0 + CLOUD_STORAGE = 1 + BIGQUERY = 2 + + +class CreateEntityRequest(proto.Message): + r"""Create a metadata entity request. + + Attributes: + parent (str): + Required. The resource name of the parent zone: + ``projects/{project_number}/locations/{location_id}/lakes/{lake_id}/zones/{zone_id}``. + entity (google.cloud.dataplex_v1.types.Entity): + Required. Entity resource. + validate_only (bool): + Optional. Only validate the request, but do + not perform mutations. The default is false. 
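+
+    A creation sketch (illustrative; resource names and field values are
+    placeholders, and only a subset of Entity fields is shown):
+
+    .. code-block:: python
+
+        from google.cloud import dataplex_v1
+
+        client = dataplex_v1.MetadataServiceClient()
+        entity = dataplex_v1.Entity(
+            id="orders",
+            type_=dataplex_v1.Entity.Type.TABLE,
+            asset="raw-asset",
+            data_path="gs://my-bucket/orders",
+            system=dataplex_v1.StorageSystem.CLOUD_STORAGE,
+            format_=dataplex_v1.StorageFormat(mime_type="application/x-parquet"),
+            schema=dataplex_v1.Schema(user_managed=True),
+        )
+        request = dataplex_v1.CreateEntityRequest(
+            parent="projects/123/locations/us-central1/lakes/my-lake/zones/raw",
+            entity=entity,
+            validate_only=True,  # dry-run first; set False to persist
+        )
+        created = client.create_entity(request=request)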
+    """
+
+    parent: str = proto.Field(
+        proto.STRING,
+        number=1,
+    )
+    entity: 'Entity' = proto.Field(
+        proto.MESSAGE,
+        number=3,
+        message='Entity',
+    )
+    validate_only: bool = proto.Field(
+        proto.BOOL,
+        number=4,
+    )
+
+
+class UpdateEntityRequest(proto.Message):
+    r"""Update a metadata entity request.
+    The existing entity will be fully replaced by the entity in the
+    request. The entity ID is mutable. To modify the ID, use the
+    current entity ID in the request URL and specify the new ID in
+    the request body.
+
+    Attributes:
+        entity (google.cloud.dataplex_v1.types.Entity):
+            Required. Update description.
+        validate_only (bool):
+            Optional. Only validate the request, but do
+            not perform mutations. The default is false.
+    """
+
+    entity: 'Entity' = proto.Field(
+        proto.MESSAGE,
+        number=2,
+        message='Entity',
+    )
+    validate_only: bool = proto.Field(
+        proto.BOOL,
+        number=3,
+    )
+
+
+class DeleteEntityRequest(proto.Message):
+    r"""Delete a metadata entity request.
+
+    Attributes:
+        name (str):
+            Required. The resource name of the entity:
+            ``projects/{project_number}/locations/{location_id}/lakes/{lake_id}/zones/{zone_id}/entities/{entity_id}``.
+        etag (str):
+            Required. The etag associated with the entity, which can be
+            retrieved with a [GetEntity][] request.
+    """
+
+    name: str = proto.Field(
+        proto.STRING,
+        number=1,
+    )
+    etag: str = proto.Field(
+        proto.STRING,
+        number=2,
+    )
+
+
+class ListEntitiesRequest(proto.Message):
+    r"""List metadata entities request.
+
+    Attributes:
+        parent (str):
+            Required. The resource name of the parent zone:
+            ``projects/{project_number}/locations/{location_id}/lakes/{lake_id}/zones/{zone_id}``.
+        view (google.cloud.dataplex_v1.types.ListEntitiesRequest.EntityView):
+            Required. Specify the entity view to make a
+            partial list request.
+        page_size (int):
+            Optional. Maximum number of entities to
+            return. The service may return fewer than this
+            value. If unspecified, 100 entities will be
+            returned by default. The maximum value is 500;
+            larger values will be truncated to 500.
+        page_token (str):
+            Optional. Page token received from a previous
+            ``ListEntities`` call. Provide this to retrieve the
+            subsequent page. When paginating, all other parameters
+            provided to ``ListEntities`` must match the call that
+            provided the page token.
+        filter (str):
+            Optional. The following filter parameters can be added to
+            the URL to limit the entities returned by the API:
+
+            -  Entity ID: ?filter="id=entityID"
+            -  Asset ID: ?filter="asset=assetID"
+            -  Data path: ?filter="data_path=gs://my-bucket"
+            -  Is HIVE compatible: ?filter="hive_compatible=true"
+            -  Is BigQuery compatible:
+               ?filter="bigquery_compatible=true".
+    """
+    class EntityView(proto.Enum):
+        r"""Entity views.
+
+        Values:
+            ENTITY_VIEW_UNSPECIFIED (0):
+                The default unset value. Return both table
+                and fileset entities if unspecified.
+            TABLES (1):
+                Only list table entities.
+            FILESETS (2):
+                Only list fileset entities.
+        """
+        ENTITY_VIEW_UNSPECIFIED = 0
+        TABLES = 1
+        FILESETS = 2
+
+    parent: str = proto.Field(
+        proto.STRING,
+        number=1,
+    )
+    view: EntityView = proto.Field(
+        proto.ENUM,
+        number=2,
+        enum=EntityView,
+    )
+    page_size: int = proto.Field(
+        proto.INT32,
+        number=3,
+    )
+    page_token: str = proto.Field(
+        proto.STRING,
+        number=4,
+    )
+    filter: str = proto.Field(
+        proto.STRING,
+        number=5,
+    )
+
+
+class ListEntitiesResponse(proto.Message):
+    r"""List metadata entities response.
+
+ Attributes:
+ entities (MutableSequence[google.cloud.dataplex_v1.types.Entity]):
+ Entities in the specified parent zone.
+ next_page_token (str):
+ Token to retrieve the next page of results,
+ or empty if there are no remaining results in
+ the list.
+ """
+
+ @property
+ def raw_page(self):
+ return self
+
+ entities: MutableSequence['Entity'] = proto.RepeatedField(
+ proto.MESSAGE,
+ number=1,
+ message='Entity',
+ )
+ next_page_token: str = proto.Field(
+ proto.STRING,
+ number=2,
+ )
+
+
+class GetEntityRequest(proto.Message):
+ r"""Get metadata entity request.
+
+ Attributes:
+ name (str):
+ Required. The resource name of the entity:
+ ``projects/{project_number}/locations/{location_id}/lakes/{lake_id}/zones/{zone_id}/entities/{entity_id}``.
+ view (google.cloud.dataplex_v1.types.GetEntityRequest.EntityView):
+ Optional. Used to select the subset of entity information to
+ return. Defaults to ``BASIC``.
+ """
+ class EntityView(proto.Enum):
+ r"""Entity views for get entity partial result.
+
+ Values:
+ ENTITY_VIEW_UNSPECIFIED (0):
+ The API will default to the ``BASIC`` view.
+ BASIC (1):
+ Minimal view that does not include the
+ schema.
+ SCHEMA (2):
+ Include basic information and schema.
+ FULL (4):
+ Include everything. Currently, this is the
+ same as the SCHEMA view.
+ """
+ ENTITY_VIEW_UNSPECIFIED = 0
+ BASIC = 1
+ SCHEMA = 2
+ FULL = 4
+
+ name: str = proto.Field(
+ proto.STRING,
+ number=1,
+ )
+ view: EntityView = proto.Field(
+ proto.ENUM,
+ number=2,
+ enum=EntityView,
+ )
+
+
+class ListPartitionsRequest(proto.Message):
+ r"""List metadata partitions request.
+
+ Attributes:
+ parent (str):
+ Required. The resource name of the parent entity:
+ ``projects/{project_number}/locations/{location_id}/lakes/{lake_id}/zones/{zone_id}/entities/{entity_id}``.
+ page_size (int):
+ Optional. Maximum number of partitions to
+ return. The service may return fewer than this
+ value. If unspecified, 100 partitions will be
+ returned by default. The maximum page size is
+ 500; larger values will be truncated to
+ 500.
+ page_token (str):
+ Optional. Page token received from a previous
+ ``ListPartitions`` call. Provide this to retrieve the
+ subsequent page. When paginating, all other parameters
+ provided to ``ListPartitions`` must match the call that
+ provided the page token.
+ filter (str):
+ Optional. Filter the partitions returned to the caller using
+ a key value pair expression. Supported operators and syntax:
+
+ - logic operators: AND, OR
+ - comparison operators: <, >, >=, <=, =, !=
+ - LIKE operators:
+
+ - The right hand of a LIKE operator supports "." and "*"
+ for wildcard searches, for example
+ ``value1 LIKE ".*oo.*"``
+
+ - parenthetical grouping: ( )
+
+ Sample filter expression:
+ ``?filter="key1 < value1 OR key2 > value2"``
+
+ **Notes:**
+
+ - Keys to the left of operators are case insensitive.
+ - Partition results are sorted first by creation time, then
+ by lexicographic order.
+ - Up to 20 key value filter pairs are allowed, but due to
+ performance considerations, only the first 10 will be
+ used as a filter.
+ """
+
+ parent: str = proto.Field(
+ proto.STRING,
+ number=1,
+ )
+ page_size: int = proto.Field(
+ proto.INT32,
+ number=2,
+ )
+ page_token: str = proto.Field(
+ proto.STRING,
+ number=3,
+ )
+ filter: str = proto.Field(
+ proto.STRING,
+ number=4,
+ )
+
+
+class CreatePartitionRequest(proto.Message):
+ r"""Create metadata partition request.
+
+ Attributes:
+ parent (str):
+ Required.
The resource name of the parent zone: + ``projects/{project_number}/locations/{location_id}/lakes/{lake_id}/zones/{zone_id}/entities/{entity_id}``. + partition (google.cloud.dataplex_v1.types.Partition): + Required. Partition resource. + validate_only (bool): + Optional. Only validate the request, but do + not perform mutations. The default is false. + """ + + parent: str = proto.Field( + proto.STRING, + number=1, + ) + partition: 'Partition' = proto.Field( + proto.MESSAGE, + number=3, + message='Partition', + ) + validate_only: bool = proto.Field( + proto.BOOL, + number=4, + ) + + +class DeletePartitionRequest(proto.Message): + r"""Delete metadata partition request. + + Attributes: + name (str): + Required. The resource name of the partition. format: + ``projects/{project_number}/locations/{location_id}/lakes/{lake_id}/zones/{zone_id}/entities/{entity_id}/partitions/{partition_value_path}``. + The {partition_value_path} segment consists of an ordered + sequence of partition values separated by "/". All values + must be provided. + etag (str): + Optional. The etag associated with the + partition. + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + etag: str = proto.Field( + proto.STRING, + number=2, + ) + + +class ListPartitionsResponse(proto.Message): + r"""List metadata partitions response. + + Attributes: + partitions (MutableSequence[google.cloud.dataplex_v1.types.Partition]): + Partitions under the specified parent entity. + next_page_token (str): + Token to retrieve the next page of results, + or empty if there are no remaining results in + the list. + """ + + @property + def raw_page(self): + return self + + partitions: MutableSequence['Partition'] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message='Partition', + ) + next_page_token: str = proto.Field( + proto.STRING, + number=2, + ) + + +class GetPartitionRequest(proto.Message): + r"""Get metadata partition request. + + Attributes: + name (str): + Required. The resource name of the partition: + ``projects/{project_number}/locations/{location_id}/lakes/{lake_id}/zones/{zone_id}/entities/{entity_id}/partitions/{partition_value_path}``. + The {partition_value_path} segment consists of an ordered + sequence of partition values separated by "/". All values + must be provided. + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + + +class Entity(proto.Message): + r"""Represents tables and fileset metadata contained within a + zone. + + Attributes: + name (str): + Output only. The resource name of the entity, of the form: + ``projects/{project_number}/locations/{location_id}/lakes/{lake_id}/zones/{zone_id}/entities/{id}``. + display_name (str): + Optional. Display name must be shorter than + or equal to 256 characters. + description (str): + Optional. User friendly longer description + text. Must be shorter than or equal to 1024 + characters. + create_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. The time when the entity was + created. + update_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. The time when the entity was + last updated. + id (str): + Required. A user-provided entity ID. It is + mutable, and will be used as the published table + name. Specifying a new ID in an update entity + request will override the existing value. + The ID must contain only letters (a-z, A-Z), + numbers (0-9), and underscores, and consist of + 256 or fewer characters. + etag (str): + Optional. 
The etag associated with the entity, which can be
+ retrieved with a [GetEntity][] request. Required for update
+ and delete requests.
+ type_ (google.cloud.dataplex_v1.types.Entity.Type):
+ Required. Immutable. The type of entity.
+ asset (str):
+ Required. Immutable. The ID of the asset
+ associated with the storage location containing
+ the entity data. The entity must be within the
+ same zone as the asset.
+ data_path (str):
+ Required. Immutable. The storage path of the entity data.
+ For Cloud Storage data, this is the fully-qualified path to
+ the entity, such as ``gs://bucket/path/to/data``. For
+ BigQuery data, this is the name of the table resource, such
+ as
+ ``projects/project_id/datasets/dataset_id/tables/table_id``.
+ data_path_pattern (str):
+ Optional. The set of items within the data path constituting
+ the data in the entity, represented as a glob path. Example:
+ ``gs://bucket/path/to/data/**/*.csv``.
+ catalog_entry (str):
+ Output only. The name of the associated Data
+ Catalog entry.
+ system (google.cloud.dataplex_v1.types.StorageSystem):
+ Required. Immutable. Identifies the storage
+ system of the entity data.
+ format_ (google.cloud.dataplex_v1.types.StorageFormat):
+ Required. Identifies the storage format of
+ the entity data. It does not apply to entities
+ with data stored in BigQuery.
+ compatibility (google.cloud.dataplex_v1.types.Entity.CompatibilityStatus):
+ Output only. Metadata stores that the entity
+ is compatible with.
+ access (google.cloud.dataplex_v1.types.StorageAccess):
+ Output only. Identifies the access mechanism
+ to the entity. Not user settable.
+ uid (str):
+ Output only. System generated unique ID for
+ the Entity. This ID will be different if the
+ Entity is deleted and re-created with the same
+ name.
+ schema (google.cloud.dataplex_v1.types.Schema):
+ Required. The description of the data structure and layout.
+ The schema is not included in list responses. It is only
+ included in ``SCHEMA`` and ``FULL`` entity views of a
+ ``GetEntity`` response.
+ """
+ class Type(proto.Enum):
+ r"""The type of entity.
+
+ Values:
+ TYPE_UNSPECIFIED (0):
+ Type unspecified.
+ TABLE (1):
+ Structured and semi-structured data.
+ FILESET (2):
+ Unstructured data.
+ """
+ TYPE_UNSPECIFIED = 0
+ TABLE = 1
+ FILESET = 2
+
+ class CompatibilityStatus(proto.Message):
+ r"""Provides compatibility information for various metadata
+ stores.
+
+ Attributes:
+ hive_metastore (google.cloud.dataplex_v1.types.Entity.CompatibilityStatus.Compatibility):
+ Output only. Whether this entity is
+ compatible with Hive Metastore.
+ bigquery (google.cloud.dataplex_v1.types.Entity.CompatibilityStatus.Compatibility):
+ Output only. Whether this entity is
+ compatible with BigQuery.
+ """
+
+ class Compatibility(proto.Message):
+ r"""Provides compatibility information for a specific metadata
+ store.
+
+ Attributes:
+ compatible (bool):
+ Output only. Whether the entity is compatible
+ and can be represented in the metadata store.
+ reason (str):
+ Output only. Provides additional detail if
+ the entity is incompatible with the metadata
+ store.
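+
+ Read-only in practice; a small inspection sketch (hedged;
+ ``entity`` is assumed to come from a prior ``get_entity`` call)::
+
+     status = entity.compatibility
+     if not status.bigquery.compatible:
+         print("BigQuery-incompatible:", status.bigquery.reason)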
+ """ + + compatible: bool = proto.Field( + proto.BOOL, + number=1, + ) + reason: str = proto.Field( + proto.STRING, + number=2, + ) + + hive_metastore: 'Entity.CompatibilityStatus.Compatibility' = proto.Field( + proto.MESSAGE, + number=1, + message='Entity.CompatibilityStatus.Compatibility', + ) + bigquery: 'Entity.CompatibilityStatus.Compatibility' = proto.Field( + proto.MESSAGE, + number=2, + message='Entity.CompatibilityStatus.Compatibility', + ) + + name: str = proto.Field( + proto.STRING, + number=1, + ) + display_name: str = proto.Field( + proto.STRING, + number=2, + ) + description: str = proto.Field( + proto.STRING, + number=3, + ) + create_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=5, + message=timestamp_pb2.Timestamp, + ) + update_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=6, + message=timestamp_pb2.Timestamp, + ) + id: str = proto.Field( + proto.STRING, + number=7, + ) + etag: str = proto.Field( + proto.STRING, + number=8, + ) + type_: Type = proto.Field( + proto.ENUM, + number=10, + enum=Type, + ) + asset: str = proto.Field( + proto.STRING, + number=11, + ) + data_path: str = proto.Field( + proto.STRING, + number=12, + ) + data_path_pattern: str = proto.Field( + proto.STRING, + number=13, + ) + catalog_entry: str = proto.Field( + proto.STRING, + number=14, + ) + system: 'StorageSystem' = proto.Field( + proto.ENUM, + number=15, + enum='StorageSystem', + ) + format_: 'StorageFormat' = proto.Field( + proto.MESSAGE, + number=16, + message='StorageFormat', + ) + compatibility: CompatibilityStatus = proto.Field( + proto.MESSAGE, + number=19, + message=CompatibilityStatus, + ) + access: 'StorageAccess' = proto.Field( + proto.MESSAGE, + number=21, + message='StorageAccess', + ) + uid: str = proto.Field( + proto.STRING, + number=22, + ) + schema: 'Schema' = proto.Field( + proto.MESSAGE, + number=50, + message='Schema', + ) + + +class Partition(proto.Message): + r"""Represents partition metadata contained within entity + instances. + + Attributes: + name (str): + Output only. Partition values used in the HTTP URL must be + double encoded. For example, + ``url_encode(url_encode(value))`` can be used to encode + "US:CA/CA#Sunnyvale so that the request URL ends with + "/partitions/US%253ACA/CA%2523Sunnyvale". The name field in + the response retains the encoded format. + values (MutableSequence[str]): + Required. Immutable. The set of values + representing the partition, which correspond to + the partition schema defined in the parent + entity. + location (str): + Required. Immutable. The location of the entity data within + the partition, for example, + ``gs://bucket/path/to/entity/key1=value1/key2=value2``. Or + ``projects//datasets//tables/`` + etag (str): + Optional. The etag for this partition. + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + values: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=2, + ) + location: str = proto.Field( + proto.STRING, + number=3, + ) + etag: str = proto.Field( + proto.STRING, + number=4, + ) + + +class Schema(proto.Message): + r"""Schema information describing the structure and layout of the + data. + + Attributes: + user_managed (bool): + Required. Set to ``true`` if user-managed or ``false`` if + managed by Dataplex. The default is ``false`` (managed by + Dataplex). + + - Set to ``false``\ to enable Dataplex discovery to update + the schema. including new data discovery, schema + inference, and schema evolution. 
Users retain the ability + to input and edit the schema. Dataplex treats schema + input by the user as though produced by a previous + Dataplex discovery operation, and it will evolve the + schema and take action based on that treatment. + + - Set to ``true`` to fully manage the entity schema. This + setting guarantees that Dataplex will not change schema + fields. + fields (MutableSequence[google.cloud.dataplex_v1.types.Schema.SchemaField]): + Optional. The sequence of fields describing data in table + entities. **Note:** BigQuery SchemaFields are immutable. + partition_fields (MutableSequence[google.cloud.dataplex_v1.types.Schema.PartitionField]): + Optional. The sequence of fields describing + the partition structure in entities. If this + field is empty, there are no partitions within + the data. + partition_style (google.cloud.dataplex_v1.types.Schema.PartitionStyle): + Optional. The structure of paths containing + partition data within the entity. + """ + class Type(proto.Enum): + r"""Type information for fields in schemas and partition schemas. + + Values: + TYPE_UNSPECIFIED (0): + SchemaType unspecified. + BOOLEAN (1): + Boolean field. + BYTE (2): + Single byte numeric field. + INT16 (3): + 16-bit numeric field. + INT32 (4): + 32-bit numeric field. + INT64 (5): + 64-bit numeric field. + FLOAT (6): + Floating point numeric field. + DOUBLE (7): + Double precision numeric field. + DECIMAL (8): + Real value numeric field. + STRING (9): + Sequence of characters field. + BINARY (10): + Sequence of bytes field. + TIMESTAMP (11): + Date and time field. + DATE (12): + Date field. + TIME (13): + Time field. + RECORD (14): + Structured field. Nested fields that define + the structure of the map. If all nested fields + are nullable, this field represents a union. + NULL (100): + Null field that does not have values. + """ + TYPE_UNSPECIFIED = 0 + BOOLEAN = 1 + BYTE = 2 + INT16 = 3 + INT32 = 4 + INT64 = 5 + FLOAT = 6 + DOUBLE = 7 + DECIMAL = 8 + STRING = 9 + BINARY = 10 + TIMESTAMP = 11 + DATE = 12 + TIME = 13 + RECORD = 14 + NULL = 100 + + class Mode(proto.Enum): + r"""Additional qualifiers to define field semantics. + + Values: + MODE_UNSPECIFIED (0): + Mode unspecified. + REQUIRED (1): + The field has required semantics. + NULLABLE (2): + The field has optional semantics, and may be + null. + REPEATED (3): + The field has repeated (0 or more) semantics, + and is a list of values. + """ + MODE_UNSPECIFIED = 0 + REQUIRED = 1 + NULLABLE = 2 + REPEATED = 3 + + class PartitionStyle(proto.Enum): + r"""The structure of paths within the entity, which represent + partitions. + + Values: + PARTITION_STYLE_UNSPECIFIED (0): + PartitionStyle unspecified + HIVE_COMPATIBLE (1): + Partitions are hive-compatible. Examples: + ``gs://bucket/path/to/table/dt=2019-10-31/lang=en``, + ``gs://bucket/path/to/table/dt=2019-10-31/lang=en/late``. + """ + PARTITION_STYLE_UNSPECIFIED = 0 + HIVE_COMPATIBLE = 1 + + class SchemaField(proto.Message): + r"""Represents a column field within a table schema. + + Attributes: + name (str): + Required. The name of the field. Must contain + only letters, numbers and underscores, with a + maximum length of 767 characters, and must begin + with a letter or underscore. + description (str): + Optional. User friendly field description. + Must be less than or equal to 1024 characters. + type_ (google.cloud.dataplex_v1.types.Schema.Type): + Required. The type of field. + mode (google.cloud.dataplex_v1.types.Schema.Mode): + Required. Additional field semantics. 
+ fields (MutableSequence[google.cloud.dataplex_v1.types.Schema.SchemaField]):
+ Optional. Any nested field for complex types.
+ """
+
+ name: str = proto.Field(
+ proto.STRING,
+ number=1,
+ )
+ description: str = proto.Field(
+ proto.STRING,
+ number=2,
+ )
+ type_: 'Schema.Type' = proto.Field(
+ proto.ENUM,
+ number=3,
+ enum='Schema.Type',
+ )
+ mode: 'Schema.Mode' = proto.Field(
+ proto.ENUM,
+ number=4,
+ enum='Schema.Mode',
+ )
+ fields: MutableSequence['Schema.SchemaField'] = proto.RepeatedField(
+ proto.MESSAGE,
+ number=10,
+ message='Schema.SchemaField',
+ )
+
+ class PartitionField(proto.Message):
+ r"""Represents a key field within the entity's partition structure. You
+ can have up to 20 partition fields, but only the first 10 can
+ be used for filtering, due to performance
+ considerations. **Note:** Partition fields are immutable.
+
+ Attributes:
+ name (str):
+ Required. Partition field name must consist
+ of letters, numbers, and underscores only, with
+ a maximum length of 256 characters, and must
+ begin with a letter or underscore.
+ type_ (google.cloud.dataplex_v1.types.Schema.Type):
+ Required. Immutable. The type of field.
+ """
+
+ name: str = proto.Field(
+ proto.STRING,
+ number=1,
+ )
+ type_: 'Schema.Type' = proto.Field(
+ proto.ENUM,
+ number=2,
+ enum='Schema.Type',
+ )
+
+ user_managed: bool = proto.Field(
+ proto.BOOL,
+ number=1,
+ )
+ fields: MutableSequence[SchemaField] = proto.RepeatedField(
+ proto.MESSAGE,
+ number=2,
+ message=SchemaField,
+ )
+ partition_fields: MutableSequence[PartitionField] = proto.RepeatedField(
+ proto.MESSAGE,
+ number=3,
+ message=PartitionField,
+ )
+ partition_style: PartitionStyle = proto.Field(
+ proto.ENUM,
+ number=4,
+ enum=PartitionStyle,
+ )
+
+
+class StorageFormat(proto.Message):
+ r"""Describes the format of the data within its storage location.
+
+ This message has `oneof`_ fields (mutually exclusive fields).
+ For each oneof, at most one member field can be set at the same time.
+ Setting any member of the oneof automatically clears all other
+ members.
+
+ .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields
+
+ Attributes:
+ format_ (google.cloud.dataplex_v1.types.StorageFormat.Format):
+ Output only. The data format associated with
+ the stored data, which represents content type
+ values. The value is inferred from mime type.
+ compression_format (google.cloud.dataplex_v1.types.StorageFormat.CompressionFormat):
+ Optional. The compression type associated
+ with the stored data. If unspecified, the data
+ is uncompressed.
+ mime_type (str):
+ Required. The mime type descriptor for the
+ data. Must match the pattern {type}/{subtype}.
+ Supported values:
+
+ - application/x-parquet
+ - application/x-avro
+ - application/x-orc
+ - application/x-tfrecord
+ - application/x-parquet+iceberg
+ - application/x-avro+iceberg
+ - application/x-orc+iceberg
+ - application/json
+ - application/{subtypes}
+ - text/csv
+ - text/
+ - image/{image subtype}
+ - video/{video subtype}
+ - audio/{audio subtype}
+ csv (google.cloud.dataplex_v1.types.StorageFormat.CsvOptions):
+ Optional. Additional information about CSV
+ formatted data.
+
+ This field is a member of `oneof`_ ``options``.
+ json (google.cloud.dataplex_v1.types.StorageFormat.JsonOptions):
+ Optional. Additional information about JSON
+ formatted data.
+
+ This field is a member of `oneof`_ ``options``.
+ iceberg (google.cloud.dataplex_v1.types.StorageFormat.IcebergOptions):
+ Optional.
Additional information about + iceberg tables. + + This field is a member of `oneof`_ ``options``. + """ + class Format(proto.Enum): + r"""The specific file format of the data. + + Values: + FORMAT_UNSPECIFIED (0): + Format unspecified. + PARQUET (1): + Parquet-formatted structured data. + AVRO (2): + Avro-formatted structured data. + ORC (3): + Orc-formatted structured data. + CSV (100): + Csv-formatted semi-structured data. + JSON (101): + Json-formatted semi-structured data. + IMAGE (200): + Image data formats (such as jpg and png). + AUDIO (201): + Audio data formats (such as mp3, and wav). + VIDEO (202): + Video data formats (such as mp4 and mpg). + TEXT (203): + Textual data formats (such as txt and xml). + TFRECORD (204): + TensorFlow record format. + OTHER (1000): + Data that doesn't match a specific format. + UNKNOWN (1001): + Data of an unknown format. + """ + FORMAT_UNSPECIFIED = 0 + PARQUET = 1 + AVRO = 2 + ORC = 3 + CSV = 100 + JSON = 101 + IMAGE = 200 + AUDIO = 201 + VIDEO = 202 + TEXT = 203 + TFRECORD = 204 + OTHER = 1000 + UNKNOWN = 1001 + + class CompressionFormat(proto.Enum): + r"""The specific compressed file format of the data. + + Values: + COMPRESSION_FORMAT_UNSPECIFIED (0): + CompressionFormat unspecified. Implies + uncompressed data. + GZIP (2): + GZip compressed set of files. + BZIP2 (3): + BZip2 compressed set of files. + """ + COMPRESSION_FORMAT_UNSPECIFIED = 0 + GZIP = 2 + BZIP2 = 3 + + class CsvOptions(proto.Message): + r"""Describes CSV and similar semi-structured data formats. + + Attributes: + encoding (str): + Optional. The character encoding of the data. + Accepts "US-ASCII", "UTF-8", and "ISO-8859-1". + Defaults to UTF-8 if unspecified. + header_rows (int): + Optional. The number of rows to interpret as + header rows that should be skipped when reading + data rows. Defaults to 0. + delimiter (str): + Optional. The delimiter used to separate + values. Defaults to ','. + quote (str): + Optional. The character used to quote column + values. Accepts '"' (double quotation mark) or + ''' (single quotation mark). Defaults to '"' + (double quotation mark) if unspecified. + """ + + encoding: str = proto.Field( + proto.STRING, + number=1, + ) + header_rows: int = proto.Field( + proto.INT32, + number=2, + ) + delimiter: str = proto.Field( + proto.STRING, + number=3, + ) + quote: str = proto.Field( + proto.STRING, + number=4, + ) + + class JsonOptions(proto.Message): + r"""Describes JSON data format. + + Attributes: + encoding (str): + Optional. The character encoding of the data. + Accepts "US-ASCII", "UTF-8" and "ISO-8859-1". + Defaults to UTF-8 if not specified. + """ + + encoding: str = proto.Field( + proto.STRING, + number=1, + ) + + class IcebergOptions(proto.Message): + r"""Describes Iceberg data format. + + Attributes: + metadata_location (str): + Optional. 
The location of where the iceberg + metadata is present, must be within the table + path + """ + + metadata_location: str = proto.Field( + proto.STRING, + number=1, + ) + + format_: Format = proto.Field( + proto.ENUM, + number=1, + enum=Format, + ) + compression_format: CompressionFormat = proto.Field( + proto.ENUM, + number=2, + enum=CompressionFormat, + ) + mime_type: str = proto.Field( + proto.STRING, + number=3, + ) + csv: CsvOptions = proto.Field( + proto.MESSAGE, + number=10, + oneof='options', + message=CsvOptions, + ) + json: JsonOptions = proto.Field( + proto.MESSAGE, + number=11, + oneof='options', + message=JsonOptions, + ) + iceberg: IcebergOptions = proto.Field( + proto.MESSAGE, + number=12, + oneof='options', + message=IcebergOptions, + ) + + +class StorageAccess(proto.Message): + r"""Describes the access mechanism of the data within its storage + location. + + Attributes: + read (google.cloud.dataplex_v1.types.StorageAccess.AccessMode): + Output only. Describes the read access + mechanism of the data. Not user settable. + """ + class AccessMode(proto.Enum): + r"""Access Mode determines how data stored within the Entity is + read. + + Values: + ACCESS_MODE_UNSPECIFIED (0): + Access mode unspecified. + DIRECT (1): + Default. Data is accessed directly using + storage APIs. + MANAGED (2): + Data is accessed through a managed interface + using BigQuery APIs. + """ + ACCESS_MODE_UNSPECIFIED = 0 + DIRECT = 1 + MANAGED = 2 + + read: AccessMode = proto.Field( + proto.ENUM, + number=21, + enum=AccessMode, + ) + + +__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/types/processing.py b/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/types/processing.py new file mode 100644 index 000000000000..4e88739fa5ce --- /dev/null +++ b/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/types/processing.py @@ -0,0 +1,186 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from __future__ import annotations + +from typing import MutableMapping, MutableSequence + +import proto # type: ignore + + +__protobuf__ = proto.module( + package='google.cloud.dataplex.v1', + manifest={ + 'Trigger', + 'DataSource', + 'ScannedData', + }, +) + + +class Trigger(proto.Message): + r"""DataScan scheduling and trigger settings. + + This message has `oneof`_ fields (mutually exclusive fields). + For each oneof, at most one member field can be set at the same time. + Setting any member of the oneof automatically clears all other + members. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + on_demand (google.cloud.dataplex_v1.types.Trigger.OnDemand): + The scan runs once via ``RunDataScan`` API. + + This field is a member of `oneof`_ ``mode``. + schedule (google.cloud.dataplex_v1.types.Trigger.Schedule): + The scan is scheduled to run periodically. 
+
+ This field is a member of `oneof`_ ``mode``.
+ """
+
+ class OnDemand(proto.Message):
+ r"""The scan runs once via ``RunDataScan`` API.
+ """
+
+ class Schedule(proto.Message):
+ r"""The scan is scheduled to run periodically.
+
+ Attributes:
+ cron (str):
+ Required. `Cron <https://en.wikipedia.org/wiki/Cron>`__
+ schedule for running scans periodically.
+
+ To explicitly set a timezone in the cron tab, apply a prefix
+ in the cron tab: **"CRON_TZ=${IANA_TIME_ZONE}"** or
+ **"TZ=${IANA_TIME_ZONE}"**. The **${IANA_TIME_ZONE}** may
+ only be a valid string from the IANA time zone database
+ (`wikipedia <https://en.wikipedia.org/wiki/List_of_tz_database_time_zones>`__).
+ For example, ``CRON_TZ=America/New_York 1 * * * *``, or
+ ``TZ=America/New_York 1 * * * *``.
+
+ This field is required for Schedule scans.
+ """
+
+ cron: str = proto.Field(
+ proto.STRING,
+ number=1,
+ )
+
+ on_demand: OnDemand = proto.Field(
+ proto.MESSAGE,
+ number=100,
+ oneof='mode',
+ message=OnDemand,
+ )
+ schedule: Schedule = proto.Field(
+ proto.MESSAGE,
+ number=101,
+ oneof='mode',
+ message=Schedule,
+ )
+
+
+class DataSource(proto.Message):
+ r"""The data source for DataScan.
+
+ This message has `oneof`_ fields (mutually exclusive fields).
+ For each oneof, at most one member field can be set at the same time.
+ Setting any member of the oneof automatically clears all other
+ members.
+
+ .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields
+
+ Attributes:
+ entity (str):
+ Immutable. The Dataplex entity that represents the data
+ source (e.g. BigQuery table) for DataScan, of the form:
+ ``projects/{project_number}/locations/{location_id}/lakes/{lake_id}/zones/{zone_id}/entities/{entity_id}``.
+
+ This field is a member of `oneof`_ ``source``.
+ resource (str):
+ Immutable. The service-qualified full resource name of the
+ cloud resource for a DataScan job to scan against. The field
+ could be: BigQuery table of type "TABLE" for
+ DataProfileScan/DataQualityScan. Format:
+ ``//bigquery.googleapis.com/projects/PROJECT_ID/datasets/DATASET_ID/tables/TABLE_ID``
+
+ This field is a member of `oneof`_ ``source``.
+ """
+
+ entity: str = proto.Field(
+ proto.STRING,
+ number=100,
+ oneof='source',
+ )
+ resource: str = proto.Field(
+ proto.STRING,
+ number=101,
+ oneof='source',
+ )
+
+
+class ScannedData(proto.Message):
+ r"""The data scanned during processing (e.g. in incremental
+ DataScan).
+
+
+ .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields
+
+ Attributes:
+ incremental_field (google.cloud.dataplex_v1.types.ScannedData.IncrementalField):
+ The range denoted by values of an incremental
+ field.
+
+ This field is a member of `oneof`_ ``data_range``.
+ """
+
+ class IncrementalField(proto.Message):
+ r"""A data range denoted by a pair of start/end values of a
+ field.
+
+ Attributes:
+ field (str):
+ The field that contains values which
+ monotonically increase over time (e.g. a
+ timestamp column).
+ start (str):
+ Value that marks the start of the range.
+ end (str):
+ Value that marks the end of the range.
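+
+ A construction sketch (hedged; the field name and range values
+ are illustrative only)::
+
+     from google.cloud import dataplex_v1
+
+     scanned = dataplex_v1.ScannedData(
+         incremental_field=dataplex_v1.ScannedData.IncrementalField(
+             field="event_timestamp",
+             start="2024-01-01T00:00:00Z",
+             end="2024-01-02T00:00:00Z",
+         ),
+     )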
+ """ + + field: str = proto.Field( + proto.STRING, + number=1, + ) + start: str = proto.Field( + proto.STRING, + number=2, + ) + end: str = proto.Field( + proto.STRING, + number=3, + ) + + incremental_field: IncrementalField = proto.Field( + proto.MESSAGE, + number=1, + oneof='data_range', + message=IncrementalField, + ) + + +__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/types/resources.py b/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/types/resources.py new file mode 100644 index 000000000000..d07ae9df21e3 --- /dev/null +++ b/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/types/resources.py @@ -0,0 +1,1444 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from __future__ import annotations + +from typing import MutableMapping, MutableSequence + +import proto # type: ignore + +from google.protobuf import duration_pb2 # type: ignore +from google.protobuf import timestamp_pb2 # type: ignore + + +__protobuf__ = proto.module( + package='google.cloud.dataplex.v1', + manifest={ + 'State', + 'Lake', + 'AssetStatus', + 'Zone', + 'Action', + 'Asset', + }, +) + + +class State(proto.Enum): + r"""State of a resource. + + Values: + STATE_UNSPECIFIED (0): + State is not specified. + ACTIVE (1): + Resource is active, i.e., ready to use. + CREATING (2): + Resource is under creation. + DELETING (3): + Resource is under deletion. + ACTION_REQUIRED (4): + Resource is active but has unresolved + actions. + """ + STATE_UNSPECIFIED = 0 + ACTIVE = 1 + CREATING = 2 + DELETING = 3 + ACTION_REQUIRED = 4 + + +class Lake(proto.Message): + r"""A lake is a centralized repository for managing enterprise + data across the organization distributed across many cloud + projects, and stored in a variety of storage services such as + Google Cloud Storage and BigQuery. The resources attached to a + lake are referred to as managed resources. Data within these + managed resources can be structured or unstructured. A lake + provides data admins with tools to organize, secure and manage + their data at scale, and provides data scientists and data + engineers an integrated experience to easily search, discover, + analyze and transform data and associated metadata. + + Attributes: + name (str): + Output only. The relative resource name of the lake, of the + form: + ``projects/{project_number}/locations/{location_id}/lakes/{lake_id}``. + display_name (str): + Optional. User friendly display name. + uid (str): + Output only. System generated globally unique + ID for the lake. This ID will be different if + the lake is deleted and re-created with the same + name. + create_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. The time when the lake was + created. + update_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. The time when the lake was last + updated. + labels (MutableMapping[str, str]): + Optional. 
User-defined labels for the lake. + description (str): + Optional. Description of the lake. + state (google.cloud.dataplex_v1.types.State): + Output only. Current state of the lake. + service_account (str): + Output only. Service account associated with + this lake. This service account must be + authorized to access or operate on resources + managed by the lake. + metastore (google.cloud.dataplex_v1.types.Lake.Metastore): + Optional. Settings to manage lake and + Dataproc Metastore service instance association. + asset_status (google.cloud.dataplex_v1.types.AssetStatus): + Output only. Aggregated status of the + underlying assets of the lake. + metastore_status (google.cloud.dataplex_v1.types.Lake.MetastoreStatus): + Output only. Metastore status of the lake. + """ + + class Metastore(proto.Message): + r"""Settings to manage association of Dataproc Metastore with a + lake. + + Attributes: + service (str): + Optional. A relative reference to the Dataproc Metastore + (https://cloud.google.com/dataproc-metastore/docs) service + associated with the lake: + ``projects/{project_id}/locations/{location_id}/services/{service_id}`` + """ + + service: str = proto.Field( + proto.STRING, + number=1, + ) + + class MetastoreStatus(proto.Message): + r"""Status of Lake and Dataproc Metastore service instance + association. + + Attributes: + state (google.cloud.dataplex_v1.types.Lake.MetastoreStatus.State): + Current state of association. + message (str): + Additional information about the current + status. + update_time (google.protobuf.timestamp_pb2.Timestamp): + Last update time of the metastore status of + the lake. + endpoint (str): + The URI of the endpoint used to access the + Metastore service. + """ + class State(proto.Enum): + r"""Current state of association. + + Values: + STATE_UNSPECIFIED (0): + Unspecified. + NONE (1): + A Metastore service instance is not + associated with the lake. + READY (2): + A Metastore service instance is attached to + the lake. + UPDATING (3): + Attach/detach is in progress. + ERROR (4): + Attach/detach could not be done due to + errors. 
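+
+ A reading sketch (hedged; ``lake`` is assumed to come from a
+ prior ``get_lake`` call on a Dataplex service client)::
+
+     from google.cloud import dataplex_v1
+
+     status = lake.metastore_status
+     if status.state == dataplex_v1.Lake.MetastoreStatus.State.ERROR:
+         print(status.message)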
+ """ + STATE_UNSPECIFIED = 0 + NONE = 1 + READY = 2 + UPDATING = 3 + ERROR = 4 + + state: 'Lake.MetastoreStatus.State' = proto.Field( + proto.ENUM, + number=1, + enum='Lake.MetastoreStatus.State', + ) + message: str = proto.Field( + proto.STRING, + number=2, + ) + update_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=3, + message=timestamp_pb2.Timestamp, + ) + endpoint: str = proto.Field( + proto.STRING, + number=4, + ) + + name: str = proto.Field( + proto.STRING, + number=1, + ) + display_name: str = proto.Field( + proto.STRING, + number=2, + ) + uid: str = proto.Field( + proto.STRING, + number=3, + ) + create_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=4, + message=timestamp_pb2.Timestamp, + ) + update_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=5, + message=timestamp_pb2.Timestamp, + ) + labels: MutableMapping[str, str] = proto.MapField( + proto.STRING, + proto.STRING, + number=6, + ) + description: str = proto.Field( + proto.STRING, + number=7, + ) + state: 'State' = proto.Field( + proto.ENUM, + number=8, + enum='State', + ) + service_account: str = proto.Field( + proto.STRING, + number=9, + ) + metastore: Metastore = proto.Field( + proto.MESSAGE, + number=102, + message=Metastore, + ) + asset_status: 'AssetStatus' = proto.Field( + proto.MESSAGE, + number=103, + message='AssetStatus', + ) + metastore_status: MetastoreStatus = proto.Field( + proto.MESSAGE, + number=104, + message=MetastoreStatus, + ) + + +class AssetStatus(proto.Message): + r"""Aggregated status of the underlying assets of a lake or zone. + + Attributes: + update_time (google.protobuf.timestamp_pb2.Timestamp): + Last update time of the status. + active_assets (int): + Number of active assets. + security_policy_applying_assets (int): + Number of assets that are in process of + updating the security policy on attached + resources. + """ + + update_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=1, + message=timestamp_pb2.Timestamp, + ) + active_assets: int = proto.Field( + proto.INT32, + number=2, + ) + security_policy_applying_assets: int = proto.Field( + proto.INT32, + number=3, + ) + + +class Zone(proto.Message): + r"""A zone represents a logical group of related assets within a + lake. A zone can be used to map to organizational structure or + represent stages of data readiness from raw to curated. It + provides managing behavior that is shared or inherited by all + contained assets. + + Attributes: + name (str): + Output only. The relative resource name of the zone, of the + form: + ``projects/{project_number}/locations/{location_id}/lakes/{lake_id}/zones/{zone_id}``. + display_name (str): + Optional. User friendly display name. + uid (str): + Output only. System generated globally unique + ID for the zone. This ID will be different if + the zone is deleted and re-created with the same + name. + create_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. The time when the zone was + created. + update_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. The time when the zone was last + updated. + labels (MutableMapping[str, str]): + Optional. User defined labels for the zone. + description (str): + Optional. Description of the zone. + state (google.cloud.dataplex_v1.types.State): + Output only. Current state of the zone. + type_ (google.cloud.dataplex_v1.types.Zone.Type): + Required. Immutable. The type of the zone. + discovery_spec (google.cloud.dataplex_v1.types.Zone.DiscoverySpec): + Optional. 
Specification of the discovery
+ feature applied to data in this zone.
+ resource_spec (google.cloud.dataplex_v1.types.Zone.ResourceSpec):
+ Required. Specification of the resources that
+ are referenced by the assets within this zone.
+ asset_status (google.cloud.dataplex_v1.types.AssetStatus):
+ Output only. Aggregated status of the
+ underlying assets of the zone.
+ """
+ class Type(proto.Enum):
+ r"""Type of zone.
+
+ Values:
+ TYPE_UNSPECIFIED (0):
+ Zone type not specified.
+ RAW (1):
+ A zone that contains data that needs further
+ processing before it is considered generally
+ ready for consumption and analytics workloads.
+ CURATED (2):
+ A zone that contains data that is considered
+ to be ready for broader consumption and
+ analytics workloads. Curated structured data
+ stored in Cloud Storage must conform to certain
+ file formats (parquet, avro and orc) and
+ organized in a hive-compatible directory layout.
+ """
+ TYPE_UNSPECIFIED = 0
+ RAW = 1
+ CURATED = 2
+
+ class ResourceSpec(proto.Message):
+ r"""Settings for resources attached as assets within a zone.
+
+ Attributes:
+ location_type (google.cloud.dataplex_v1.types.Zone.ResourceSpec.LocationType):
+ Required. Immutable. The location type of the
+ resources that are allowed to be attached to the
+ assets within this zone.
+ """
+ class LocationType(proto.Enum):
+ r"""Location type of the resources attached to a zone.
+
+ Values:
+ LOCATION_TYPE_UNSPECIFIED (0):
+ Unspecified location type.
+ SINGLE_REGION (1):
+ Resources that are associated with a single
+ region.
+ MULTI_REGION (2):
+ Resources that are associated with a
+ multi-region location.
+ """
+ LOCATION_TYPE_UNSPECIFIED = 0
+ SINGLE_REGION = 1
+ MULTI_REGION = 2
+
+ location_type: 'Zone.ResourceSpec.LocationType' = proto.Field(
+ proto.ENUM,
+ number=1,
+ enum='Zone.ResourceSpec.LocationType',
+ )
+
+ class DiscoverySpec(proto.Message):
+ r"""Settings to manage the metadata discovery and publishing in a
+ zone.
+
+
+ .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields
+
+ Attributes:
+ enabled (bool):
+ Required. Whether discovery is enabled.
+ include_patterns (MutableSequence[str]):
+ Optional. The list of patterns to apply for
+ selecting data to include during discovery if
+ only a subset of the data should be considered.
+ For Cloud Storage bucket assets, these are
+ interpreted as glob patterns used to match
+ object names. For BigQuery dataset assets, these
+ are interpreted as patterns to match table
+ names.
+ exclude_patterns (MutableSequence[str]):
+ Optional. The list of patterns to apply for
+ selecting data to exclude during discovery. For
+ Cloud Storage bucket assets, these are
+ interpreted as glob patterns used to match
+ object names. For BigQuery dataset assets, these
+ are interpreted as patterns to match table
+ names.
+ csv_options (google.cloud.dataplex_v1.types.Zone.DiscoverySpec.CsvOptions):
+ Optional. Configuration for CSV data.
+ json_options (google.cloud.dataplex_v1.types.Zone.DiscoverySpec.JsonOptions):
+ Optional. Configuration for Json data.
+ schedule (str):
+ Optional. Cron schedule (https://en.wikipedia.org/wiki/Cron)
+ for running discovery periodically. Successive discovery
+ runs must be scheduled at least 60 minutes apart. The
+ default value is to run discovery every 60 minutes. To
+ explicitly set a timezone to the cron tab, apply a prefix in
+ the cron tab: "CRON_TZ=${IANA_TIME_ZONE}" or
+ "TZ=${IANA_TIME_ZONE}". The ${IANA_TIME_ZONE} may only be a
+ valid string from the IANA time zone database. For example,
+ ``CRON_TZ=America/New_York 1 * * * *``, or
+ ``TZ=America/New_York 1 * * * *``.
+
+ This field is a member of `oneof`_ ``trigger``.
+ """
+
+ class CsvOptions(proto.Message):
+ r"""Describe CSV and similar semi-structured data formats.
+
+ Attributes:
+ header_rows (int):
+ Optional. The number of rows to interpret as
+ header rows that should be skipped when reading
+ data rows.
+ delimiter (str):
+ Optional. The delimiter being used to
+ separate values. This defaults to ','.
+ encoding (str):
+ Optional. The character encoding of the data.
+ The default is UTF-8.
+ disable_type_inference (bool):
+ Optional. Whether to disable the inference of
+ data type for CSV data. If true, all columns
+ will be registered as strings.
+ """
+
+ header_rows: int = proto.Field(
+ proto.INT32,
+ number=1,
+ )
+ delimiter: str = proto.Field(
+ proto.STRING,
+ number=2,
+ )
+ encoding: str = proto.Field(
+ proto.STRING,
+ number=3,
+ )
+ disable_type_inference: bool = proto.Field(
+ proto.BOOL,
+ number=4,
+ )
+
+ class JsonOptions(proto.Message):
+ r"""Describe JSON data format.
+
+ Attributes:
+ encoding (str):
+ Optional. The character encoding of the data.
+ The default is UTF-8.
+ disable_type_inference (bool):
+ Optional. Whether to disable the inference of
+ data type for Json data. If true, all columns
+ will be registered as their primitive types
+ (strings, number or boolean).
+ """
+
+ encoding: str = proto.Field(
+ proto.STRING,
+ number=1,
+ )
+ disable_type_inference: bool = proto.Field(
+ proto.BOOL,
+ number=2,
+ )
+
+ enabled: bool = proto.Field(
+ proto.BOOL,
+ number=1,
+ )
+ include_patterns: MutableSequence[str] = proto.RepeatedField(
+ proto.STRING,
+ number=2,
+ )
+ exclude_patterns: MutableSequence[str] = proto.RepeatedField(
+ proto.STRING,
+ number=3,
+ )
+ csv_options: 'Zone.DiscoverySpec.CsvOptions' = proto.Field(
+ proto.MESSAGE,
+ number=4,
+ message='Zone.DiscoverySpec.CsvOptions',
+ )
+ json_options: 'Zone.DiscoverySpec.JsonOptions' = proto.Field(
+ proto.MESSAGE,
+ number=5,
+ message='Zone.DiscoverySpec.JsonOptions',
+ )
+ schedule: str = proto.Field(
+ proto.STRING,
+ number=10,
+ oneof='trigger',
+ )
+
+ name: str = proto.Field(
+ proto.STRING,
+ number=1,
+ )
+ display_name: str = proto.Field(
+ proto.STRING,
+ number=2,
+ )
+ uid: str = proto.Field(
+ proto.STRING,
+ number=3,
+ )
+ create_time: timestamp_pb2.Timestamp = proto.Field(
+ proto.MESSAGE,
+ number=4,
+ message=timestamp_pb2.Timestamp,
+ )
+ update_time: timestamp_pb2.Timestamp = proto.Field(
+ proto.MESSAGE,
+ number=5,
+ message=timestamp_pb2.Timestamp,
+ )
+ labels: MutableMapping[str, str] = proto.MapField(
+ proto.STRING,
+ proto.STRING,
+ number=6,
+ )
+ description: str = proto.Field(
+ proto.STRING,
+ number=7,
+ )
+ state: 'State' = proto.Field(
+ proto.ENUM,
+ number=8,
+ enum='State',
+ )
+ type_: Type = proto.Field(
+ proto.ENUM,
+ number=9,
+ enum=Type,
+ )
+ discovery_spec: DiscoverySpec = proto.Field(
+ proto.MESSAGE,
+ number=103,
+ message=DiscoverySpec,
+ )
+ resource_spec: ResourceSpec = proto.Field(
+ proto.MESSAGE,
+ number=104,
+ message=ResourceSpec,
+ )
+ asset_status: 'AssetStatus' = proto.Field(
+ proto.MESSAGE,
+ number=105,
+ message='AssetStatus',
+ )
+
+
+class Action(proto.Message):
+ r"""Action represents an issue requiring administrator action for
+ resolution.
+
+ This message has `oneof`_ fields (mutually exclusive fields).
+ For each oneof, at most one member field can be set at the same time. + Setting any member of the oneof automatically clears all other + members. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + category (google.cloud.dataplex_v1.types.Action.Category): + The category of issue associated with the + action. + issue (str): + Detailed description of the issue requiring + action. + detect_time (google.protobuf.timestamp_pb2.Timestamp): + The time that the issue was detected. + name (str): + Output only. The relative resource name of the action, of + the form: + ``projects/{project}/locations/{location}/lakes/{lake}/actions/{action}`` + ``projects/{project}/locations/{location}/lakes/{lake}/zones/{zone}/actions/{action}`` + ``projects/{project}/locations/{location}/lakes/{lake}/zones/{zone}/assets/{asset}/actions/{action}``. + lake (str): + Output only. The relative resource name of the lake, of the + form: + ``projects/{project_number}/locations/{location_id}/lakes/{lake_id}``. + zone (str): + Output only. The relative resource name of the zone, of the + form: + ``projects/{project_number}/locations/{location_id}/lakes/{lake_id}/zones/{zone_id}``. + asset (str): + Output only. The relative resource name of the asset, of the + form: + ``projects/{project_number}/locations/{location_id}/lakes/{lake_id}/zones/{zone_id}/assets/{asset_id}``. + data_locations (MutableSequence[str]): + The list of data locations associated with this action. + Cloud Storage locations are represented as URI paths(E.g. + ``gs://bucket/table1/year=2020/month=Jan/``). BigQuery + locations refer to resource names(E.g. + ``bigquery.googleapis.com/projects/project-id/datasets/dataset-id``). + invalid_data_format (google.cloud.dataplex_v1.types.Action.InvalidDataFormat): + Details for issues related to invalid or + unsupported data formats. + + This field is a member of `oneof`_ ``details``. + incompatible_data_schema (google.cloud.dataplex_v1.types.Action.IncompatibleDataSchema): + Details for issues related to incompatible + schemas detected within data. + + This field is a member of `oneof`_ ``details``. + invalid_data_partition (google.cloud.dataplex_v1.types.Action.InvalidDataPartition): + Details for issues related to invalid or + unsupported data partition structure. + + This field is a member of `oneof`_ ``details``. + missing_data (google.cloud.dataplex_v1.types.Action.MissingData): + Details for issues related to absence of data + within managed resources. + + This field is a member of `oneof`_ ``details``. + missing_resource (google.cloud.dataplex_v1.types.Action.MissingResource): + Details for issues related to absence of a + managed resource. + + This field is a member of `oneof`_ ``details``. + unauthorized_resource (google.cloud.dataplex_v1.types.Action.UnauthorizedResource): + Details for issues related to lack of + permissions to access data resources. + + This field is a member of `oneof`_ ``details``. + failed_security_policy_apply (google.cloud.dataplex_v1.types.Action.FailedSecurityPolicyApply): + Details for issues related to applying + security policy. + + This field is a member of `oneof`_ ``details``. + invalid_data_organization (google.cloud.dataplex_v1.types.Action.InvalidDataOrganization): + Details for issues related to invalid data + arrangement. + + This field is a member of `oneof`_ ``details``. + """ + class Category(proto.Enum): + r"""The category of issues. 
+ + Values: + CATEGORY_UNSPECIFIED (0): + Unspecified category. + RESOURCE_MANAGEMENT (1): + Resource management related issues. + SECURITY_POLICY (2): + Security policy related issues. + DATA_DISCOVERY (3): + Data and discovery related issues. + """ + CATEGORY_UNSPECIFIED = 0 + RESOURCE_MANAGEMENT = 1 + SECURITY_POLICY = 2 + DATA_DISCOVERY = 3 + + class MissingResource(proto.Message): + r"""Action details for resource references in assets that cannot + be located. + + """ + + class UnauthorizedResource(proto.Message): + r"""Action details for unauthorized resource issues raised to + indicate that the service account associated with the lake + instance is not authorized to access or manage the resource + associated with an asset. + + """ + + class FailedSecurityPolicyApply(proto.Message): + r"""Failed to apply security policy to the managed resource(s) + under a lake, zone or an asset. For a lake or zone resource, one + or more underlying assets has a failure applying security policy + to the associated managed resource. + + Attributes: + asset (str): + Resource name of one of the assets with + failing security policy application. Populated + for a lake or zone resource only. + """ + + asset: str = proto.Field( + proto.STRING, + number=1, + ) + + class InvalidDataFormat(proto.Message): + r"""Action details for invalid or unsupported data files detected + by discovery. + + Attributes: + sampled_data_locations (MutableSequence[str]): + The list of data locations sampled and used + for format/schema inference. + expected_format (str): + The expected data format of the entity. + new_format (str): + The new unexpected data format within the + entity. + """ + + sampled_data_locations: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=1, + ) + expected_format: str = proto.Field( + proto.STRING, + number=2, + ) + new_format: str = proto.Field( + proto.STRING, + number=3, + ) + + class IncompatibleDataSchema(proto.Message): + r"""Action details for incompatible schemas detected by + discovery. + + Attributes: + table (str): + The name of the table containing invalid + data. + existing_schema (str): + The existing and expected schema of the + table. The schema is provided as a JSON + formatted structure listing columns and data + types. + new_schema (str): + The new and incompatible schema within the + table. The schema is provided as a JSON + formatted structured listing columns and data + types. + sampled_data_locations (MutableSequence[str]): + The list of data locations sampled and used + for format/schema inference. + schema_change (google.cloud.dataplex_v1.types.Action.IncompatibleDataSchema.SchemaChange): + Whether the action relates to a schema that + is incompatible or modified. + """ + class SchemaChange(proto.Enum): + r"""Whether the action relates to a schema that is incompatible + or modified. + + Values: + SCHEMA_CHANGE_UNSPECIFIED (0): + Schema change unspecified. + INCOMPATIBLE (1): + Newly discovered schema is incompatible with + existing schema. + MODIFIED (2): + Newly discovered schema has changed from + existing schema for data in a curated zone. 
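+
+ A dispatch sketch (hedged; ``action`` is assumed to be an Action
+ whose ``incompatible_data_schema`` detail is set)::
+
+     from google.cloud import dataplex_v1
+
+     SchemaChange = dataplex_v1.Action.IncompatibleDataSchema.SchemaChange
+     detail = action.incompatible_data_schema
+     if detail.schema_change == SchemaChange.INCOMPATIBLE:
+         print("Schema conflict in", detail.table)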
+ """ + SCHEMA_CHANGE_UNSPECIFIED = 0 + INCOMPATIBLE = 1 + MODIFIED = 2 + + table: str = proto.Field( + proto.STRING, + number=1, + ) + existing_schema: str = proto.Field( + proto.STRING, + number=2, + ) + new_schema: str = proto.Field( + proto.STRING, + number=3, + ) + sampled_data_locations: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=4, + ) + schema_change: 'Action.IncompatibleDataSchema.SchemaChange' = proto.Field( + proto.ENUM, + number=5, + enum='Action.IncompatibleDataSchema.SchemaChange', + ) + + class InvalidDataPartition(proto.Message): + r"""Action details for invalid or unsupported partitions detected + by discovery. + + Attributes: + expected_structure (google.cloud.dataplex_v1.types.Action.InvalidDataPartition.PartitionStructure): + The issue type of InvalidDataPartition. + """ + class PartitionStructure(proto.Enum): + r"""The expected partition structure. + + Values: + PARTITION_STRUCTURE_UNSPECIFIED (0): + PartitionStructure unspecified. + CONSISTENT_KEYS (1): + Consistent hive-style partition definition + (both raw and curated zone). + HIVE_STYLE_KEYS (2): + Hive style partition definition (curated zone + only). + """ + PARTITION_STRUCTURE_UNSPECIFIED = 0 + CONSISTENT_KEYS = 1 + HIVE_STYLE_KEYS = 2 + + expected_structure: 'Action.InvalidDataPartition.PartitionStructure' = proto.Field( + proto.ENUM, + number=1, + enum='Action.InvalidDataPartition.PartitionStructure', + ) + + class MissingData(proto.Message): + r"""Action details for absence of data detected by discovery. + """ + + class InvalidDataOrganization(proto.Message): + r"""Action details for invalid data arrangement. + """ + + category: Category = proto.Field( + proto.ENUM, + number=1, + enum=Category, + ) + issue: str = proto.Field( + proto.STRING, + number=2, + ) + detect_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=4, + message=timestamp_pb2.Timestamp, + ) + name: str = proto.Field( + proto.STRING, + number=5, + ) + lake: str = proto.Field( + proto.STRING, + number=6, + ) + zone: str = proto.Field( + proto.STRING, + number=7, + ) + asset: str = proto.Field( + proto.STRING, + number=8, + ) + data_locations: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=9, + ) + invalid_data_format: InvalidDataFormat = proto.Field( + proto.MESSAGE, + number=10, + oneof='details', + message=InvalidDataFormat, + ) + incompatible_data_schema: IncompatibleDataSchema = proto.Field( + proto.MESSAGE, + number=11, + oneof='details', + message=IncompatibleDataSchema, + ) + invalid_data_partition: InvalidDataPartition = proto.Field( + proto.MESSAGE, + number=12, + oneof='details', + message=InvalidDataPartition, + ) + missing_data: MissingData = proto.Field( + proto.MESSAGE, + number=13, + oneof='details', + message=MissingData, + ) + missing_resource: MissingResource = proto.Field( + proto.MESSAGE, + number=14, + oneof='details', + message=MissingResource, + ) + unauthorized_resource: UnauthorizedResource = proto.Field( + proto.MESSAGE, + number=15, + oneof='details', + message=UnauthorizedResource, + ) + failed_security_policy_apply: FailedSecurityPolicyApply = proto.Field( + proto.MESSAGE, + number=21, + oneof='details', + message=FailedSecurityPolicyApply, + ) + invalid_data_organization: InvalidDataOrganization = proto.Field( + proto.MESSAGE, + number=22, + oneof='details', + message=InvalidDataOrganization, + ) + + +class Asset(proto.Message): + r"""An asset represents a cloud resource that is being managed + within a lake as a member of a zone. 
+ + Attributes: + name (str): + Output only. The relative resource name of the asset, of the + form: + ``projects/{project_number}/locations/{location_id}/lakes/{lake_id}/zones/{zone_id}/assets/{asset_id}``. + display_name (str): + Optional. User friendly display name. + uid (str): + Output only. System generated globally unique + ID for the asset. This ID will be different if + the asset is deleted and re-created with the + same name. + create_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. The time when the asset was + created. + update_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. The time when the asset was last + updated. + labels (MutableMapping[str, str]): + Optional. User defined labels for the asset. + description (str): + Optional. Description of the asset. + state (google.cloud.dataplex_v1.types.State): + Output only. Current state of the asset. + resource_spec (google.cloud.dataplex_v1.types.Asset.ResourceSpec): + Required. Specification of the resource that + is referenced by this asset. + resource_status (google.cloud.dataplex_v1.types.Asset.ResourceStatus): + Output only. Status of the resource + referenced by this asset. + security_status (google.cloud.dataplex_v1.types.Asset.SecurityStatus): + Output only. Status of the security policy + applied to resource referenced by this asset. + discovery_spec (google.cloud.dataplex_v1.types.Asset.DiscoverySpec): + Optional. Specification of the discovery + feature applied to data referenced by this + asset. When this spec is left unset, the asset + will use the spec set on the parent zone. + discovery_status (google.cloud.dataplex_v1.types.Asset.DiscoveryStatus): + Output only. Status of the discovery feature + applied to data referenced by this asset. + """ + + class SecurityStatus(proto.Message): + r"""Security policy status of the asset. Data security policy, + i.e., readers, writers & owners, should be specified in the + lake/zone/asset IAM policy. + + Attributes: + state (google.cloud.dataplex_v1.types.Asset.SecurityStatus.State): + The current state of the security policy + applied to the attached resource. + message (str): + Additional information about the current + state. + update_time (google.protobuf.timestamp_pb2.Timestamp): + Last update time of the status. + """ + class State(proto.Enum): + r"""The state of the security policy. + + Values: + STATE_UNSPECIFIED (0): + State unspecified. + READY (1): + Security policy has been successfully applied + to the attached resource. + APPLYING (2): + Security policy is in the process of being + applied to the attached resource. + ERROR (3): + Security policy could not be applied to the + attached resource due to errors. + """ + STATE_UNSPECIFIED = 0 + READY = 1 + APPLYING = 2 + ERROR = 3 + + state: 'Asset.SecurityStatus.State' = proto.Field( + proto.ENUM, + number=1, + enum='Asset.SecurityStatus.State', + ) + message: str = proto.Field( + proto.STRING, + number=2, + ) + update_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=3, + message=timestamp_pb2.Timestamp, + ) + + class DiscoverySpec(proto.Message): + r"""Settings to manage the metadata discovery and publishing for + an asset. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + enabled (bool): + Optional. Whether discovery is enabled. + include_patterns (MutableSequence[str]): + Optional. 
The list of patterns to apply for
+                selecting data to include during discovery if
+                only a subset of the data should be considered.
+                For Cloud Storage bucket assets, these are
+                interpreted as glob patterns used to match
+                object names. For BigQuery dataset assets, these
+                are interpreted as patterns to match table
+                names.
+            exclude_patterns (MutableSequence[str]):
+                Optional. The list of patterns to apply for
+                selecting data to exclude during discovery. For
+                Cloud Storage bucket assets, these are
+                interpreted as glob patterns used to match
+                object names. For BigQuery dataset assets, these
+                are interpreted as patterns to match table
+                names.
+            csv_options (google.cloud.dataplex_v1.types.Asset.DiscoverySpec.CsvOptions):
+                Optional. Configuration for CSV data.
+            json_options (google.cloud.dataplex_v1.types.Asset.DiscoverySpec.JsonOptions):
+                Optional. Configuration for JSON data.
+            schedule (str):
+                Optional. Cron schedule (https://en.wikipedia.org/wiki/Cron)
+                for running discovery periodically. Successive discovery
+                runs must be scheduled at least 60 minutes apart. The
+                default value is to run discovery every 60 minutes. To
+                explicitly set a timezone to the cron tab, apply a prefix in
+                the cron tab: "CRON_TZ=${IANA_TIME_ZONE}" or
+                "TZ=${IANA_TIME_ZONE}". The ${IANA_TIME_ZONE} may only be a
+                valid string from the IANA time zone database. For example,
+                ``CRON_TZ=America/New_York 1 * * * *``, or
+                ``TZ=America/New_York 1 * * * *``.
+
+                This field is a member of `oneof`_ ``trigger``.
+        """
+
+        class CsvOptions(proto.Message):
+            r"""Describe CSV and similar semi-structured data formats.
+
+            Attributes:
+                header_rows (int):
+                    Optional. The number of rows to interpret as
+                    header rows that should be skipped when reading
+                    data rows.
+                delimiter (str):
+                    Optional. The delimiter being used to
+                    separate values. This defaults to ','.
+                encoding (str):
+                    Optional. The character encoding of the data.
+                    The default is UTF-8.
+                disable_type_inference (bool):
+                    Optional. Whether to disable the inference of
+                    data type for CSV data. If true, all columns
+                    will be registered as strings.
+            """
+
+            header_rows: int = proto.Field(
+                proto.INT32,
+                number=1,
+            )
+            delimiter: str = proto.Field(
+                proto.STRING,
+                number=2,
+            )
+            encoding: str = proto.Field(
+                proto.STRING,
+                number=3,
+            )
+            disable_type_inference: bool = proto.Field(
+                proto.BOOL,
+                number=4,
+            )
+
+        class JsonOptions(proto.Message):
+            r"""Describe JSON data format.
+
+            Attributes:
+                encoding (str):
+                    Optional. The character encoding of the data.
+                    The default is UTF-8.
+                disable_type_inference (bool):
+                    Optional. Whether to disable the inference of
+                    data type for JSON data. If true, all columns
+                    will be registered as their primitive types
+                    (string, number, or boolean).
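+
+            For orientation, an illustrative (not generated) sketch of
+            the enclosing ``Asset.DiscoverySpec``, combining a cron
+            trigger with CSV and JSON options; the pattern and schedule
+            values are hypothetical::
+
+                spec = dataplex_v1.Asset.DiscoverySpec(
+                    enabled=True,
+                    include_patterns=["raw/**"],
+                    csv_options=dataplex_v1.Asset.DiscoverySpec.CsvOptions(
+                        header_rows=1,
+                        delimiter=",",
+                    ),
+                    json_options=dataplex_v1.Asset.DiscoverySpec.JsonOptions(
+                        disable_type_inference=True,
+                    ),
+                    schedule="CRON_TZ=America/New_York 0 * * * *",
+                )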
+ """ + + encoding: str = proto.Field( + proto.STRING, + number=1, + ) + disable_type_inference: bool = proto.Field( + proto.BOOL, + number=2, + ) + + enabled: bool = proto.Field( + proto.BOOL, + number=1, + ) + include_patterns: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=2, + ) + exclude_patterns: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=3, + ) + csv_options: 'Asset.DiscoverySpec.CsvOptions' = proto.Field( + proto.MESSAGE, + number=4, + message='Asset.DiscoverySpec.CsvOptions', + ) + json_options: 'Asset.DiscoverySpec.JsonOptions' = proto.Field( + proto.MESSAGE, + number=5, + message='Asset.DiscoverySpec.JsonOptions', + ) + schedule: str = proto.Field( + proto.STRING, + number=10, + oneof='trigger', + ) + + class ResourceSpec(proto.Message): + r"""Identifies the cloud resource that is referenced by this + asset. + + Attributes: + name (str): + Immutable. Relative name of the cloud resource that contains + the data that is being managed within a lake. For example: + ``projects/{project_number}/buckets/{bucket_id}`` + ``projects/{project_number}/datasets/{dataset_id}`` + type_ (google.cloud.dataplex_v1.types.Asset.ResourceSpec.Type): + Required. Immutable. Type of resource. + read_access_mode (google.cloud.dataplex_v1.types.Asset.ResourceSpec.AccessMode): + Optional. Determines how read permissions are + handled for each asset and their associated + tables. Only available to storage buckets + assets. + """ + class Type(proto.Enum): + r"""Type of resource. + + Values: + TYPE_UNSPECIFIED (0): + Type not specified. + STORAGE_BUCKET (1): + Cloud Storage bucket. + BIGQUERY_DATASET (2): + BigQuery dataset. + """ + TYPE_UNSPECIFIED = 0 + STORAGE_BUCKET = 1 + BIGQUERY_DATASET = 2 + + class AccessMode(proto.Enum): + r"""Access Mode determines how data stored within the resource is + read. This is only applicable to storage bucket assets. + + Values: + ACCESS_MODE_UNSPECIFIED (0): + Access mode unspecified. + DIRECT (1): + Default. Data is accessed directly using + storage APIs. + MANAGED (2): + Data is accessed through a managed interface + using BigQuery APIs. + """ + ACCESS_MODE_UNSPECIFIED = 0 + DIRECT = 1 + MANAGED = 2 + + name: str = proto.Field( + proto.STRING, + number=1, + ) + type_: 'Asset.ResourceSpec.Type' = proto.Field( + proto.ENUM, + number=2, + enum='Asset.ResourceSpec.Type', + ) + read_access_mode: 'Asset.ResourceSpec.AccessMode' = proto.Field( + proto.ENUM, + number=5, + enum='Asset.ResourceSpec.AccessMode', + ) + + class ResourceStatus(proto.Message): + r"""Status of the resource referenced by an asset. + + Attributes: + state (google.cloud.dataplex_v1.types.Asset.ResourceStatus.State): + The current state of the managed resource. + message (str): + Additional information about the current + state. + update_time (google.protobuf.timestamp_pb2.Timestamp): + Last update time of the status. + managed_access_identity (str): + Output only. Service account associated with + the BigQuery Connection. + """ + class State(proto.Enum): + r"""The state of a resource. + + Values: + STATE_UNSPECIFIED (0): + State unspecified. + READY (1): + Resource does not have any errors. + ERROR (2): + Resource has errors. 
+ """ + STATE_UNSPECIFIED = 0 + READY = 1 + ERROR = 2 + + state: 'Asset.ResourceStatus.State' = proto.Field( + proto.ENUM, + number=1, + enum='Asset.ResourceStatus.State', + ) + message: str = proto.Field( + proto.STRING, + number=2, + ) + update_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=3, + message=timestamp_pb2.Timestamp, + ) + managed_access_identity: str = proto.Field( + proto.STRING, + number=4, + ) + + class DiscoveryStatus(proto.Message): + r"""Status of discovery for an asset. + + Attributes: + state (google.cloud.dataplex_v1.types.Asset.DiscoveryStatus.State): + The current status of the discovery feature. + message (str): + Additional information about the current + state. + update_time (google.protobuf.timestamp_pb2.Timestamp): + Last update time of the status. + last_run_time (google.protobuf.timestamp_pb2.Timestamp): + The start time of the last discovery run. + stats (google.cloud.dataplex_v1.types.Asset.DiscoveryStatus.Stats): + Data Stats of the asset reported by + discovery. + last_run_duration (google.protobuf.duration_pb2.Duration): + The duration of the last discovery run. + """ + class State(proto.Enum): + r"""Current state of discovery. + + Values: + STATE_UNSPECIFIED (0): + State is unspecified. + SCHEDULED (1): + Discovery for the asset is scheduled. + IN_PROGRESS (2): + Discovery for the asset is running. + PAUSED (3): + Discovery for the asset is currently paused + (e.g. due to a lack of available resources). It + will be automatically resumed. + DISABLED (5): + Discovery for the asset is disabled. + """ + STATE_UNSPECIFIED = 0 + SCHEDULED = 1 + IN_PROGRESS = 2 + PAUSED = 3 + DISABLED = 5 + + class Stats(proto.Message): + r"""The aggregated data statistics for the asset reported by + discovery. + + Attributes: + data_items (int): + The count of data items within the referenced + resource. + data_size (int): + The number of stored data bytes within the + referenced resource. + tables (int): + The count of table entities within the + referenced resource. + filesets (int): + The count of fileset entities within the + referenced resource. 
+ """ + + data_items: int = proto.Field( + proto.INT64, + number=1, + ) + data_size: int = proto.Field( + proto.INT64, + number=2, + ) + tables: int = proto.Field( + proto.INT64, + number=3, + ) + filesets: int = proto.Field( + proto.INT64, + number=4, + ) + + state: 'Asset.DiscoveryStatus.State' = proto.Field( + proto.ENUM, + number=1, + enum='Asset.DiscoveryStatus.State', + ) + message: str = proto.Field( + proto.STRING, + number=2, + ) + update_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=3, + message=timestamp_pb2.Timestamp, + ) + last_run_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=4, + message=timestamp_pb2.Timestamp, + ) + stats: 'Asset.DiscoveryStatus.Stats' = proto.Field( + proto.MESSAGE, + number=6, + message='Asset.DiscoveryStatus.Stats', + ) + last_run_duration: duration_pb2.Duration = proto.Field( + proto.MESSAGE, + number=7, + message=duration_pb2.Duration, + ) + + name: str = proto.Field( + proto.STRING, + number=1, + ) + display_name: str = proto.Field( + proto.STRING, + number=2, + ) + uid: str = proto.Field( + proto.STRING, + number=3, + ) + create_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=4, + message=timestamp_pb2.Timestamp, + ) + update_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=5, + message=timestamp_pb2.Timestamp, + ) + labels: MutableMapping[str, str] = proto.MapField( + proto.STRING, + proto.STRING, + number=6, + ) + description: str = proto.Field( + proto.STRING, + number=7, + ) + state: 'State' = proto.Field( + proto.ENUM, + number=8, + enum='State', + ) + resource_spec: ResourceSpec = proto.Field( + proto.MESSAGE, + number=100, + message=ResourceSpec, + ) + resource_status: ResourceStatus = proto.Field( + proto.MESSAGE, + number=101, + message=ResourceStatus, + ) + security_status: SecurityStatus = proto.Field( + proto.MESSAGE, + number=103, + message=SecurityStatus, + ) + discovery_spec: DiscoverySpec = proto.Field( + proto.MESSAGE, + number=106, + message=DiscoverySpec, + ) + discovery_status: DiscoveryStatus = proto.Field( + proto.MESSAGE, + number=107, + message=DiscoveryStatus, + ) + + +__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/types/security.py b/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/types/security.py new file mode 100644 index 000000000000..3fa729be6197 --- /dev/null +++ b/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/types/security.py @@ -0,0 +1,90 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+#
+from __future__ import annotations
+
+from typing import MutableMapping, MutableSequence
+
+import proto  # type: ignore
+
+
+__protobuf__ = proto.module(
+    package='google.cloud.dataplex.v1',
+    manifest={
+        'ResourceAccessSpec',
+        'DataAccessSpec',
+    },
+)
+
+
+class ResourceAccessSpec(proto.Message):
+    r"""ResourceAccessSpec holds the access control configuration to
+    be enforced on the resources, for example, a Cloud Storage
+    bucket, a BigQuery dataset, or a BigQuery table.
+
+    Attributes:
+        readers (MutableSequence[str]):
+            Optional. The set of principals to be granted
+            reader role on the resource. The format of the
+            strings follows the pattern used by IAM in its
+            bindings: user:{email}, serviceAccount:{email},
+            or group:{email}.
+        writers (MutableSequence[str]):
+            Optional. The set of principals to be granted
+            writer role on the resource.
+        owners (MutableSequence[str]):
+            Optional. The set of principals to be granted
+            owner role on the resource.
+    """
+
+    readers: MutableSequence[str] = proto.RepeatedField(
+        proto.STRING,
+        number=1,
+    )
+    writers: MutableSequence[str] = proto.RepeatedField(
+        proto.STRING,
+        number=2,
+    )
+    owners: MutableSequence[str] = proto.RepeatedField(
+        proto.STRING,
+        number=3,
+    )
+
+
+class DataAccessSpec(proto.Message):
+    r"""DataAccessSpec holds the access control configuration to be
+    enforced on data stored within resources (e.g., rows and columns
+    in BigQuery tables). When associated with data, the data is only
+    accessible to principals explicitly granted access through the
+    DataAccessSpec. Principals with access to the containing
+    resource are not implicitly granted access.
+
+    Attributes:
+        readers (MutableSequence[str]):
+            Optional. The set of principals to be granted
+            reader role on data stored within resources. The
+            format of the strings follows the pattern used by
+            IAM in its bindings: user:{email},
+            serviceAccount:{email}, or group:{email}.
+    """
+
+    readers: MutableSequence[str] = proto.RepeatedField(
+        proto.STRING,
+        number=1,
+    )
+
+
+__all__ = tuple(sorted(__protobuf__.manifest))
diff --git a/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/types/service.py b/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/types/service.py
new file mode 100644
index 000000000000..5493ed665a56
--- /dev/null
+++ b/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/types/service.py
@@ -0,0 +1,1395 @@
+# -*- coding: utf-8 -*-
+# Copyright 2024 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+# +from __future__ import annotations + +from typing import MutableMapping, MutableSequence + +import proto # type: ignore + +from google.cloud.dataplex_v1.types import analyze +from google.cloud.dataplex_v1.types import resources +from google.cloud.dataplex_v1.types import tasks as gcd_tasks +from google.protobuf import field_mask_pb2 # type: ignore +from google.protobuf import timestamp_pb2 # type: ignore + + +__protobuf__ = proto.module( + package='google.cloud.dataplex.v1', + manifest={ + 'CreateLakeRequest', + 'UpdateLakeRequest', + 'DeleteLakeRequest', + 'ListLakesRequest', + 'ListLakesResponse', + 'ListLakeActionsRequest', + 'ListActionsResponse', + 'GetLakeRequest', + 'CreateZoneRequest', + 'UpdateZoneRequest', + 'DeleteZoneRequest', + 'ListZonesRequest', + 'ListZonesResponse', + 'ListZoneActionsRequest', + 'GetZoneRequest', + 'CreateAssetRequest', + 'UpdateAssetRequest', + 'DeleteAssetRequest', + 'ListAssetsRequest', + 'ListAssetsResponse', + 'ListAssetActionsRequest', + 'GetAssetRequest', + 'OperationMetadata', + 'CreateTaskRequest', + 'UpdateTaskRequest', + 'DeleteTaskRequest', + 'ListTasksRequest', + 'ListTasksResponse', + 'GetTaskRequest', + 'GetJobRequest', + 'RunTaskRequest', + 'RunTaskResponse', + 'ListJobsRequest', + 'ListJobsResponse', + 'CancelJobRequest', + 'CreateEnvironmentRequest', + 'UpdateEnvironmentRequest', + 'DeleteEnvironmentRequest', + 'ListEnvironmentsRequest', + 'ListEnvironmentsResponse', + 'GetEnvironmentRequest', + 'ListSessionsRequest', + 'ListSessionsResponse', + }, +) + + +class CreateLakeRequest(proto.Message): + r"""Create lake request. + + Attributes: + parent (str): + Required. The resource name of the lake location, of the + form: projects/{project_number}/locations/{location_id} + where ``location_id`` refers to a GCP region. + lake_id (str): + Required. Lake identifier. This ID will be used to generate + names such as database and dataset names when publishing + metadata to Hive Metastore and BigQuery. + + - Must contain only lowercase letters, numbers and hyphens. + - Must start with a letter. + - Must end with a number or a letter. + - Must be between 1-63 characters. + - Must be unique within the customer project / location. + lake (google.cloud.dataplex_v1.types.Lake): + Required. Lake resource + validate_only (bool): + Optional. Only validate the request, but do + not perform mutations. The default is false. + """ + + parent: str = proto.Field( + proto.STRING, + number=1, + ) + lake_id: str = proto.Field( + proto.STRING, + number=2, + ) + lake: resources.Lake = proto.Field( + proto.MESSAGE, + number=3, + message=resources.Lake, + ) + validate_only: bool = proto.Field( + proto.BOOL, + number=4, + ) + + +class UpdateLakeRequest(proto.Message): + r"""Update lake request. + + Attributes: + update_mask (google.protobuf.field_mask_pb2.FieldMask): + Required. Mask of fields to update. + lake (google.cloud.dataplex_v1.types.Lake): + Required. Update description. Only fields specified in + ``update_mask`` are updated. + validate_only (bool): + Optional. Only validate the request, but do + not perform mutations. The default is false. + """ + + update_mask: field_mask_pb2.FieldMask = proto.Field( + proto.MESSAGE, + number=1, + message=field_mask_pb2.FieldMask, + ) + lake: resources.Lake = proto.Field( + proto.MESSAGE, + number=2, + message=resources.Lake, + ) + validate_only: bool = proto.Field( + proto.BOOL, + number=3, + ) + + +class DeleteLakeRequest(proto.Message): + r"""Delete lake request. + + Attributes: + name (str): + Required. 
The resource name of the lake: + ``projects/{project_number}/locations/{location_id}/lakes/{lake_id}``. + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + + +class ListLakesRequest(proto.Message): + r"""List lakes request. + + Attributes: + parent (str): + Required. The resource name of the lake location, of the + form: ``projects/{project_number}/locations/{location_id}`` + where ``location_id`` refers to a GCP region. + page_size (int): + Optional. Maximum number of Lakes to return. + The service may return fewer than this value. If + unspecified, at most 10 lakes will be returned. + The maximum value is 1000; values above 1000 + will be coerced to 1000. + page_token (str): + Optional. Page token received from a previous ``ListLakes`` + call. Provide this to retrieve the subsequent page. When + paginating, all other parameters provided to ``ListLakes`` + must match the call that provided the page token. + filter (str): + Optional. Filter request. + order_by (str): + Optional. Order by fields for the result. + """ + + parent: str = proto.Field( + proto.STRING, + number=1, + ) + page_size: int = proto.Field( + proto.INT32, + number=2, + ) + page_token: str = proto.Field( + proto.STRING, + number=3, + ) + filter: str = proto.Field( + proto.STRING, + number=4, + ) + order_by: str = proto.Field( + proto.STRING, + number=5, + ) + + +class ListLakesResponse(proto.Message): + r"""List lakes response. + + Attributes: + lakes (MutableSequence[google.cloud.dataplex_v1.types.Lake]): + Lakes under the given parent location. + next_page_token (str): + Token to retrieve the next page of results, + or empty if there are no more results in the + list. + unreachable_locations (MutableSequence[str]): + Locations that could not be reached. + """ + + @property + def raw_page(self): + return self + + lakes: MutableSequence[resources.Lake] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message=resources.Lake, + ) + next_page_token: str = proto.Field( + proto.STRING, + number=2, + ) + unreachable_locations: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=3, + ) + + +class ListLakeActionsRequest(proto.Message): + r"""List lake actions request. + + Attributes: + parent (str): + Required. The resource name of the parent lake: + ``projects/{project_number}/locations/{location_id}/lakes/{lake_id}``. + page_size (int): + Optional. Maximum number of actions to + return. The service may return fewer than this + value. If unspecified, at most 10 actions will + be returned. The maximum value is 1000; values + above 1000 will be coerced to 1000. + page_token (str): + Optional. Page token received from a previous + ``ListLakeActions`` call. Provide this to retrieve the + subsequent page. When paginating, all other parameters + provided to ``ListLakeActions`` must match the call that + provided the page token. + """ + + parent: str = proto.Field( + proto.STRING, + number=1, + ) + page_size: int = proto.Field( + proto.INT32, + number=2, + ) + page_token: str = proto.Field( + proto.STRING, + number=3, + ) + + +class ListActionsResponse(proto.Message): + r"""List actions response. + + Attributes: + actions (MutableSequence[google.cloud.dataplex_v1.types.Action]): + Actions under the given parent + lake/zone/asset. + next_page_token (str): + Token to retrieve the next page of results, + or empty if there are no more results in the + list. 
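+
+    An illustrative (not generated) sketch: the generated service
+    clients wrap this response in a pager that follows
+    ``next_page_token`` automatically; ``client`` and the parent name
+    are hypothetical::
+
+        request = dataplex_v1.ListLakeActionsRequest(
+            parent="projects/123456/locations/us-central1/lakes/my-lake",
+            page_size=100,
+        )
+        for action in client.list_lake_actions(request=request):
+            print(action.category, action.issue)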
+ """ + + @property + def raw_page(self): + return self + + actions: MutableSequence[resources.Action] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message=resources.Action, + ) + next_page_token: str = proto.Field( + proto.STRING, + number=2, + ) + + +class GetLakeRequest(proto.Message): + r"""Get lake request. + + Attributes: + name (str): + Required. The resource name of the lake: + ``projects/{project_number}/locations/{location_id}/lakes/{lake_id}``. + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + + +class CreateZoneRequest(proto.Message): + r"""Create zone request. + + Attributes: + parent (str): + Required. The resource name of the parent lake: + ``projects/{project_number}/locations/{location_id}/lakes/{lake_id}``. + zone_id (str): + Required. Zone identifier. This ID will be used to generate + names such as database and dataset names when publishing + metadata to Hive Metastore and BigQuery. + + - Must contain only lowercase letters, numbers and hyphens. + - Must start with a letter. + - Must end with a number or a letter. + - Must be between 1-63 characters. + - Must be unique across all lakes from all locations in a + project. + - Must not be one of the reserved IDs (i.e. "default", + "global-temp") + zone (google.cloud.dataplex_v1.types.Zone): + Required. Zone resource. + validate_only (bool): + Optional. Only validate the request, but do + not perform mutations. The default is false. + """ + + parent: str = proto.Field( + proto.STRING, + number=1, + ) + zone_id: str = proto.Field( + proto.STRING, + number=2, + ) + zone: resources.Zone = proto.Field( + proto.MESSAGE, + number=3, + message=resources.Zone, + ) + validate_only: bool = proto.Field( + proto.BOOL, + number=4, + ) + + +class UpdateZoneRequest(proto.Message): + r"""Update zone request. + + Attributes: + update_mask (google.protobuf.field_mask_pb2.FieldMask): + Required. Mask of fields to update. + zone (google.cloud.dataplex_v1.types.Zone): + Required. Update description. Only fields specified in + ``update_mask`` are updated. + validate_only (bool): + Optional. Only validate the request, but do + not perform mutations. The default is false. + """ + + update_mask: field_mask_pb2.FieldMask = proto.Field( + proto.MESSAGE, + number=1, + message=field_mask_pb2.FieldMask, + ) + zone: resources.Zone = proto.Field( + proto.MESSAGE, + number=2, + message=resources.Zone, + ) + validate_only: bool = proto.Field( + proto.BOOL, + number=3, + ) + + +class DeleteZoneRequest(proto.Message): + r"""Delete zone request. + + Attributes: + name (str): + Required. The resource name of the zone: + ``projects/{project_number}/locations/{location_id}/lakes/{lake_id}/zones/{zone_id}``. + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + + +class ListZonesRequest(proto.Message): + r"""List zones request. + + Attributes: + parent (str): + Required. The resource name of the parent lake: + ``projects/{project_number}/locations/{location_id}/lakes/{lake_id}``. + page_size (int): + Optional. Maximum number of zones to return. + The service may return fewer than this value. If + unspecified, at most 10 zones will be returned. + The maximum value is 1000; values above 1000 + will be coerced to 1000. + page_token (str): + Optional. Page token received from a previous ``ListZones`` + call. Provide this to retrieve the subsequent page. When + paginating, all other parameters provided to ``ListZones`` + must match the call that provided the page token. + filter (str): + Optional. Filter request. 
+ order_by (str): + Optional. Order by fields for the result. + """ + + parent: str = proto.Field( + proto.STRING, + number=1, + ) + page_size: int = proto.Field( + proto.INT32, + number=2, + ) + page_token: str = proto.Field( + proto.STRING, + number=3, + ) + filter: str = proto.Field( + proto.STRING, + number=4, + ) + order_by: str = proto.Field( + proto.STRING, + number=5, + ) + + +class ListZonesResponse(proto.Message): + r"""List zones response. + + Attributes: + zones (MutableSequence[google.cloud.dataplex_v1.types.Zone]): + Zones under the given parent lake. + next_page_token (str): + Token to retrieve the next page of results, + or empty if there are no more results in the + list. + """ + + @property + def raw_page(self): + return self + + zones: MutableSequence[resources.Zone] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message=resources.Zone, + ) + next_page_token: str = proto.Field( + proto.STRING, + number=2, + ) + + +class ListZoneActionsRequest(proto.Message): + r"""List zone actions request. + + Attributes: + parent (str): + Required. The resource name of the parent zone: + ``projects/{project_number}/locations/{location_id}/lakes/{lake_id}/zones/{zone_id}``. + page_size (int): + Optional. Maximum number of actions to + return. The service may return fewer than this + value. If unspecified, at most 10 actions will + be returned. The maximum value is 1000; values + above 1000 will be coerced to 1000. + page_token (str): + Optional. Page token received from a previous + ``ListZoneActions`` call. Provide this to retrieve the + subsequent page. When paginating, all other parameters + provided to ``ListZoneActions`` must match the call that + provided the page token. + """ + + parent: str = proto.Field( + proto.STRING, + number=1, + ) + page_size: int = proto.Field( + proto.INT32, + number=2, + ) + page_token: str = proto.Field( + proto.STRING, + number=3, + ) + + +class GetZoneRequest(proto.Message): + r"""Get zone request. + + Attributes: + name (str): + Required. The resource name of the zone: + ``projects/{project_number}/locations/{location_id}/lakes/{lake_id}/zones/{zone_id}``. + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + + +class CreateAssetRequest(proto.Message): + r"""Create asset request. + + Attributes: + parent (str): + Required. The resource name of the parent zone: + ``projects/{project_number}/locations/{location_id}/lakes/{lake_id}/zones/{zone_id}``. + asset_id (str): + Required. Asset identifier. This ID will be used to generate + names such as table names when publishing metadata to Hive + Metastore and BigQuery. + + - Must contain only lowercase letters, numbers and hyphens. + - Must start with a letter. + - Must end with a number or a letter. + - Must be between 1-63 characters. + - Must be unique within the zone. + asset (google.cloud.dataplex_v1.types.Asset): + Required. Asset resource. + validate_only (bool): + Optional. Only validate the request, but do + not perform mutations. The default is false. + """ + + parent: str = proto.Field( + proto.STRING, + number=1, + ) + asset_id: str = proto.Field( + proto.STRING, + number=2, + ) + asset: resources.Asset = proto.Field( + proto.MESSAGE, + number=3, + message=resources.Asset, + ) + validate_only: bool = proto.Field( + proto.BOOL, + number=4, + ) + + +class UpdateAssetRequest(proto.Message): + r"""Update asset request. + + Attributes: + update_mask (google.protobuf.field_mask_pb2.FieldMask): + Required. Mask of fields to update. 
+        asset (google.cloud.dataplex_v1.types.Asset):
+            Required. Update description. Only fields specified in
+            ``update_mask`` are updated.
+        validate_only (bool):
+            Optional. Only validate the request, but do
+            not perform mutations. The default is false.
+    """
+
+    update_mask: field_mask_pb2.FieldMask = proto.Field(
+        proto.MESSAGE,
+        number=1,
+        message=field_mask_pb2.FieldMask,
+    )
+    asset: resources.Asset = proto.Field(
+        proto.MESSAGE,
+        number=2,
+        message=resources.Asset,
+    )
+    validate_only: bool = proto.Field(
+        proto.BOOL,
+        number=3,
+    )
+
+
+class DeleteAssetRequest(proto.Message):
+    r"""Delete asset request.
+
+    Attributes:
+        name (str):
+            Required. The resource name of the asset:
+            ``projects/{project_number}/locations/{location_id}/lakes/{lake_id}/zones/{zone_id}/assets/{asset_id}``.
+    """
+
+    name: str = proto.Field(
+        proto.STRING,
+        number=1,
+    )
+
+
+class ListAssetsRequest(proto.Message):
+    r"""List assets request.
+
+    Attributes:
+        parent (str):
+            Required. The resource name of the parent zone:
+            ``projects/{project_number}/locations/{location_id}/lakes/{lake_id}/zones/{zone_id}``.
+        page_size (int):
+            Optional. Maximum number of assets to return.
+            The service may return fewer than this value. If
+            unspecified, at most 10 assets will be returned.
+            The maximum value is 1000; values above 1000
+            will be coerced to 1000.
+        page_token (str):
+            Optional. Page token received from a previous ``ListAssets``
+            call. Provide this to retrieve the subsequent page. When
+            paginating, all other parameters provided to ``ListAssets``
+            must match the call that provided the page token.
+        filter (str):
+            Optional. Filter request.
+        order_by (str):
+            Optional. Order by fields for the result.
+    """
+
+    parent: str = proto.Field(
+        proto.STRING,
+        number=1,
+    )
+    page_size: int = proto.Field(
+        proto.INT32,
+        number=2,
+    )
+    page_token: str = proto.Field(
+        proto.STRING,
+        number=3,
+    )
+    filter: str = proto.Field(
+        proto.STRING,
+        number=4,
+    )
+    order_by: str = proto.Field(
+        proto.STRING,
+        number=5,
+    )
+
+
+class ListAssetsResponse(proto.Message):
+    r"""List assets response.
+
+    Attributes:
+        assets (MutableSequence[google.cloud.dataplex_v1.types.Asset]):
+            Assets under the given parent zone.
+        next_page_token (str):
+            Token to retrieve the next page of results,
+            or empty if there are no more results in the
+            list.
+    """
+
+    @property
+    def raw_page(self):
+        return self
+
+    assets: MutableSequence[resources.Asset] = proto.RepeatedField(
+        proto.MESSAGE,
+        number=1,
+        message=resources.Asset,
+    )
+    next_page_token: str = proto.Field(
+        proto.STRING,
+        number=2,
+    )
+
+
+class ListAssetActionsRequest(proto.Message):
+    r"""List asset actions request.
+
+    Attributes:
+        parent (str):
+            Required. The resource name of the parent asset:
+            ``projects/{project_number}/locations/{location_id}/lakes/{lake_id}/zones/{zone_id}/assets/{asset_id}``.
+        page_size (int):
+            Optional. Maximum number of actions to
+            return. The service may return fewer than this
+            value. If unspecified, at most 10 actions will
+            be returned. The maximum value is 1000; values
+            above 1000 will be coerced to 1000.
+        page_token (str):
+            Optional. Page token received from a previous
+            ``ListAssetActions`` call. Provide this to retrieve the
+            subsequent page. When paginating, all other parameters
+            provided to ``ListAssetActions`` must match the call that
+            provided the page token.
+ """ + + parent: str = proto.Field( + proto.STRING, + number=1, + ) + page_size: int = proto.Field( + proto.INT32, + number=2, + ) + page_token: str = proto.Field( + proto.STRING, + number=3, + ) + + +class GetAssetRequest(proto.Message): + r"""Get asset request. + + Attributes: + name (str): + Required. The resource name of the asset: + ``projects/{project_number}/locations/{location_id}/lakes/{lake_id}/zones/{zone_id}/assets/{asset_id}``. + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + + +class OperationMetadata(proto.Message): + r"""Represents the metadata of a long-running operation. + + Attributes: + create_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. The time the operation was + created. + end_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. The time the operation finished + running. + target (str): + Output only. Server-defined resource path for + the target of the operation. + verb (str): + Output only. Name of the verb executed by the + operation. + status_message (str): + Output only. Human-readable status of the + operation, if any. + requested_cancellation (bool): + Output only. Identifies whether the user has requested + cancellation of the operation. Operations that have + successfully been cancelled have [Operation.error][] value + with a [google.rpc.Status.code][google.rpc.Status.code] of + 1, corresponding to ``Code.CANCELLED``. + api_version (str): + Output only. API version used to start the + operation. + """ + + create_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=1, + message=timestamp_pb2.Timestamp, + ) + end_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=2, + message=timestamp_pb2.Timestamp, + ) + target: str = proto.Field( + proto.STRING, + number=3, + ) + verb: str = proto.Field( + proto.STRING, + number=4, + ) + status_message: str = proto.Field( + proto.STRING, + number=5, + ) + requested_cancellation: bool = proto.Field( + proto.BOOL, + number=6, + ) + api_version: str = proto.Field( + proto.STRING, + number=7, + ) + + +class CreateTaskRequest(proto.Message): + r"""Create task request. + + Attributes: + parent (str): + Required. The resource name of the parent lake: + ``projects/{project_number}/locations/{location_id}/lakes/{lake_id}``. + task_id (str): + Required. Task identifier. + task (google.cloud.dataplex_v1.types.Task): + Required. Task resource. + validate_only (bool): + Optional. Only validate the request, but do + not perform mutations. The default is false. + """ + + parent: str = proto.Field( + proto.STRING, + number=1, + ) + task_id: str = proto.Field( + proto.STRING, + number=2, + ) + task: gcd_tasks.Task = proto.Field( + proto.MESSAGE, + number=3, + message=gcd_tasks.Task, + ) + validate_only: bool = proto.Field( + proto.BOOL, + number=4, + ) + + +class UpdateTaskRequest(proto.Message): + r"""Update task request. + + Attributes: + update_mask (google.protobuf.field_mask_pb2.FieldMask): + Required. Mask of fields to update. + task (google.cloud.dataplex_v1.types.Task): + Required. Update description. Only fields specified in + ``update_mask`` are updated. + validate_only (bool): + Optional. Only validate the request, but do + not perform mutations. The default is false. 
+ """ + + update_mask: field_mask_pb2.FieldMask = proto.Field( + proto.MESSAGE, + number=1, + message=field_mask_pb2.FieldMask, + ) + task: gcd_tasks.Task = proto.Field( + proto.MESSAGE, + number=2, + message=gcd_tasks.Task, + ) + validate_only: bool = proto.Field( + proto.BOOL, + number=3, + ) + + +class DeleteTaskRequest(proto.Message): + r"""Delete task request. + + Attributes: + name (str): + Required. The resource name of the task: + ``projects/{project_number}/locations/{location_id}/lakes/{lake_id}/task/{task_id}``. + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + + +class ListTasksRequest(proto.Message): + r"""List tasks request. + + Attributes: + parent (str): + Required. The resource name of the parent lake: + ``projects/{project_number}/locations/{location_id}/lakes/{lake_id}``. + page_size (int): + Optional. Maximum number of tasks to return. + The service may return fewer than this value. If + unspecified, at most 10 tasks will be returned. + The maximum value is 1000; values above 1000 + will be coerced to 1000. + page_token (str): + Optional. Page token received from a previous ``ListZones`` + call. Provide this to retrieve the subsequent page. When + paginating, all other parameters provided to ``ListZones`` + must match the call that provided the page token. + filter (str): + Optional. Filter request. + order_by (str): + Optional. Order by fields for the result. + """ + + parent: str = proto.Field( + proto.STRING, + number=1, + ) + page_size: int = proto.Field( + proto.INT32, + number=2, + ) + page_token: str = proto.Field( + proto.STRING, + number=3, + ) + filter: str = proto.Field( + proto.STRING, + number=4, + ) + order_by: str = proto.Field( + proto.STRING, + number=5, + ) + + +class ListTasksResponse(proto.Message): + r"""List tasks response. + + Attributes: + tasks (MutableSequence[google.cloud.dataplex_v1.types.Task]): + Tasks under the given parent lake. + next_page_token (str): + Token to retrieve the next page of results, + or empty if there are no more results in the + list. + unreachable_locations (MutableSequence[str]): + Locations that could not be reached. + """ + + @property + def raw_page(self): + return self + + tasks: MutableSequence[gcd_tasks.Task] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message=gcd_tasks.Task, + ) + next_page_token: str = proto.Field( + proto.STRING, + number=2, + ) + unreachable_locations: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=3, + ) + + +class GetTaskRequest(proto.Message): + r"""Get task request. + + Attributes: + name (str): + Required. The resource name of the task: + ``projects/{project_number}/locations/{location_id}/lakes/{lake_id}/tasks/{tasks_id}``. + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + + +class GetJobRequest(proto.Message): + r"""Get job request. + + Attributes: + name (str): + Required. The resource name of the job: + ``projects/{project_number}/locations/{location_id}/lakes/{lake_id}/tasks/{task_id}/jobs/{job_id}``. + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + + +class RunTaskRequest(proto.Message): + r""" + + Attributes: + name (str): + Required. The resource name of the task: + ``projects/{project_number}/locations/{location_id}/lakes/{lake_id}/tasks/{task_id}``. + labels (MutableMapping[str, str]): + Optional. User-defined labels for the task. + If the map is left empty, the task will run with + existing labels from task definition. 
If the map
+            contains an entry with a new key, it will be
+            added to the existing set of labels. If the map
+            contains an entry whose label key already exists
+            in the task definition, the task will run with
+            the new label value for that entry. To clear an
+            existing label, set its value explicitly to a
+            hyphen "-". The label value cannot be empty.
+        args (MutableMapping[str, str]):
+            Optional. Execution spec arguments. If the
+            map is left empty, the task will run with
+            existing execution spec args from the task
+            definition. If the map contains an entry with a
+            new key, it will be added to the existing set
+            of args. If the map contains an entry whose arg
+            key already exists in the task definition, the
+            task will run with the new arg value for that
+            entry. To clear an existing arg, set its value
+            explicitly to a hyphen "-". The arg value cannot
+            be empty.
+    """
+
+    name: str = proto.Field(
+        proto.STRING,
+        number=1,
+    )
+    labels: MutableMapping[str, str] = proto.MapField(
+        proto.STRING,
+        proto.STRING,
+        number=3,
+    )
+    args: MutableMapping[str, str] = proto.MapField(
+        proto.STRING,
+        proto.STRING,
+        number=4,
+    )
+
+
+class RunTaskResponse(proto.Message):
+    r"""Run task response.
+
+    Attributes:
+        job (google.cloud.dataplex_v1.types.Job):
+            Jobs created by the RunTask API.
+    """
+
+    job: gcd_tasks.Job = proto.Field(
+        proto.MESSAGE,
+        number=1,
+        message=gcd_tasks.Job,
+    )
+
+
+class ListJobsRequest(proto.Message):
+    r"""List jobs request.
+
+    Attributes:
+        parent (str):
+            Required. The resource name of the parent task:
+            ``projects/{project_number}/locations/{location_id}/lakes/{lake_id}/tasks/{task_id}``.
+        page_size (int):
+            Optional. Maximum number of jobs to return.
+            The service may return fewer than this value. If
+            unspecified, at most 10 jobs will be returned.
+            The maximum value is 1000; values above 1000
+            will be coerced to 1000.
+        page_token (str):
+            Optional. Page token received from a previous ``ListJobs``
+            call. Provide this to retrieve the subsequent page. When
+            paginating, all other parameters provided to ``ListJobs``
+            must match the call that provided the page token.
+    """
+
+    parent: str = proto.Field(
+        proto.STRING,
+        number=1,
+    )
+    page_size: int = proto.Field(
+        proto.INT32,
+        number=2,
+    )
+    page_token: str = proto.Field(
+        proto.STRING,
+        number=3,
+    )
+
+
+class ListJobsResponse(proto.Message):
+    r"""List jobs response.
+
+    Attributes:
+        jobs (MutableSequence[google.cloud.dataplex_v1.types.Job]):
+            Jobs under a given task.
+        next_page_token (str):
+            Token to retrieve the next page of results,
+            or empty if there are no more results in the
+            list.
+    """
+
+    @property
+    def raw_page(self):
+        return self
+
+    jobs: MutableSequence[gcd_tasks.Job] = proto.RepeatedField(
+        proto.MESSAGE,
+        number=1,
+        message=gcd_tasks.Job,
+    )
+    next_page_token: str = proto.Field(
+        proto.STRING,
+        number=2,
+    )
+
+
+class CancelJobRequest(proto.Message):
+    r"""Cancel task jobs.
+
+    Attributes:
+        name (str):
+            Required. The resource name of the job:
+            ``projects/{project_number}/locations/{location_id}/lakes/{lake_id}/tasks/{task_id}/jobs/{job_id}``.
+    """
+
+    name: str = proto.Field(
+        proto.STRING,
+        number=1,
+    )
+
+
+class CreateEnvironmentRequest(proto.Message):
+    r"""Create environment request.
+
+    Attributes:
+        parent (str):
+            Required. The resource name of the parent lake:
+            ``projects/{project_id}/locations/{location_id}/lakes/{lake_id}``.
+        environment_id (str):
+            Required. Environment identifier.
+ + - Must contain only lowercase letters, numbers and hyphens. + - Must start with a letter. + - Must be between 1-63 characters. + - Must end with a number or a letter. + - Must be unique within the lake. + environment (google.cloud.dataplex_v1.types.Environment): + Required. Environment resource. + validate_only (bool): + Optional. Only validate the request, but do + not perform mutations. The default is false. + """ + + parent: str = proto.Field( + proto.STRING, + number=1, + ) + environment_id: str = proto.Field( + proto.STRING, + number=2, + ) + environment: analyze.Environment = proto.Field( + proto.MESSAGE, + number=3, + message=analyze.Environment, + ) + validate_only: bool = proto.Field( + proto.BOOL, + number=4, + ) + + +class UpdateEnvironmentRequest(proto.Message): + r"""Update environment request. + + Attributes: + update_mask (google.protobuf.field_mask_pb2.FieldMask): + Required. Mask of fields to update. + environment (google.cloud.dataplex_v1.types.Environment): + Required. Update description. Only fields specified in + ``update_mask`` are updated. + validate_only (bool): + Optional. Only validate the request, but do + not perform mutations. The default is false. + """ + + update_mask: field_mask_pb2.FieldMask = proto.Field( + proto.MESSAGE, + number=1, + message=field_mask_pb2.FieldMask, + ) + environment: analyze.Environment = proto.Field( + proto.MESSAGE, + number=2, + message=analyze.Environment, + ) + validate_only: bool = proto.Field( + proto.BOOL, + number=3, + ) + + +class DeleteEnvironmentRequest(proto.Message): + r"""Delete environment request. + + Attributes: + name (str): + Required. The resource name of the environment: + ``projects/{project_id}/locations/{location_id}/lakes/{lake_id}/environments/{environment_id}``. + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + + +class ListEnvironmentsRequest(proto.Message): + r"""List environments request. + + Attributes: + parent (str): + Required. The resource name of the parent lake: + ``projects/{project_id}/locations/{location_id}/lakes/{lake_id}``. + page_size (int): + Optional. Maximum number of environments to + return. The service may return fewer than this + value. If unspecified, at most 10 environments + will be returned. The maximum value is 1000; + values above 1000 will be coerced to 1000. + page_token (str): + Optional. Page token received from a previous + ``ListEnvironments`` call. Provide this to retrieve the + subsequent page. When paginating, all other parameters + provided to ``ListEnvironments`` must match the call that + provided the page token. + filter (str): + Optional. Filter request. + order_by (str): + Optional. Order by fields for the result. + """ + + parent: str = proto.Field( + proto.STRING, + number=1, + ) + page_size: int = proto.Field( + proto.INT32, + number=2, + ) + page_token: str = proto.Field( + proto.STRING, + number=3, + ) + filter: str = proto.Field( + proto.STRING, + number=4, + ) + order_by: str = proto.Field( + proto.STRING, + number=5, + ) + + +class ListEnvironmentsResponse(proto.Message): + r"""List environments response. + + Attributes: + environments (MutableSequence[google.cloud.dataplex_v1.types.Environment]): + Environments under the given parent lake. + next_page_token (str): + Token to retrieve the next page of results, + or empty if there are no more results in the + list. 
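+
+    An illustrative (not generated) sketch; ``client`` and the lake name
+    are hypothetical. Listing environments ordered by name::
+
+        request = dataplex_v1.ListEnvironmentsRequest(
+            parent="projects/123456/locations/us-central1/lakes/my-lake",
+            order_by="name",
+        )
+        for environment in client.list_environments(request=request):
+            print(environment.name)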
+ """ + + @property + def raw_page(self): + return self + + environments: MutableSequence[analyze.Environment] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message=analyze.Environment, + ) + next_page_token: str = proto.Field( + proto.STRING, + number=2, + ) + + +class GetEnvironmentRequest(proto.Message): + r"""Get environment request. + + Attributes: + name (str): + Required. The resource name of the environment: + ``projects/{project_id}/locations/{location_id}/lakes/{lake_id}/environments/{environment_id}``. + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + + +class ListSessionsRequest(proto.Message): + r"""List sessions request. + + Attributes: + parent (str): + Required. The resource name of the parent environment: + ``projects/{project_number}/locations/{location_id}/lakes/{lake_id}/environment/{environment_id}``. + page_size (int): + Optional. Maximum number of sessions to + return. The service may return fewer than this + value. If unspecified, at most 10 sessions will + be returned. The maximum value is 1000; values + above 1000 will be coerced to 1000. + page_token (str): + Optional. Page token received from a previous + ``ListSessions`` call. Provide this to retrieve the + subsequent page. When paginating, all other parameters + provided to ``ListSessions`` must match the call that + provided the page token. + filter (str): + Optional. Filter request. The following ``mode`` filter is + supported to return only the sessions belonging to the + requester when the mode is USER and return sessions of all + the users when the mode is ADMIN. When no filter is sent + default to USER mode. NOTE: When the mode is ADMIN, the + requester should have + ``dataplex.environments.listAllSessions`` permission to list + all sessions, in absence of the permission, the request + fails. + + mode = ADMIN \| USER + """ + + parent: str = proto.Field( + proto.STRING, + number=1, + ) + page_size: int = proto.Field( + proto.INT32, + number=2, + ) + page_token: str = proto.Field( + proto.STRING, + number=3, + ) + filter: str = proto.Field( + proto.STRING, + number=4, + ) + + +class ListSessionsResponse(proto.Message): + r"""List sessions response. + + Attributes: + sessions (MutableSequence[google.cloud.dataplex_v1.types.Session]): + Sessions under a given environment. + next_page_token (str): + Token to retrieve the next page of results, + or empty if there are no more results in the + list. + """ + + @property + def raw_page(self): + return self + + sessions: MutableSequence[analyze.Session] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message=analyze.Session, + ) + next_page_token: str = proto.Field( + proto.STRING, + number=2, + ) + + +__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/types/tasks.py b/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/types/tasks.py new file mode 100644 index 000000000000..3f0b83c776fc --- /dev/null +++ b/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/types/tasks.py @@ -0,0 +1,751 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from __future__ import annotations + +from typing import MutableMapping, MutableSequence + +import proto # type: ignore + +from google.cloud.dataplex_v1.types import resources +from google.protobuf import duration_pb2 # type: ignore +from google.protobuf import timestamp_pb2 # type: ignore + + +__protobuf__ = proto.module( + package='google.cloud.dataplex.v1', + manifest={ + 'Task', + 'Job', + }, +) + + +class Task(proto.Message): + r"""A task represents a user-visible job. + + This message has `oneof`_ fields (mutually exclusive fields). + For each oneof, at most one member field can be set at the same time. + Setting any member of the oneof automatically clears all other + members. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + name (str): + Output only. The relative resource name of the task, of the + form: + projects/{project_number}/locations/{location_id}/lakes/{lake_id}/ + tasks/{task_id}. + uid (str): + Output only. System generated globally unique + ID for the task. This ID will be different if + the task is deleted and re-created with the same + name. + create_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. The time when the task was + created. + update_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. The time when the task was last + updated. + description (str): + Optional. Description of the task. + display_name (str): + Optional. User friendly display name. + state (google.cloud.dataplex_v1.types.State): + Output only. Current state of the task. + labels (MutableMapping[str, str]): + Optional. User-defined labels for the task. + trigger_spec (google.cloud.dataplex_v1.types.Task.TriggerSpec): + Required. Spec related to how often and when + a task should be triggered. + execution_spec (google.cloud.dataplex_v1.types.Task.ExecutionSpec): + Required. Spec related to how a task is + executed. + execution_status (google.cloud.dataplex_v1.types.Task.ExecutionStatus): + Output only. Status of the latest task + executions. + spark (google.cloud.dataplex_v1.types.Task.SparkTaskConfig): + Config related to running custom Spark tasks. + + This field is a member of `oneof`_ ``config``. + notebook (google.cloud.dataplex_v1.types.Task.NotebookTaskConfig): + Config related to running scheduled + Notebooks. + + This field is a member of `oneof`_ ``config``. + """ + + class InfrastructureSpec(proto.Message): + r"""Configuration for the underlying infrastructure used to run + workloads. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + batch (google.cloud.dataplex_v1.types.Task.InfrastructureSpec.BatchComputeResources): + Compute resources needed for a Task when + using Dataproc Serverless. + + This field is a member of `oneof`_ ``resources``. + container_image (google.cloud.dataplex_v1.types.Task.InfrastructureSpec.ContainerImageRuntime): + Container Image Runtime Configuration. + + This field is a member of `oneof`_ ``runtime``. 
+            vpc_network (google.cloud.dataplex_v1.types.Task.InfrastructureSpec.VpcNetwork):
+                VPC network.
+
+                This field is a member of `oneof`_ ``network``.
+        """
+
+        class BatchComputeResources(proto.Message):
+            r"""Batch compute resources associated with the task.
+
+            Attributes:
+                executors_count (int):
+                    Optional. Total number of job executors. Executor Count
+                    should be between 2 and 100. [Default=2]
+                max_executors_count (int):
+                    Optional. Max configurable executors. If max_executors_count
+                    > executors_count, then auto-scaling is enabled. Max
+                    Executor Count should be between 2 and 1000. [Default=1000]
+            """
+
+            executors_count: int = proto.Field(
+                proto.INT32,
+                number=1,
+            )
+            max_executors_count: int = proto.Field(
+                proto.INT32,
+                number=2,
+            )
+
+        class ContainerImageRuntime(proto.Message):
+            r"""Container Image Runtime Configuration used with Batch
+            execution.
+
+            Attributes:
+                image (str):
+                    Optional. Container image to use.
+                java_jars (MutableSequence[str]):
+                    Optional. A list of Java JARs to add to the
+                    classpath. Valid input includes Cloud Storage
+                    URIs to JAR binaries. For example,
+                    gs://bucket-name/my/path/to/file.jar
+                python_packages (MutableSequence[str]):
+                    Optional. A list of Python packages to be
+                    installed. Valid formats include Cloud Storage
+                    URI to a pip installable library. For example,
+                    gs://bucket-name/my/path/to/lib.tar.gz
+                properties (MutableMapping[str, str]):
+                    Optional. Override to common configuration of open source
+                    components installed on the Dataproc cluster. The properties
+                    to set on daemon config files. Property keys are specified
+                    in ``prefix:property`` format, for example
+                    ``core:hadoop.tmp.dir``. For more information, see `Cluster
+                    properties <https://cloud.google.com/dataproc/docs/concepts/cluster-properties>`__.
+            """
+
+            image: str = proto.Field(
+                proto.STRING,
+                number=1,
+            )
+            java_jars: MutableSequence[str] = proto.RepeatedField(
+                proto.STRING,
+                number=2,
+            )
+            python_packages: MutableSequence[str] = proto.RepeatedField(
+                proto.STRING,
+                number=3,
+            )
+            properties: MutableMapping[str, str] = proto.MapField(
+                proto.STRING,
+                proto.STRING,
+                number=4,
+            )
+
+        class VpcNetwork(proto.Message):
+            r"""Cloud VPC Network used to run the infrastructure.
+
+            This message has `oneof`_ fields (mutually exclusive fields).
+            For each oneof, at most one member field can be set at the same time.
+            Setting any member of the oneof automatically clears all other
+            members.
+
+            .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields
+
+            Attributes:
+                network (str):
+                    Optional. The Cloud VPC network in which the
+                    job is run. By default, the Cloud VPC network
+                    named Default within the project is used.
+
+                    This field is a member of `oneof`_ ``network_name``.
+                sub_network (str):
+                    Optional. The Cloud VPC sub-network in which
+                    the job is run.
+
+                    This field is a member of `oneof`_ ``network_name``.
+                network_tags (MutableSequence[str]):
+                    Optional. List of network tags to apply to
+                    the job.
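+
+            An illustrative (not generated) sketch; the subnetwork path
+            is hypothetical. ``network`` and ``sub_network`` share a
+            oneof, so set exactly one of them::
+
+                infra = dataplex_v1.Task.InfrastructureSpec(
+                    batch=dataplex_v1.Task.InfrastructureSpec.BatchComputeResources(
+                        executors_count=2,
+                        max_executors_count=10,
+                    ),
+                    vpc_network=dataplex_v1.Task.InfrastructureSpec.VpcNetwork(
+                        sub_network="projects/123456/regions/us-central1/subnetworks/dataplex",
+                        network_tags=["dataplex-jobs"],
+                    ),
+                )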
+ """ + + network: str = proto.Field( + proto.STRING, + number=1, + oneof='network_name', + ) + sub_network: str = proto.Field( + proto.STRING, + number=2, + oneof='network_name', + ) + network_tags: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=3, + ) + + batch: 'Task.InfrastructureSpec.BatchComputeResources' = proto.Field( + proto.MESSAGE, + number=52, + oneof='resources', + message='Task.InfrastructureSpec.BatchComputeResources', + ) + container_image: 'Task.InfrastructureSpec.ContainerImageRuntime' = proto.Field( + proto.MESSAGE, + number=101, + oneof='runtime', + message='Task.InfrastructureSpec.ContainerImageRuntime', + ) + vpc_network: 'Task.InfrastructureSpec.VpcNetwork' = proto.Field( + proto.MESSAGE, + number=150, + oneof='network', + message='Task.InfrastructureSpec.VpcNetwork', + ) + + class TriggerSpec(proto.Message): + r"""Task scheduling and trigger settings. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + type_ (google.cloud.dataplex_v1.types.Task.TriggerSpec.Type): + Required. Immutable. Trigger type of the + user-specified Task. + start_time (google.protobuf.timestamp_pb2.Timestamp): + Optional. The first run of the task will be after this time. + If not specified, the task will run shortly after being + submitted if ON_DEMAND and based on the schedule if + RECURRING. + disabled (bool): + Optional. Prevent the task from executing. + This does not cancel already running tasks. It + is intended to temporarily disable RECURRING + tasks. + max_retries (int): + Optional. Number of retry attempts before + aborting. Set to zero to never attempt to retry + a failed task. + schedule (str): + Optional. Cron schedule (https://en.wikipedia.org/wiki/Cron) + for running tasks periodically. To explicitly set a timezone + to the cron tab, apply a prefix in the cron tab: + "CRON_TZ=${IANA_TIME_ZONE}" or "TZ=${IANA_TIME_ZONE}". The + ${IANA_TIME_ZONE} may only be a valid string from IANA time + zone database. For example, + ``CRON_TZ=America/New_York 1 * * * *``, or + ``TZ=America/New_York 1 * * * *``. This field is required + for RECURRING tasks. + + This field is a member of `oneof`_ ``trigger``. + """ + class Type(proto.Enum): + r"""Determines how often and when the job will run. + + Values: + TYPE_UNSPECIFIED (0): + Unspecified trigger type. + ON_DEMAND (1): + The task runs one-time shortly after Task + Creation. + RECURRING (2): + The task is scheduled to run periodically. + """ + TYPE_UNSPECIFIED = 0 + ON_DEMAND = 1 + RECURRING = 2 + + type_: 'Task.TriggerSpec.Type' = proto.Field( + proto.ENUM, + number=5, + enum='Task.TriggerSpec.Type', + ) + start_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=6, + message=timestamp_pb2.Timestamp, + ) + disabled: bool = proto.Field( + proto.BOOL, + number=4, + ) + max_retries: int = proto.Field( + proto.INT32, + number=7, + ) + schedule: str = proto.Field( + proto.STRING, + number=100, + oneof='trigger', + ) + + class ExecutionSpec(proto.Message): + r"""Execution related settings, like retry and service_account. + + Attributes: + args (MutableMapping[str, str]): + Optional. The arguments to pass to the task. The args can + use placeholders of the format ${placeholder} as part of + key/value string. These will be interpolated before passing + the args to the driver. Currently supported placeholders: + + - ${task_id} + - ${job_time} To pass positional args, set the key as + TASK_ARGS. 
+        """
+
+        args: MutableMapping[str, str] = proto.MapField(
+            proto.STRING,
+            proto.STRING,
+            number=4,
+        )
+        service_account: str = proto.Field(
+            proto.STRING,
+            number=5,
+        )
+        project: str = proto.Field(
+            proto.STRING,
+            number=7,
+        )
+        max_job_execution_lifetime: duration_pb2.Duration = proto.Field(
+            proto.MESSAGE,
+            number=8,
+            message=duration_pb2.Duration,
+        )
+        kms_key: str = proto.Field(
+            proto.STRING,
+            number=9,
+        )
+
+    class SparkTaskConfig(proto.Message):
+        r"""User-specified config for running a Spark task.
+
+        This message has `oneof`_ fields (mutually exclusive fields).
+        For each oneof, at most one member field can be set at the same time.
+        Setting any member of the oneof automatically clears all other
+        members.
+
+        .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields
+
+        Attributes:
+            main_jar_file_uri (str):
+                The Cloud Storage URI of the jar file that contains the main
+                class. The execution args are passed in as a sequence of
+                named process arguments (``--key=value``).
+
+                This field is a member of `oneof`_ ``driver``.
+            main_class (str):
+                The name of the driver's main class. The jar file that
+                contains the class must be in the default CLASSPATH or
+                specified in ``jar_file_uris``. The execution args are
+                passed in as a sequence of named process arguments
+                (``--key=value``).
+
+                This field is a member of `oneof`_ ``driver``.
+            python_script_file (str):
+                The Cloud Storage URI of the main Python file to use as the
+                driver. Must be a .py file. The execution args are passed in
+                as a sequence of named process arguments (``--key=value``).
+
+                This field is a member of `oneof`_ ``driver``.
+            sql_script_file (str):
+                A reference to a query file. This should be the Cloud
+                Storage URI of the query file. The execution args are used
+                to declare a set of script variables (``set key="value";``).
+
+                This field is a member of `oneof`_ ``driver``.
+            sql_script (str):
+                The query text. The execution args are used to declare a set
+                of script variables (``set key="value";``).
+
+                This field is a member of `oneof`_ ``driver``.
+            file_uris (MutableSequence[str]):
+                Optional. Cloud Storage URIs of files to be
+                placed in the working directory of each
+                executor.
+            archive_uris (MutableSequence[str]):
+                Optional. Cloud Storage URIs of archives to
+                be extracted into the working directory of each
+                executor. Supported file types: .jar, .tar,
+                .tar.gz, .tgz, and .zip.
+            infrastructure_spec (google.cloud.dataplex_v1.types.Task.InfrastructureSpec):
+                Optional. Infrastructure specification for
+                the execution.
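+
+        For example, a jar-driven Spark task might be configured as follows
+        (values are illustrative; exactly one ``driver`` field may be set)::
+
+            spark = dataplex_v1.Task.SparkTaskConfig(
+                main_jar_file_uri="gs://my-bucket/path/to/job.jar",
+                file_uris=["gs://my-bucket/path/to/config.json"],
+            )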
+        """
+
+        main_jar_file_uri: str = proto.Field(
+            proto.STRING,
+            number=100,
+            oneof='driver',
+        )
+        main_class: str = proto.Field(
+            proto.STRING,
+            number=101,
+            oneof='driver',
+        )
+        python_script_file: str = proto.Field(
+            proto.STRING,
+            number=102,
+            oneof='driver',
+        )
+        sql_script_file: str = proto.Field(
+            proto.STRING,
+            number=104,
+            oneof='driver',
+        )
+        sql_script: str = proto.Field(
+            proto.STRING,
+            number=105,
+            oneof='driver',
+        )
+        file_uris: MutableSequence[str] = proto.RepeatedField(
+            proto.STRING,
+            number=3,
+        )
+        archive_uris: MutableSequence[str] = proto.RepeatedField(
+            proto.STRING,
+            number=4,
+        )
+        infrastructure_spec: 'Task.InfrastructureSpec' = proto.Field(
+            proto.MESSAGE,
+            number=6,
+            message='Task.InfrastructureSpec',
+        )
+
+    class NotebookTaskConfig(proto.Message):
+        r"""Config for running scheduled notebooks.
+
+        Attributes:
+            notebook (str):
+                Required. Path to input notebook. This can be the Cloud
+                Storage URI of the notebook file or the path to a Notebook
+                Content. The execution args are accessible as environment
+                variables (``TASK_key=value``).
+            infrastructure_spec (google.cloud.dataplex_v1.types.Task.InfrastructureSpec):
+                Optional. Infrastructure specification for
+                the execution.
+            file_uris (MutableSequence[str]):
+                Optional. Cloud Storage URIs of files to be
+                placed in the working directory of each
+                executor.
+            archive_uris (MutableSequence[str]):
+                Optional. Cloud Storage URIs of archives to
+                be extracted into the working directory of each
+                executor. Supported file types: .jar, .tar,
+                .tar.gz, .tgz, and .zip.
+        """
+
+        notebook: str = proto.Field(
+            proto.STRING,
+            number=4,
+        )
+        infrastructure_spec: 'Task.InfrastructureSpec' = proto.Field(
+            proto.MESSAGE,
+            number=3,
+            message='Task.InfrastructureSpec',
+        )
+        file_uris: MutableSequence[str] = proto.RepeatedField(
+            proto.STRING,
+            number=5,
+        )
+        archive_uris: MutableSequence[str] = proto.RepeatedField(
+            proto.STRING,
+            number=6,
+        )
+
+    class ExecutionStatus(proto.Message):
+        r"""Status of the task execution (e.g. Jobs).
+
+        Attributes:
+            update_time (google.protobuf.timestamp_pb2.Timestamp):
+                Output only. Last update time of the status.
+            latest_job (google.cloud.dataplex_v1.types.Job):
+                Output only. Latest job execution.
+        """
+
+        update_time: timestamp_pb2.Timestamp = proto.Field(
+            proto.MESSAGE,
+            number=3,
+            message=timestamp_pb2.Timestamp,
+        )
+        latest_job: 'Job' = proto.Field(
+            proto.MESSAGE,
+            number=9,
+            message='Job',
+        )
+
+    name: str = proto.Field(
+        proto.STRING,
+        number=1,
+    )
+    uid: str = proto.Field(
+        proto.STRING,
+        number=2,
+    )
+    create_time: timestamp_pb2.Timestamp = proto.Field(
+        proto.MESSAGE,
+        number=3,
+        message=timestamp_pb2.Timestamp,
+    )
+    update_time: timestamp_pb2.Timestamp = proto.Field(
+        proto.MESSAGE,
+        number=4,
+        message=timestamp_pb2.Timestamp,
+    )
+    description: str = proto.Field(
+        proto.STRING,
+        number=5,
+    )
+    display_name: str = proto.Field(
+        proto.STRING,
+        number=6,
+    )
+    state: resources.State = proto.Field(
+        proto.ENUM,
+        number=7,
+        enum=resources.State,
+    )
+    labels: MutableMapping[str, str] = proto.MapField(
+        proto.STRING,
+        proto.STRING,
+        number=8,
+    )
+    trigger_spec: TriggerSpec = proto.Field(
+        proto.MESSAGE,
+        number=100,
+        message=TriggerSpec,
+    )
+    execution_spec: ExecutionSpec = proto.Field(
+        proto.MESSAGE,
+        number=101,
+        message=ExecutionSpec,
+    )
+    execution_status: ExecutionStatus = proto.Field(
+        proto.MESSAGE,
+        number=201,
+        message=ExecutionStatus,
+    )
+    spark: SparkTaskConfig = proto.Field(
+        proto.MESSAGE,
+        number=300,
+        oneof='config',
+        message=SparkTaskConfig,
+    )
+    notebook: NotebookTaskConfig = proto.Field(
+        proto.MESSAGE,
+        number=302,
+        oneof='config',
+        message=NotebookTaskConfig,
+    )
+
+
+class Job(proto.Message):
+    r"""A job represents an instance of a task.
+
+    Attributes:
+        name (str):
+            Output only. The relative resource name of the job, of the
+            form:
+            ``projects/{project_number}/locations/{location_id}/lakes/{lake_id}/tasks/{task_id}/jobs/{job_id}``.
+        uid (str):
+            Output only. System generated globally unique
+            ID for the job.
+        start_time (google.protobuf.timestamp_pb2.Timestamp):
+            Output only. The time when the job was
+            started.
+        end_time (google.protobuf.timestamp_pb2.Timestamp):
+            Output only. The time when the job ended.
+        state (google.cloud.dataplex_v1.types.Job.State):
+            Output only. Execution state for the job.
+        retry_count (int):
+            Output only. The number of times the job has
+            been retried (excluding the initial attempt).
+        service (google.cloud.dataplex_v1.types.Job.Service):
+            Output only. The underlying service running a
+            job.
+        service_job (str):
+            Output only. The full resource name for the
+            job run under a particular service.
+        message (str):
+            Output only. Additional information about the
+            current state.
+        labels (MutableMapping[str, str]):
+            Output only. User-defined labels for the
+            task.
+        trigger (google.cloud.dataplex_v1.types.Job.Trigger):
+            Output only. Job execution trigger.
+        execution_spec (google.cloud.dataplex_v1.types.Task.ExecutionSpec):
+            Output only. Spec related to how a task is
+            executed.
+    """
+    class Service(proto.Enum):
+        r"""The underlying service used to run the job.
+
+        Values:
+            SERVICE_UNSPECIFIED (0):
+                Service used to run the job is unspecified.
+            DATAPROC (1):
+                Dataproc service is used to run this job.
+        """
+        SERVICE_UNSPECIFIED = 0
+        DATAPROC = 1
+
+    class State(proto.Enum):
+        r"""The execution state of the job.
+
+        Values:
+            STATE_UNSPECIFIED (0):
+                The job state is unknown.
+            RUNNING (1):
+                The job is running.
+            CANCELLING (2):
+                The job is cancelling.
+            CANCELLED (3):
+                The job cancellation was successful.
+            SUCCEEDED (4):
+                The job completed successfully.
+            FAILED (5):
+                The job is no longer running due to an error.
+            ABORTED (6):
+                The job was cancelled outside of Dataplex.
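+
+        CANCELLED, SUCCEEDED, FAILED, and ABORTED describe finished runs,
+        so, given a fetched ``job``, a completed run can be detected as,
+        for example (illustrative)::
+
+            done = job.state in (
+                dataplex_v1.Job.State.SUCCEEDED,
+                dataplex_v1.Job.State.FAILED,
+                dataplex_v1.Job.State.CANCELLED,
+                dataplex_v1.Job.State.ABORTED,
+            )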
+ """ + STATE_UNSPECIFIED = 0 + RUNNING = 1 + CANCELLING = 2 + CANCELLED = 3 + SUCCEEDED = 4 + FAILED = 5 + ABORTED = 6 + + class Trigger(proto.Enum): + r"""Job execution trigger. + + Values: + TRIGGER_UNSPECIFIED (0): + The trigger is unspecified. + TASK_CONFIG (1): + The job was triggered by Dataplex based on + trigger spec from task definition. + RUN_REQUEST (2): + The job was triggered by the explicit call of + Task API. + """ + TRIGGER_UNSPECIFIED = 0 + TASK_CONFIG = 1 + RUN_REQUEST = 2 + + name: str = proto.Field( + proto.STRING, + number=1, + ) + uid: str = proto.Field( + proto.STRING, + number=2, + ) + start_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=3, + message=timestamp_pb2.Timestamp, + ) + end_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=4, + message=timestamp_pb2.Timestamp, + ) + state: State = proto.Field( + proto.ENUM, + number=5, + enum=State, + ) + retry_count: int = proto.Field( + proto.UINT32, + number=6, + ) + service: Service = proto.Field( + proto.ENUM, + number=7, + enum=Service, + ) + service_job: str = proto.Field( + proto.STRING, + number=8, + ) + message: str = proto.Field( + proto.STRING, + number=9, + ) + labels: MutableMapping[str, str] = proto.MapField( + proto.STRING, + proto.STRING, + number=10, + ) + trigger: Trigger = proto.Field( + proto.ENUM, + number=11, + enum=Trigger, + ) + execution_spec: 'Task.ExecutionSpec' = proto.Field( + proto.MESSAGE, + number=100, + message='Task.ExecutionSpec', + ) + + +__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/owl-bot-staging/google-cloud-dataplex/v1/mypy.ini b/owl-bot-staging/google-cloud-dataplex/v1/mypy.ini new file mode 100644 index 000000000000..574c5aed394b --- /dev/null +++ b/owl-bot-staging/google-cloud-dataplex/v1/mypy.ini @@ -0,0 +1,3 @@ +[mypy] +python_version = 3.7 +namespace_packages = True diff --git a/owl-bot-staging/google-cloud-dataplex/v1/noxfile.py b/owl-bot-staging/google-cloud-dataplex/v1/noxfile.py new file mode 100644 index 000000000000..a1ad2c6fe6c7 --- /dev/null +++ b/owl-bot-staging/google-cloud-dataplex/v1/noxfile.py @@ -0,0 +1,280 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+import os
+import pathlib
+import re
+import shutil
+import subprocess
+import sys
+
+
+import nox  # type: ignore
+
+ALL_PYTHON = [
+    "3.7",
+    "3.8",
+    "3.9",
+    "3.10",
+    "3.11",
+    "3.12",
+    "3.13",
+]
+
+CURRENT_DIRECTORY = pathlib.Path(__file__).parent.absolute()
+
+LOWER_BOUND_CONSTRAINTS_FILE = CURRENT_DIRECTORY / "constraints.txt"
+PACKAGE_NAME = 'google-cloud-dataplex'
+
+BLACK_VERSION = "black==22.3.0"
+BLACK_PATHS = ["docs", "google", "tests", "samples", "noxfile.py", "setup.py"]
+DEFAULT_PYTHON_VERSION = "3.13"
+
+nox.sessions = [
+    "unit",
+    "cover",
+    "mypy",
+    "check_lower_bounds",
+    # exclude update_lower_bounds from default
+    "docs",
+    "blacken",
+    "lint",
+    "prerelease_deps",
+]
+
+@nox.session(python=ALL_PYTHON)
+@nox.parametrize(
+    "protobuf_implementation",
+    [ "python", "upb", "cpp" ],
+)
+def unit(session, protobuf_implementation):
+    """Run the unit test suite."""
+
+    if protobuf_implementation == "cpp" and session.python in ("3.11", "3.12", "3.13"):
+        session.skip("cpp implementation is not supported in python 3.11+")
+
+    session.install('coverage', 'pytest', 'pytest-cov', 'pytest-asyncio', 'asyncmock; python_version < "3.8"')
+    session.install('-e', '.', "-c", f"testing/constraints-{session.python}.txt")
+
+    # Remove the 'cpp' implementation once support for Protobuf 3.x is dropped.
+    # The 'cpp' implementation requires Protobuf<4.
+    if protobuf_implementation == "cpp":
+        session.install("protobuf<4")
+
+    session.run(
+        'py.test',
+        '--quiet',
+        '--cov=google/cloud/dataplex_v1/',
+        '--cov=tests/',
+        '--cov-config=.coveragerc',
+        '--cov-report=term',
+        '--cov-report=html',
+        os.path.join('tests', 'unit', ''.join(session.posargs)),
+        env={
+            "PROTOCOL_BUFFERS_PYTHON_IMPLEMENTATION": protobuf_implementation,
+        },
+    )
+
+@nox.session(python=ALL_PYTHON[-1])
+@nox.parametrize(
+    "protobuf_implementation",
+    [ "python", "upb", "cpp" ],
+)
+def prerelease_deps(session, protobuf_implementation):
+    """Run the unit test suite against pre-release versions of dependencies."""
+
+    if protobuf_implementation == "cpp" and session.python in ("3.11", "3.12", "3.13"):
+        session.skip("cpp implementation is not supported in python 3.11+")
+
+    # Install test environment dependencies
+    session.install('coverage', 'pytest', 'pytest-cov', 'pytest-asyncio', 'asyncmock; python_version < "3.8"')
+
+    # Install the package without dependencies
+    session.install('-e', '.', '--no-deps')
+
+    # We test the minimum dependency versions using the minimum Python
+    # version, so the lowest Python runtime that we test has a corresponding
+    # constraints file, located at
+    # `testing/constraints-<minimum python version>.txt`, which contains all
+    # of the dependencies and extras.
+    with open(
+        CURRENT_DIRECTORY
+        / "testing"
+        / f"constraints-{ALL_PYTHON[0]}.txt",
+        encoding="utf-8",
+    ) as constraints_file:
+        constraints_text = constraints_file.read()
+
+    # Ignore leading whitespace and comment lines.
+ constraints_deps = [ + match.group(1) + for match in re.finditer( + r"^\s*(\S+)(?===\S+)", constraints_text, flags=re.MULTILINE + ) + ] + + session.install(*constraints_deps) + + prerel_deps = [ + "googleapis-common-protos", + "google-api-core", + "google-auth", + # Exclude grpcio!=1.67.0rc1 which does not support python 3.13 + "grpcio!=1.67.0rc1", + "grpcio-status", + "protobuf", + "proto-plus", + ] + + for dep in prerel_deps: + session.install("--pre", "--no-deps", "--upgrade", dep) + + # Remaining dependencies + other_deps = [ + "requests", + ] + session.install(*other_deps) + + # Print out prerelease package versions + + session.run("python", "-c", "import google.api_core; print(google.api_core.__version__)") + session.run("python", "-c", "import google.auth; print(google.auth.__version__)") + session.run("python", "-c", "import grpc; print(grpc.__version__)") + session.run( + "python", "-c", "import google.protobuf; print(google.protobuf.__version__)" + ) + session.run( + "python", "-c", "import proto; print(proto.__version__)" + ) + + session.run( + 'py.test', + '--quiet', + '--cov=google/cloud/dataplex_v1/', + '--cov=tests/', + '--cov-config=.coveragerc', + '--cov-report=term', + '--cov-report=html', + os.path.join('tests', 'unit', ''.join(session.posargs)), + env={ + "PROTOCOL_BUFFERS_PYTHON_IMPLEMENTATION": protobuf_implementation, + }, + ) + + +@nox.session(python=DEFAULT_PYTHON_VERSION) +def cover(session): + """Run the final coverage report. + This outputs the coverage report aggregating coverage from the unit + test runs (not system test runs), and then erases coverage data. + """ + session.install("coverage", "pytest-cov") + session.run("coverage", "report", "--show-missing", "--fail-under=100") + + session.run("coverage", "erase") + + +@nox.session(python=ALL_PYTHON) +def mypy(session): + """Run the type checker.""" + session.install( + 'mypy', + 'types-requests', + 'types-protobuf' + ) + session.install('.') + session.run( + 'mypy', + '-p', + 'google', + ) + + +@nox.session +def update_lower_bounds(session): + """Update lower bounds in constraints.txt to match setup.py""" + session.install('google-cloud-testutils') + session.install('.') + + session.run( + 'lower-bound-checker', + 'update', + '--package-name', + PACKAGE_NAME, + '--constraints-file', + str(LOWER_BOUND_CONSTRAINTS_FILE), + ) + + +@nox.session +def check_lower_bounds(session): + """Check lower bounds in setup.py are reflected in constraints file""" + session.install('google-cloud-testutils') + session.install('.') + + session.run( + 'lower-bound-checker', + 'check', + '--package-name', + PACKAGE_NAME, + '--constraints-file', + str(LOWER_BOUND_CONSTRAINTS_FILE), + ) + +@nox.session(python=DEFAULT_PYTHON_VERSION) +def docs(session): + """Build the docs for this library.""" + + session.install("-e", ".") + session.install("sphinx==7.0.1", "alabaster", "recommonmark") + + shutil.rmtree(os.path.join("docs", "_build"), ignore_errors=True) + session.run( + "sphinx-build", + "-W", # warnings as errors + "-T", # show full traceback on exception + "-N", # no colors + "-b", + "html", + "-d", + os.path.join("docs", "_build", "doctrees", ""), + os.path.join("docs", ""), + os.path.join("docs", "_build", "html", ""), + ) + + +@nox.session(python=DEFAULT_PYTHON_VERSION) +def lint(session): + """Run linters. + + Returns a failure if the linters find linting errors or sufficiently + serious code quality issues. 
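+
+    An illustrative local invocation of this session::
+
+        nox -s lint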
+ """ + session.install("flake8", BLACK_VERSION) + session.run( + "black", + "--check", + *BLACK_PATHS, + ) + session.run("flake8", "google", "tests", "samples") + + +@nox.session(python=DEFAULT_PYTHON_VERSION) +def blacken(session): + """Run black. Format code to uniform standard.""" + session.install(BLACK_VERSION) + session.run( + "black", + *BLACK_PATHS, + ) diff --git a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_catalog_service_cancel_metadata_job_async.py b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_catalog_service_cancel_metadata_job_async.py new file mode 100644 index 000000000000..7490891a58d0 --- /dev/null +++ b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_catalog_service_cancel_metadata_job_async.py @@ -0,0 +1,50 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for CancelMetadataJob +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dataplex + + +# [START dataplex_v1_generated_CatalogService_CancelMetadataJob_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dataplex_v1 + + +async def sample_cancel_metadata_job(): + # Create a client + client = dataplex_v1.CatalogServiceAsyncClient() + + # Initialize request argument(s) + request = dataplex_v1.CancelMetadataJobRequest( + name="name_value", + ) + + # Make the request + await client.cancel_metadata_job(request=request) + + +# [END dataplex_v1_generated_CatalogService_CancelMetadataJob_async] diff --git a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_catalog_service_cancel_metadata_job_sync.py b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_catalog_service_cancel_metadata_job_sync.py new file mode 100644 index 000000000000..803f83d5f306 --- /dev/null +++ b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_catalog_service_cancel_metadata_job_sync.py @@ -0,0 +1,50 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for CancelMetadataJob +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dataplex + + +# [START dataplex_v1_generated_CatalogService_CancelMetadataJob_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dataplex_v1 + + +def sample_cancel_metadata_job(): + # Create a client + client = dataplex_v1.CatalogServiceClient() + + # Initialize request argument(s) + request = dataplex_v1.CancelMetadataJobRequest( + name="name_value", + ) + + # Make the request + client.cancel_metadata_job(request=request) + + +# [END dataplex_v1_generated_CatalogService_CancelMetadataJob_sync] diff --git a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_catalog_service_create_aspect_type_async.py b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_catalog_service_create_aspect_type_async.py new file mode 100644 index 000000000000..755b258dfcdc --- /dev/null +++ b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_catalog_service_create_aspect_type_async.py @@ -0,0 +1,62 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for CreateAspectType +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dataplex + + +# [START dataplex_v1_generated_CatalogService_CreateAspectType_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dataplex_v1 + + +async def sample_create_aspect_type(): + # Create a client + client = dataplex_v1.CatalogServiceAsyncClient() + + # Initialize request argument(s) + aspect_type = dataplex_v1.AspectType() + aspect_type.metadata_template.name = "name_value" + aspect_type.metadata_template.type_ = "type__value" + + request = dataplex_v1.CreateAspectTypeRequest( + parent="parent_value", + aspect_type_id="aspect_type_id_value", + aspect_type=aspect_type, + ) + + # Make the request + operation = client.create_aspect_type(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + +# [END dataplex_v1_generated_CatalogService_CreateAspectType_async] diff --git a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_catalog_service_create_aspect_type_sync.py b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_catalog_service_create_aspect_type_sync.py new file mode 100644 index 000000000000..86dc1d13724d --- /dev/null +++ b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_catalog_service_create_aspect_type_sync.py @@ -0,0 +1,62 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for CreateAspectType +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dataplex + + +# [START dataplex_v1_generated_CatalogService_CreateAspectType_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dataplex_v1 + + +def sample_create_aspect_type(): + # Create a client + client = dataplex_v1.CatalogServiceClient() + + # Initialize request argument(s) + aspect_type = dataplex_v1.AspectType() + aspect_type.metadata_template.name = "name_value" + aspect_type.metadata_template.type_ = "type__value" + + request = dataplex_v1.CreateAspectTypeRequest( + parent="parent_value", + aspect_type_id="aspect_type_id_value", + aspect_type=aspect_type, + ) + + # Make the request + operation = client.create_aspect_type(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + +# [END dataplex_v1_generated_CatalogService_CreateAspectType_sync] diff --git a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_catalog_service_create_entry_async.py b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_catalog_service_create_entry_async.py new file mode 100644 index 000000000000..05e1e13996e8 --- /dev/null +++ b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_catalog_service_create_entry_async.py @@ -0,0 +1,57 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for CreateEntry +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dataplex + + +# [START dataplex_v1_generated_CatalogService_CreateEntry_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dataplex_v1 + + +async def sample_create_entry(): + # Create a client + client = dataplex_v1.CatalogServiceAsyncClient() + + # Initialize request argument(s) + entry = dataplex_v1.Entry() + entry.entry_type = "entry_type_value" + + request = dataplex_v1.CreateEntryRequest( + parent="parent_value", + entry_id="entry_id_value", + entry=entry, + ) + + # Make the request + response = await client.create_entry(request=request) + + # Handle the response + print(response) + +# [END dataplex_v1_generated_CatalogService_CreateEntry_async] diff --git a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_catalog_service_create_entry_group_async.py b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_catalog_service_create_entry_group_async.py new file mode 100644 index 000000000000..4ffcdc99175a --- /dev/null +++ b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_catalog_service_create_entry_group_async.py @@ -0,0 +1,57 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for CreateEntryGroup +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dataplex + + +# [START dataplex_v1_generated_CatalogService_CreateEntryGroup_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dataplex_v1 + + +async def sample_create_entry_group(): + # Create a client + client = dataplex_v1.CatalogServiceAsyncClient() + + # Initialize request argument(s) + request = dataplex_v1.CreateEntryGroupRequest( + parent="parent_value", + entry_group_id="entry_group_id_value", + ) + + # Make the request + operation = client.create_entry_group(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + +# [END dataplex_v1_generated_CatalogService_CreateEntryGroup_async] diff --git a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_catalog_service_create_entry_group_sync.py b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_catalog_service_create_entry_group_sync.py new file mode 100644 index 000000000000..8ce69f86a352 --- /dev/null +++ b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_catalog_service_create_entry_group_sync.py @@ -0,0 +1,57 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for CreateEntryGroup +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dataplex + + +# [START dataplex_v1_generated_CatalogService_CreateEntryGroup_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dataplex_v1 + + +def sample_create_entry_group(): + # Create a client + client = dataplex_v1.CatalogServiceClient() + + # Initialize request argument(s) + request = dataplex_v1.CreateEntryGroupRequest( + parent="parent_value", + entry_group_id="entry_group_id_value", + ) + + # Make the request + operation = client.create_entry_group(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + +# [END dataplex_v1_generated_CatalogService_CreateEntryGroup_sync] diff --git a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_catalog_service_create_entry_sync.py b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_catalog_service_create_entry_sync.py new file mode 100644 index 000000000000..a0b18212967b --- /dev/null +++ b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_catalog_service_create_entry_sync.py @@ -0,0 +1,57 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for CreateEntry +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dataplex + + +# [START dataplex_v1_generated_CatalogService_CreateEntry_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dataplex_v1 + + +def sample_create_entry(): + # Create a client + client = dataplex_v1.CatalogServiceClient() + + # Initialize request argument(s) + entry = dataplex_v1.Entry() + entry.entry_type = "entry_type_value" + + request = dataplex_v1.CreateEntryRequest( + parent="parent_value", + entry_id="entry_id_value", + entry=entry, + ) + + # Make the request + response = client.create_entry(request=request) + + # Handle the response + print(response) + +# [END dataplex_v1_generated_CatalogService_CreateEntry_sync] diff --git a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_catalog_service_create_entry_type_async.py b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_catalog_service_create_entry_type_async.py new file mode 100644 index 000000000000..545b7267de18 --- /dev/null +++ b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_catalog_service_create_entry_type_async.py @@ -0,0 +1,57 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for CreateEntryType +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dataplex + + +# [START dataplex_v1_generated_CatalogService_CreateEntryType_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dataplex_v1 + + +async def sample_create_entry_type(): + # Create a client + client = dataplex_v1.CatalogServiceAsyncClient() + + # Initialize request argument(s) + request = dataplex_v1.CreateEntryTypeRequest( + parent="parent_value", + entry_type_id="entry_type_id_value", + ) + + # Make the request + operation = client.create_entry_type(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + +# [END dataplex_v1_generated_CatalogService_CreateEntryType_async] diff --git a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_catalog_service_create_entry_type_sync.py b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_catalog_service_create_entry_type_sync.py new file mode 100644 index 000000000000..6bed1ae44985 --- /dev/null +++ b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_catalog_service_create_entry_type_sync.py @@ -0,0 +1,57 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for CreateEntryType +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dataplex + + +# [START dataplex_v1_generated_CatalogService_CreateEntryType_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dataplex_v1 + + +def sample_create_entry_type(): + # Create a client + client = dataplex_v1.CatalogServiceClient() + + # Initialize request argument(s) + request = dataplex_v1.CreateEntryTypeRequest( + parent="parent_value", + entry_type_id="entry_type_id_value", + ) + + # Make the request + operation = client.create_entry_type(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + +# [END dataplex_v1_generated_CatalogService_CreateEntryType_sync] diff --git a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_catalog_service_create_metadata_job_async.py b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_catalog_service_create_metadata_job_async.py new file mode 100644 index 000000000000..1ecd3586aee7 --- /dev/null +++ b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_catalog_service_create_metadata_job_async.py @@ -0,0 +1,64 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for CreateMetadataJob +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dataplex + + +# [START dataplex_v1_generated_CatalogService_CreateMetadataJob_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dataplex_v1 + + +async def sample_create_metadata_job(): + # Create a client + client = dataplex_v1.CatalogServiceAsyncClient() + + # Initialize request argument(s) + metadata_job = dataplex_v1.MetadataJob() + metadata_job.import_spec.scope.entry_groups = ['entry_groups_value1', 'entry_groups_value2'] + metadata_job.import_spec.scope.entry_types = ['entry_types_value1', 'entry_types_value2'] + metadata_job.import_spec.entry_sync_mode = "INCREMENTAL" + metadata_job.import_spec.aspect_sync_mode = "INCREMENTAL" + metadata_job.type_ = "IMPORT" + + request = dataplex_v1.CreateMetadataJobRequest( + parent="parent_value", + metadata_job=metadata_job, + ) + + # Make the request + operation = client.create_metadata_job(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + +# [END dataplex_v1_generated_CatalogService_CreateMetadataJob_async] diff --git a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_catalog_service_create_metadata_job_sync.py b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_catalog_service_create_metadata_job_sync.py new file mode 100644 index 000000000000..022008b13e72 --- /dev/null +++ b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_catalog_service_create_metadata_job_sync.py @@ -0,0 +1,64 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for CreateMetadataJob +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dataplex + + +# [START dataplex_v1_generated_CatalogService_CreateMetadataJob_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dataplex_v1 + + +def sample_create_metadata_job(): + # Create a client + client = dataplex_v1.CatalogServiceClient() + + # Initialize request argument(s) + metadata_job = dataplex_v1.MetadataJob() + metadata_job.import_spec.scope.entry_groups = ['entry_groups_value1', 'entry_groups_value2'] + metadata_job.import_spec.scope.entry_types = ['entry_types_value1', 'entry_types_value2'] + metadata_job.import_spec.entry_sync_mode = "INCREMENTAL" + metadata_job.import_spec.aspect_sync_mode = "INCREMENTAL" + metadata_job.type_ = "IMPORT" + + request = dataplex_v1.CreateMetadataJobRequest( + parent="parent_value", + metadata_job=metadata_job, + ) + + # Make the request + operation = client.create_metadata_job(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + +# [END dataplex_v1_generated_CatalogService_CreateMetadataJob_sync] diff --git a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_catalog_service_delete_aspect_type_async.py b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_catalog_service_delete_aspect_type_async.py new file mode 100644 index 000000000000..787280513289 --- /dev/null +++ b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_catalog_service_delete_aspect_type_async.py @@ -0,0 +1,56 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for DeleteAspectType +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dataplex + + +# [START dataplex_v1_generated_CatalogService_DeleteAspectType_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dataplex_v1 + + +async def sample_delete_aspect_type(): + # Create a client + client = dataplex_v1.CatalogServiceAsyncClient() + + # Initialize request argument(s) + request = dataplex_v1.DeleteAspectTypeRequest( + name="name_value", + ) + + # Make the request + operation = client.delete_aspect_type(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + +# [END dataplex_v1_generated_CatalogService_DeleteAspectType_async] diff --git a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_catalog_service_delete_aspect_type_sync.py b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_catalog_service_delete_aspect_type_sync.py new file mode 100644 index 000000000000..ce89096f240b --- /dev/null +++ b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_catalog_service_delete_aspect_type_sync.py @@ -0,0 +1,56 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for DeleteAspectType +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dataplex + + +# [START dataplex_v1_generated_CatalogService_DeleteAspectType_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dataplex_v1 + + +def sample_delete_aspect_type(): + # Create a client + client = dataplex_v1.CatalogServiceClient() + + # Initialize request argument(s) + request = dataplex_v1.DeleteAspectTypeRequest( + name="name_value", + ) + + # Make the request + operation = client.delete_aspect_type(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + +# [END dataplex_v1_generated_CatalogService_DeleteAspectType_sync] diff --git a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_catalog_service_delete_entry_async.py b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_catalog_service_delete_entry_async.py new file mode 100644 index 000000000000..3351c7f53839 --- /dev/null +++ b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_catalog_service_delete_entry_async.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for DeleteEntry +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dataplex + + +# [START dataplex_v1_generated_CatalogService_DeleteEntry_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dataplex_v1 + + +async def sample_delete_entry(): + # Create a client + client = dataplex_v1.CatalogServiceAsyncClient() + + # Initialize request argument(s) + request = dataplex_v1.DeleteEntryRequest( + name="name_value", + ) + + # Make the request + response = await client.delete_entry(request=request) + + # Handle the response + print(response) + +# [END dataplex_v1_generated_CatalogService_DeleteEntry_async] diff --git a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_catalog_service_delete_entry_group_async.py b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_catalog_service_delete_entry_group_async.py new file mode 100644 index 000000000000..019b3acb22c2 --- /dev/null +++ b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_catalog_service_delete_entry_group_async.py @@ -0,0 +1,56 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for DeleteEntryGroup +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dataplex + + +# [START dataplex_v1_generated_CatalogService_DeleteEntryGroup_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service
+# client as shown in:
+# https://googleapis.dev/python/google-api-core/latest/client_options.html
+from google.cloud import dataplex_v1
+
+
+async def sample_delete_entry_group():
+    # Create a client
+    client = dataplex_v1.CatalogServiceAsyncClient()
+
+    # Initialize request argument(s)
+    request = dataplex_v1.DeleteEntryGroupRequest(
+        name="name_value",
+    )
+
+    # Make the request
+    operation = client.delete_entry_group(request=request)
+
+    print("Waiting for operation to complete...")
+
+    response = await (await operation).result()
+
+    # Handle the response
+    print(response)
+
+# [END dataplex_v1_generated_CatalogService_DeleteEntryGroup_async]
diff --git a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_catalog_service_delete_entry_group_sync.py b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_catalog_service_delete_entry_group_sync.py
new file mode 100644
index 000000000000..b7cb7aa70933
--- /dev/null
+++ b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_catalog_service_delete_entry_group_sync.py
@@ -0,0 +1,56 @@
+# -*- coding: utf-8 -*-
+# Copyright 2024 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+# Generated code. DO NOT EDIT!
+#
+# Snippet for DeleteEntryGroup
+# NOTE: This snippet has been automatically generated for illustrative purposes only.
+# It may require modifications to work in your environment.
+
+# To install the latest published package dependency, execute the following:
+#   python3 -m pip install google-cloud-dataplex
+
+
+# [START dataplex_v1_generated_CatalogService_DeleteEntryGroup_sync]
+# This snippet has been automatically generated and should be regarded as a
+# code template only.
+# It will require modifications to work:
+# - It may require correct/in-range values for request initialization.
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dataplex_v1 + + +def sample_delete_entry_group(): + # Create a client + client = dataplex_v1.CatalogServiceClient() + + # Initialize request argument(s) + request = dataplex_v1.DeleteEntryGroupRequest( + name="name_value", + ) + + # Make the request + operation = client.delete_entry_group(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + +# [END dataplex_v1_generated_CatalogService_DeleteEntryGroup_sync] diff --git a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_catalog_service_delete_entry_sync.py b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_catalog_service_delete_entry_sync.py new file mode 100644 index 000000000000..6c772dc5c765 --- /dev/null +++ b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_catalog_service_delete_entry_sync.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for DeleteEntry +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dataplex + + +# [START dataplex_v1_generated_CatalogService_DeleteEntry_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dataplex_v1 + + +def sample_delete_entry(): + # Create a client + client = dataplex_v1.CatalogServiceClient() + + # Initialize request argument(s) + request = dataplex_v1.DeleteEntryRequest( + name="name_value", + ) + + # Make the request + response = client.delete_entry(request=request) + + # Handle the response + print(response) + +# [END dataplex_v1_generated_CatalogService_DeleteEntry_sync] diff --git a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_catalog_service_delete_entry_type_async.py b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_catalog_service_delete_entry_type_async.py new file mode 100644 index 000000000000..666df269d16d --- /dev/null +++ b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_catalog_service_delete_entry_type_async.py @@ -0,0 +1,56 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for DeleteEntryType +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dataplex + + +# [START dataplex_v1_generated_CatalogService_DeleteEntryType_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service
+# client as shown in:
+# https://googleapis.dev/python/google-api-core/latest/client_options.html
+from google.cloud import dataplex_v1
+
+
+async def sample_delete_entry_type():
+    # Create a client
+    client = dataplex_v1.CatalogServiceAsyncClient()
+
+    # Initialize request argument(s)
+    request = dataplex_v1.DeleteEntryTypeRequest(
+        name="name_value",
+    )
+
+    # Make the request
+    operation = client.delete_entry_type(request=request)
+
+    print("Waiting for operation to complete...")
+
+    response = await (await operation).result()
+
+    # Handle the response
+    print(response)
+
+# [END dataplex_v1_generated_CatalogService_DeleteEntryType_async]
diff --git a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_catalog_service_delete_entry_type_sync.py b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_catalog_service_delete_entry_type_sync.py
new file mode 100644
index 000000000000..91f695f2841e
--- /dev/null
+++ b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_catalog_service_delete_entry_type_sync.py
@@ -0,0 +1,56 @@
+# -*- coding: utf-8 -*-
+# Copyright 2024 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+# Generated code. DO NOT EDIT!
+#
+# Snippet for DeleteEntryType
+# NOTE: This snippet has been automatically generated for illustrative purposes only.
+# It may require modifications to work in your environment.
+
+# To install the latest published package dependency, execute the following:
+#   python3 -m pip install google-cloud-dataplex
+
+
+# [START dataplex_v1_generated_CatalogService_DeleteEntryType_sync]
+# This snippet has been automatically generated and should be regarded as a
+# code template only.
+# It will require modifications to work:
+# - It may require correct/in-range values for request initialization.
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dataplex_v1 + + +def sample_delete_entry_type(): + # Create a client + client = dataplex_v1.CatalogServiceClient() + + # Initialize request argument(s) + request = dataplex_v1.DeleteEntryTypeRequest( + name="name_value", + ) + + # Make the request + operation = client.delete_entry_type(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + +# [END dataplex_v1_generated_CatalogService_DeleteEntryType_sync] diff --git a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_catalog_service_get_aspect_type_async.py b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_catalog_service_get_aspect_type_async.py new file mode 100644 index 000000000000..81d3d81b8f26 --- /dev/null +++ b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_catalog_service_get_aspect_type_async.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetAspectType +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dataplex + + +# [START dataplex_v1_generated_CatalogService_GetAspectType_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dataplex_v1 + + +async def sample_get_aspect_type(): + # Create a client + client = dataplex_v1.CatalogServiceAsyncClient() + + # Initialize request argument(s) + request = dataplex_v1.GetAspectTypeRequest( + name="name_value", + ) + + # Make the request + response = await client.get_aspect_type(request=request) + + # Handle the response + print(response) + +# [END dataplex_v1_generated_CatalogService_GetAspectType_async] diff --git a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_catalog_service_get_aspect_type_sync.py b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_catalog_service_get_aspect_type_sync.py new file mode 100644 index 000000000000..2c3b728838b7 --- /dev/null +++ b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_catalog_service_get_aspect_type_sync.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetAspectType +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dataplex + + +# [START dataplex_v1_generated_CatalogService_GetAspectType_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dataplex_v1 + + +def sample_get_aspect_type(): + # Create a client + client = dataplex_v1.CatalogServiceClient() + + # Initialize request argument(s) + request = dataplex_v1.GetAspectTypeRequest( + name="name_value", + ) + + # Make the request + response = client.get_aspect_type(request=request) + + # Handle the response + print(response) + +# [END dataplex_v1_generated_CatalogService_GetAspectType_sync] diff --git a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_catalog_service_get_entry_async.py b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_catalog_service_get_entry_async.py new file mode 100644 index 000000000000..b996bab77591 --- /dev/null +++ b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_catalog_service_get_entry_async.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetEntry +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dataplex + + +# [START dataplex_v1_generated_CatalogService_GetEntry_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dataplex_v1 + + +async def sample_get_entry(): + # Create a client + client = dataplex_v1.CatalogServiceAsyncClient() + + # Initialize request argument(s) + request = dataplex_v1.GetEntryRequest( + name="name_value", + ) + + # Make the request + response = await client.get_entry(request=request) + + # Handle the response + print(response) + +# [END dataplex_v1_generated_CatalogService_GetEntry_async] diff --git a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_catalog_service_get_entry_group_async.py b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_catalog_service_get_entry_group_async.py new file mode 100644 index 000000000000..e0669aadb927 --- /dev/null +++ b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_catalog_service_get_entry_group_async.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetEntryGroup +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dataplex + + +# [START dataplex_v1_generated_CatalogService_GetEntryGroup_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dataplex_v1 + + +async def sample_get_entry_group(): + # Create a client + client = dataplex_v1.CatalogServiceAsyncClient() + + # Initialize request argument(s) + request = dataplex_v1.GetEntryGroupRequest( + name="name_value", + ) + + # Make the request + response = await client.get_entry_group(request=request) + + # Handle the response + print(response) + +# [END dataplex_v1_generated_CatalogService_GetEntryGroup_async] diff --git a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_catalog_service_get_entry_group_sync.py b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_catalog_service_get_entry_group_sync.py new file mode 100644 index 000000000000..95c037a589b6 --- /dev/null +++ b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_catalog_service_get_entry_group_sync.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetEntryGroup +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dataplex + + +# [START dataplex_v1_generated_CatalogService_GetEntryGroup_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dataplex_v1 + + +def sample_get_entry_group(): + # Create a client + client = dataplex_v1.CatalogServiceClient() + + # Initialize request argument(s) + request = dataplex_v1.GetEntryGroupRequest( + name="name_value", + ) + + # Make the request + response = client.get_entry_group(request=request) + + # Handle the response + print(response) + +# [END dataplex_v1_generated_CatalogService_GetEntryGroup_sync] diff --git a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_catalog_service_get_entry_sync.py b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_catalog_service_get_entry_sync.py new file mode 100644 index 000000000000..b53082957bfa --- /dev/null +++ b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_catalog_service_get_entry_sync.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetEntry +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dataplex + + +# [START dataplex_v1_generated_CatalogService_GetEntry_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dataplex_v1 + + +def sample_get_entry(): + # Create a client + client = dataplex_v1.CatalogServiceClient() + + # Initialize request argument(s) + request = dataplex_v1.GetEntryRequest( + name="name_value", + ) + + # Make the request + response = client.get_entry(request=request) + + # Handle the response + print(response) + +# [END dataplex_v1_generated_CatalogService_GetEntry_sync] diff --git a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_catalog_service_get_entry_type_async.py b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_catalog_service_get_entry_type_async.py new file mode 100644 index 000000000000..58083a001b8a --- /dev/null +++ b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_catalog_service_get_entry_type_async.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetEntryType +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dataplex + + +# [START dataplex_v1_generated_CatalogService_GetEntryType_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dataplex_v1 + + +async def sample_get_entry_type(): + # Create a client + client = dataplex_v1.CatalogServiceAsyncClient() + + # Initialize request argument(s) + request = dataplex_v1.GetEntryTypeRequest( + name="name_value", + ) + + # Make the request + response = await client.get_entry_type(request=request) + + # Handle the response + print(response) + +# [END dataplex_v1_generated_CatalogService_GetEntryType_async] diff --git a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_catalog_service_get_entry_type_sync.py b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_catalog_service_get_entry_type_sync.py new file mode 100644 index 000000000000..4098fc417a9e --- /dev/null +++ b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_catalog_service_get_entry_type_sync.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetEntryType +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dataplex + + +# [START dataplex_v1_generated_CatalogService_GetEntryType_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dataplex_v1 + + +def sample_get_entry_type(): + # Create a client + client = dataplex_v1.CatalogServiceClient() + + # Initialize request argument(s) + request = dataplex_v1.GetEntryTypeRequest( + name="name_value", + ) + + # Make the request + response = client.get_entry_type(request=request) + + # Handle the response + print(response) + +# [END dataplex_v1_generated_CatalogService_GetEntryType_sync] diff --git a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_catalog_service_get_metadata_job_async.py b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_catalog_service_get_metadata_job_async.py new file mode 100644 index 000000000000..144996fdc0c2 --- /dev/null +++ b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_catalog_service_get_metadata_job_async.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetMetadataJob +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dataplex + + +# [START dataplex_v1_generated_CatalogService_GetMetadataJob_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dataplex_v1 + + +async def sample_get_metadata_job(): + # Create a client + client = dataplex_v1.CatalogServiceAsyncClient() + + # Initialize request argument(s) + request = dataplex_v1.GetMetadataJobRequest( + name="name_value", + ) + + # Make the request + response = await client.get_metadata_job(request=request) + + # Handle the response + print(response) + +# [END dataplex_v1_generated_CatalogService_GetMetadataJob_async] diff --git a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_catalog_service_get_metadata_job_sync.py b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_catalog_service_get_metadata_job_sync.py new file mode 100644 index 000000000000..f2c032d11d82 --- /dev/null +++ b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_catalog_service_get_metadata_job_sync.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetMetadataJob +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dataplex + + +# [START dataplex_v1_generated_CatalogService_GetMetadataJob_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dataplex_v1 + + +def sample_get_metadata_job(): + # Create a client + client = dataplex_v1.CatalogServiceClient() + + # Initialize request argument(s) + request = dataplex_v1.GetMetadataJobRequest( + name="name_value", + ) + + # Make the request + response = client.get_metadata_job(request=request) + + # Handle the response + print(response) + +# [END dataplex_v1_generated_CatalogService_GetMetadataJob_sync] diff --git a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_catalog_service_list_aspect_types_async.py b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_catalog_service_list_aspect_types_async.py new file mode 100644 index 000000000000..64778a1cfb7a --- /dev/null +++ b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_catalog_service_list_aspect_types_async.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ListAspectTypes +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dataplex + + +# [START dataplex_v1_generated_CatalogService_ListAspectTypes_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service
+# client as shown in:
+# https://googleapis.dev/python/google-api-core/latest/client_options.html
+from google.cloud import dataplex_v1
+
+
+async def sample_list_aspect_types():
+    # Create a client
+    client = dataplex_v1.CatalogServiceAsyncClient()
+
+    # Initialize request argument(s)
+    request = dataplex_v1.ListAspectTypesRequest(
+        parent="parent_value",
+    )
+
+    # Make the request
+    page_result = await client.list_aspect_types(request=request)
+
+    # Handle the response
+    async for response in page_result:
+        print(response)
+
+# [END dataplex_v1_generated_CatalogService_ListAspectTypes_async]
diff --git a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_catalog_service_list_aspect_types_sync.py b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_catalog_service_list_aspect_types_sync.py
new file mode 100644
index 000000000000..9a8cd1eec4cb
--- /dev/null
+++ b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_catalog_service_list_aspect_types_sync.py
@@ -0,0 +1,53 @@
+# -*- coding: utf-8 -*-
+# Copyright 2024 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+# Generated code. DO NOT EDIT!
+#
+# Snippet for ListAspectTypes
+# NOTE: This snippet has been automatically generated for illustrative purposes only.
+# It may require modifications to work in your environment.
+
+# To install the latest published package dependency, execute the following:
+#   python3 -m pip install google-cloud-dataplex
+
+
+# [START dataplex_v1_generated_CatalogService_ListAspectTypes_sync]
+# This snippet has been automatically generated and should be regarded as a
+# code template only.
+# It will require modifications to work:
+# - It may require correct/in-range values for request initialization.
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dataplex_v1 + + +def sample_list_aspect_types(): + # Create a client + client = dataplex_v1.CatalogServiceClient() + + # Initialize request argument(s) + request = dataplex_v1.ListAspectTypesRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_aspect_types(request=request) + + # Handle the response + for response in page_result: + print(response) + +# [END dataplex_v1_generated_CatalogService_ListAspectTypes_sync] diff --git a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_catalog_service_list_entries_async.py b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_catalog_service_list_entries_async.py new file mode 100644 index 000000000000..fc611110b8d5 --- /dev/null +++ b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_catalog_service_list_entries_async.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ListEntries +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dataplex + + +# [START dataplex_v1_generated_CatalogService_ListEntries_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service
+# client as shown in:
+# https://googleapis.dev/python/google-api-core/latest/client_options.html
+from google.cloud import dataplex_v1
+
+
+async def sample_list_entries():
+    # Create a client
+    client = dataplex_v1.CatalogServiceAsyncClient()
+
+    # Initialize request argument(s)
+    request = dataplex_v1.ListEntriesRequest(
+        parent="parent_value",
+    )
+
+    # Make the request
+    page_result = await client.list_entries(request=request)
+
+    # Handle the response
+    async for response in page_result:
+        print(response)
+
+# [END dataplex_v1_generated_CatalogService_ListEntries_async]
diff --git a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_catalog_service_list_entries_sync.py b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_catalog_service_list_entries_sync.py
new file mode 100644
index 000000000000..faabd3c1cea0
--- /dev/null
+++ b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_catalog_service_list_entries_sync.py
@@ -0,0 +1,53 @@
+# -*- coding: utf-8 -*-
+# Copyright 2024 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+# Generated code. DO NOT EDIT!
+#
+# Snippet for ListEntries
+# NOTE: This snippet has been automatically generated for illustrative purposes only.
+# It may require modifications to work in your environment.
+
+# To install the latest published package dependency, execute the following:
+#   python3 -m pip install google-cloud-dataplex
+
+
+# [START dataplex_v1_generated_CatalogService_ListEntries_sync]
+# This snippet has been automatically generated and should be regarded as a
+# code template only.
+# It will require modifications to work:
+# - It may require correct/in-range values for request initialization.
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dataplex_v1 + + +def sample_list_entries(): + # Create a client + client = dataplex_v1.CatalogServiceClient() + + # Initialize request argument(s) + request = dataplex_v1.ListEntriesRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_entries(request=request) + + # Handle the response + for response in page_result: + print(response) + +# [END dataplex_v1_generated_CatalogService_ListEntries_sync] diff --git a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_catalog_service_list_entry_groups_async.py b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_catalog_service_list_entry_groups_async.py new file mode 100644 index 000000000000..91a2ad641c95 --- /dev/null +++ b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_catalog_service_list_entry_groups_async.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ListEntryGroups +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dataplex + + +# [START dataplex_v1_generated_CatalogService_ListEntryGroups_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service
+# client as shown in:
+# https://googleapis.dev/python/google-api-core/latest/client_options.html
+from google.cloud import dataplex_v1
+
+
+async def sample_list_entry_groups():
+    # Create a client
+    client = dataplex_v1.CatalogServiceAsyncClient()
+
+    # Initialize request argument(s)
+    request = dataplex_v1.ListEntryGroupsRequest(
+        parent="parent_value",
+    )
+
+    # Make the request
+    page_result = await client.list_entry_groups(request=request)
+
+    # Handle the response
+    async for response in page_result:
+        print(response)
+
+# [END dataplex_v1_generated_CatalogService_ListEntryGroups_async]
diff --git a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_catalog_service_list_entry_groups_sync.py b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_catalog_service_list_entry_groups_sync.py
new file mode 100644
index 000000000000..61cade3ceec3
--- /dev/null
+++ b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_catalog_service_list_entry_groups_sync.py
@@ -0,0 +1,53 @@
+# -*- coding: utf-8 -*-
+# Copyright 2024 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+# Generated code. DO NOT EDIT!
+#
+# Snippet for ListEntryGroups
+# NOTE: This snippet has been automatically generated for illustrative purposes only.
+# It may require modifications to work in your environment.
+
+# To install the latest published package dependency, execute the following:
+#   python3 -m pip install google-cloud-dataplex
+
+
+# [START dataplex_v1_generated_CatalogService_ListEntryGroups_sync]
+# This snippet has been automatically generated and should be regarded as a
+# code template only.
+# It will require modifications to work:
+# - It may require correct/in-range values for request initialization.
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dataplex_v1 + + +def sample_list_entry_groups(): + # Create a client + client = dataplex_v1.CatalogServiceClient() + + # Initialize request argument(s) + request = dataplex_v1.ListEntryGroupsRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_entry_groups(request=request) + + # Handle the response + for response in page_result: + print(response) + +# [END dataplex_v1_generated_CatalogService_ListEntryGroups_sync] diff --git a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_catalog_service_list_entry_types_async.py b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_catalog_service_list_entry_types_async.py new file mode 100644 index 000000000000..a85ebb571be1 --- /dev/null +++ b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_catalog_service_list_entry_types_async.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ListEntryTypes +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dataplex + + +# [START dataplex_v1_generated_CatalogService_ListEntryTypes_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service
+# client as shown in:
+# https://googleapis.dev/python/google-api-core/latest/client_options.html
+from google.cloud import dataplex_v1
+
+
+async def sample_list_entry_types():
+    # Create a client
+    client = dataplex_v1.CatalogServiceAsyncClient()
+
+    # Initialize request argument(s)
+    request = dataplex_v1.ListEntryTypesRequest(
+        parent="parent_value",
+    )
+
+    # Make the request
+    page_result = await client.list_entry_types(request=request)
+
+    # Handle the response
+    async for response in page_result:
+        print(response)
+
+# [END dataplex_v1_generated_CatalogService_ListEntryTypes_async]
diff --git a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_catalog_service_list_entry_types_sync.py b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_catalog_service_list_entry_types_sync.py
new file mode 100644
index 000000000000..f0fff3cc52c9
--- /dev/null
+++ b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_catalog_service_list_entry_types_sync.py
@@ -0,0 +1,53 @@
+# -*- coding: utf-8 -*-
+# Copyright 2024 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+# Generated code. DO NOT EDIT!
+#
+# Snippet for ListEntryTypes
+# NOTE: This snippet has been automatically generated for illustrative purposes only.
+# It may require modifications to work in your environment.
+
+# To install the latest published package dependency, execute the following:
+#   python3 -m pip install google-cloud-dataplex
+
+
+# [START dataplex_v1_generated_CatalogService_ListEntryTypes_sync]
+# This snippet has been automatically generated and should be regarded as a
+# code template only.
+# It will require modifications to work:
+# - It may require correct/in-range values for request initialization.
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dataplex_v1 + + +def sample_list_entry_types(): + # Create a client + client = dataplex_v1.CatalogServiceClient() + + # Initialize request argument(s) + request = dataplex_v1.ListEntryTypesRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_entry_types(request=request) + + # Handle the response + for response in page_result: + print(response) + +# [END dataplex_v1_generated_CatalogService_ListEntryTypes_sync] diff --git a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_catalog_service_list_metadata_jobs_async.py b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_catalog_service_list_metadata_jobs_async.py new file mode 100644 index 000000000000..8d07e30862df --- /dev/null +++ b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_catalog_service_list_metadata_jobs_async.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ListMetadataJobs +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dataplex + + +# [START dataplex_v1_generated_CatalogService_ListMetadataJobs_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service
+# client as shown in:
+# https://googleapis.dev/python/google-api-core/latest/client_options.html
+from google.cloud import dataplex_v1
+
+
+async def sample_list_metadata_jobs():
+    # Create a client
+    client = dataplex_v1.CatalogServiceAsyncClient()
+
+    # Initialize request argument(s)
+    request = dataplex_v1.ListMetadataJobsRequest(
+        parent="parent_value",
+    )
+
+    # Make the request
+    page_result = await client.list_metadata_jobs(request=request)
+
+    # Handle the response
+    async for response in page_result:
+        print(response)
+
+# [END dataplex_v1_generated_CatalogService_ListMetadataJobs_async]
diff --git a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_catalog_service_list_metadata_jobs_sync.py b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_catalog_service_list_metadata_jobs_sync.py
new file mode 100644
index 000000000000..7c5043546f57
--- /dev/null
+++ b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_catalog_service_list_metadata_jobs_sync.py
@@ -0,0 +1,53 @@
+# -*- coding: utf-8 -*-
+# Copyright 2024 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+# Generated code. DO NOT EDIT!
+#
+# Snippet for ListMetadataJobs
+# NOTE: This snippet has been automatically generated for illustrative purposes only.
+# It may require modifications to work in your environment.
+
+# To install the latest published package dependency, execute the following:
+#   python3 -m pip install google-cloud-dataplex
+
+
+# [START dataplex_v1_generated_CatalogService_ListMetadataJobs_sync]
+# This snippet has been automatically generated and should be regarded as a
+# code template only.
+# It will require modifications to work:
+# - It may require correct/in-range values for request initialization.
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dataplex_v1 + + +def sample_list_metadata_jobs(): + # Create a client + client = dataplex_v1.CatalogServiceClient() + + # Initialize request argument(s) + request = dataplex_v1.ListMetadataJobsRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_metadata_jobs(request=request) + + # Handle the response + for response in page_result: + print(response) + +# [END dataplex_v1_generated_CatalogService_ListMetadataJobs_sync] diff --git a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_catalog_service_lookup_entry_async.py b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_catalog_service_lookup_entry_async.py new file mode 100644 index 000000000000..4b90f27553bb --- /dev/null +++ b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_catalog_service_lookup_entry_async.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for LookupEntry +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dataplex + + +# [START dataplex_v1_generated_CatalogService_LookupEntry_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dataplex_v1 + + +async def sample_lookup_entry(): + # Create a client + client = dataplex_v1.CatalogServiceAsyncClient() + + # Initialize request argument(s) + request = dataplex_v1.LookupEntryRequest( + name="name_value", + entry="entry_value", + ) + + # Make the request + response = await client.lookup_entry(request=request) + + # Handle the response + print(response) + +# [END dataplex_v1_generated_CatalogService_LookupEntry_async] diff --git a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_catalog_service_lookup_entry_sync.py b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_catalog_service_lookup_entry_sync.py new file mode 100644 index 000000000000..0a1d7dfbd187 --- /dev/null +++ b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_catalog_service_lookup_entry_sync.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for LookupEntry +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dataplex + + +# [START dataplex_v1_generated_CatalogService_LookupEntry_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dataplex_v1 + + +def sample_lookup_entry(): + # Create a client + client = dataplex_v1.CatalogServiceClient() + + # Initialize request argument(s) + request = dataplex_v1.LookupEntryRequest( + name="name_value", + entry="entry_value", + ) + + # Make the request + response = client.lookup_entry(request=request) + + # Handle the response + print(response) + +# [END dataplex_v1_generated_CatalogService_LookupEntry_sync] diff --git a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_catalog_service_search_entries_async.py b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_catalog_service_search_entries_async.py new file mode 100644 index 000000000000..9115fb45d14b --- /dev/null +++ b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_catalog_service_search_entries_async.py @@ -0,0 +1,54 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for SearchEntries +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dataplex + + +# [START dataplex_v1_generated_CatalogService_SearchEntries_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service
+# client as shown in:
+# https://googleapis.dev/python/google-api-core/latest/client_options.html
+from google.cloud import dataplex_v1
+
+
+async def sample_search_entries():
+    # Create a client
+    client = dataplex_v1.CatalogServiceAsyncClient()
+
+    # Initialize request argument(s)
+    request = dataplex_v1.SearchEntriesRequest(
+        name="name_value",
+        query="query_value",
+    )
+
+    # Make the request (the async client method is a coroutine, so it must
+    # be awaited to obtain the pager)
+    page_result = await client.search_entries(request=request)
+
+    # Handle the response
+    async for response in page_result:
+        print(response)
+
+# [END dataplex_v1_generated_CatalogService_SearchEntries_async]
diff --git a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_catalog_service_search_entries_sync.py b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_catalog_service_search_entries_sync.py
new file mode 100644
index 000000000000..adc2a58f4e3d
--- /dev/null
+++ b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_catalog_service_search_entries_sync.py
@@ -0,0 +1,54 @@
+# -*- coding: utf-8 -*-
+# Copyright 2024 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+# Generated code. DO NOT EDIT!
+#
+# Snippet for SearchEntries
+# NOTE: This snippet has been automatically generated for illustrative purposes only.
+# It may require modifications to work in your environment.
+
+# To install the latest published package dependency, execute the following:
+#   python3 -m pip install google-cloud-dataplex
+
+
+# [START dataplex_v1_generated_CatalogService_SearchEntries_sync]
+# This snippet has been automatically generated and should be regarded as a
+# code template only.
+# It will require modifications to work:
+# - It may require correct/in-range values for request initialization.
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dataplex_v1 + + +def sample_search_entries(): + # Create a client + client = dataplex_v1.CatalogServiceClient() + + # Initialize request argument(s) + request = dataplex_v1.SearchEntriesRequest( + name="name_value", + query="query_value", + ) + + # Make the request + page_result = client.search_entries(request=request) + + # Handle the response + for response in page_result: + print(response) + +# [END dataplex_v1_generated_CatalogService_SearchEntries_sync] diff --git a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_catalog_service_update_aspect_type_async.py b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_catalog_service_update_aspect_type_async.py new file mode 100644 index 000000000000..4d5cbf23aadc --- /dev/null +++ b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_catalog_service_update_aspect_type_async.py @@ -0,0 +1,60 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for UpdateAspectType +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dataplex + + +# [START dataplex_v1_generated_CatalogService_UpdateAspectType_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service
+# client as shown in:
+# https://googleapis.dev/python/google-api-core/latest/client_options.html
+from google.cloud import dataplex_v1
+
+
+async def sample_update_aspect_type():
+    # Create a client
+    client = dataplex_v1.CatalogServiceAsyncClient()
+
+    # Initialize request argument(s)
+    aspect_type = dataplex_v1.AspectType()
+    aspect_type.metadata_template.name = "name_value"
+    aspect_type.metadata_template.type_ = "type__value"
+
+    request = dataplex_v1.UpdateAspectTypeRequest(
+        aspect_type=aspect_type,
+    )
+
+    # Make the request (awaiting the call returns the long-running operation)
+    operation = await client.update_aspect_type(request=request)
+
+    print("Waiting for operation to complete...")
+
+    # AsyncOperation.result() is itself a coroutine and must be awaited
+    response = await operation.result()
+
+    # Handle the response
+    print(response)
+
+# [END dataplex_v1_generated_CatalogService_UpdateAspectType_async]
diff --git a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_catalog_service_update_aspect_type_sync.py b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_catalog_service_update_aspect_type_sync.py
new file mode 100644
index 000000000000..9408a61c68e2
--- /dev/null
+++ b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_catalog_service_update_aspect_type_sync.py
@@ -0,0 +1,60 @@
+# -*- coding: utf-8 -*-
+# Copyright 2024 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+# Generated code. DO NOT EDIT!
+#
+# Snippet for UpdateAspectType
+# NOTE: This snippet has been automatically generated for illustrative purposes only.
+# It may require modifications to work in your environment.
+
+# To install the latest published package dependency, execute the following:
+#   python3 -m pip install google-cloud-dataplex
+
+
+# [START dataplex_v1_generated_CatalogService_UpdateAspectType_sync]
+# This snippet has been automatically generated and should be regarded as a
+# code template only.
+# It will require modifications to work:
+# - It may require correct/in-range values for request initialization.
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dataplex_v1 + + +def sample_update_aspect_type(): + # Create a client + client = dataplex_v1.CatalogServiceClient() + + # Initialize request argument(s) + aspect_type = dataplex_v1.AspectType() + aspect_type.metadata_template.name = "name_value" + aspect_type.metadata_template.type_ = "type__value" + + request = dataplex_v1.UpdateAspectTypeRequest( + aspect_type=aspect_type, + ) + + # Make the request + operation = client.update_aspect_type(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + +# [END dataplex_v1_generated_CatalogService_UpdateAspectType_sync] diff --git a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_catalog_service_update_entry_async.py b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_catalog_service_update_entry_async.py new file mode 100644 index 000000000000..e88986c399d6 --- /dev/null +++ b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_catalog_service_update_entry_async.py @@ -0,0 +1,55 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for UpdateEntry +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dataplex + + +# [START dataplex_v1_generated_CatalogService_UpdateEntry_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dataplex_v1 + + +async def sample_update_entry(): + # Create a client + client = dataplex_v1.CatalogServiceAsyncClient() + + # Initialize request argument(s) + entry = dataplex_v1.Entry() + entry.entry_type = "entry_type_value" + + request = dataplex_v1.UpdateEntryRequest( + entry=entry, + ) + + # Make the request + response = await client.update_entry(request=request) + + # Handle the response + print(response) + +# [END dataplex_v1_generated_CatalogService_UpdateEntry_async] diff --git a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_catalog_service_update_entry_group_async.py b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_catalog_service_update_entry_group_async.py new file mode 100644 index 000000000000..f8e0716939f9 --- /dev/null +++ b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_catalog_service_update_entry_group_async.py @@ -0,0 +1,55 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for UpdateEntryGroup +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dataplex + + +# [START dataplex_v1_generated_CatalogService_UpdateEntryGroup_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service
+# client as shown in:
+# https://googleapis.dev/python/google-api-core/latest/client_options.html
+from google.cloud import dataplex_v1
+
+
+async def sample_update_entry_group():
+    # Create a client
+    client = dataplex_v1.CatalogServiceAsyncClient()
+
+    # Initialize request argument(s)
+    request = dataplex_v1.UpdateEntryGroupRequest(
+    )
+
+    # Make the request (awaiting the call returns the long-running operation)
+    operation = await client.update_entry_group(request=request)
+
+    print("Waiting for operation to complete...")
+
+    # AsyncOperation.result() is itself a coroutine and must be awaited
+    response = await operation.result()
+
+    # Handle the response
+    print(response)
+
+# [END dataplex_v1_generated_CatalogService_UpdateEntryGroup_async]
diff --git a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_catalog_service_update_entry_group_sync.py b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_catalog_service_update_entry_group_sync.py
new file mode 100644
index 000000000000..482e9db7a27f
--- /dev/null
+++ b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_catalog_service_update_entry_group_sync.py
@@ -0,0 +1,55 @@
+# -*- coding: utf-8 -*-
+# Copyright 2024 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+# Generated code. DO NOT EDIT!
+#
+# Snippet for UpdateEntryGroup
+# NOTE: This snippet has been automatically generated for illustrative purposes only.
+# It may require modifications to work in your environment.
+
+# To install the latest published package dependency, execute the following:
+#   python3 -m pip install google-cloud-dataplex
+
+
+# [START dataplex_v1_generated_CatalogService_UpdateEntryGroup_sync]
+# This snippet has been automatically generated and should be regarded as a
+# code template only.
+# It will require modifications to work:
+# - It may require correct/in-range values for request initialization.
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dataplex_v1 + + +def sample_update_entry_group(): + # Create a client + client = dataplex_v1.CatalogServiceClient() + + # Initialize request argument(s) + request = dataplex_v1.UpdateEntryGroupRequest( + ) + + # Make the request + operation = client.update_entry_group(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + +# [END dataplex_v1_generated_CatalogService_UpdateEntryGroup_sync] diff --git a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_catalog_service_update_entry_sync.py b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_catalog_service_update_entry_sync.py new file mode 100644 index 000000000000..69fde35c3515 --- /dev/null +++ b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_catalog_service_update_entry_sync.py @@ -0,0 +1,55 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for UpdateEntry +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dataplex + + +# [START dataplex_v1_generated_CatalogService_UpdateEntry_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dataplex_v1 + + +def sample_update_entry(): + # Create a client + client = dataplex_v1.CatalogServiceClient() + + # Initialize request argument(s) + entry = dataplex_v1.Entry() + entry.entry_type = "entry_type_value" + + request = dataplex_v1.UpdateEntryRequest( + entry=entry, + ) + + # Make the request + response = client.update_entry(request=request) + + # Handle the response + print(response) + +# [END dataplex_v1_generated_CatalogService_UpdateEntry_sync] diff --git a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_catalog_service_update_entry_type_async.py b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_catalog_service_update_entry_type_async.py new file mode 100644 index 000000000000..6fc55c02c3ab --- /dev/null +++ b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_catalog_service_update_entry_type_async.py @@ -0,0 +1,55 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for UpdateEntryType +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dataplex + + +# [START dataplex_v1_generated_CatalogService_UpdateEntryType_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service
+# client as shown in:
+# https://googleapis.dev/python/google-api-core/latest/client_options.html
+from google.cloud import dataplex_v1
+
+
+async def sample_update_entry_type():
+    # Create a client
+    client = dataplex_v1.CatalogServiceAsyncClient()
+
+    # Initialize request argument(s)
+    request = dataplex_v1.UpdateEntryTypeRequest(
+    )
+
+    # Make the request (awaiting the call returns the long-running operation)
+    operation = await client.update_entry_type(request=request)
+
+    print("Waiting for operation to complete...")
+
+    # AsyncOperation.result() is itself a coroutine and must be awaited
+    response = await operation.result()
+
+    # Handle the response
+    print(response)
+
+# [END dataplex_v1_generated_CatalogService_UpdateEntryType_async]
diff --git a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_catalog_service_update_entry_type_sync.py b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_catalog_service_update_entry_type_sync.py
new file mode 100644
index 000000000000..2f240368f075
--- /dev/null
+++ b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_catalog_service_update_entry_type_sync.py
@@ -0,0 +1,55 @@
+# -*- coding: utf-8 -*-
+# Copyright 2024 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+# Generated code. DO NOT EDIT!
+#
+# Snippet for UpdateEntryType
+# NOTE: This snippet has been automatically generated for illustrative purposes only.
+# It may require modifications to work in your environment.
+
+# To install the latest published package dependency, execute the following:
+#   python3 -m pip install google-cloud-dataplex
+
+
+# [START dataplex_v1_generated_CatalogService_UpdateEntryType_sync]
+# This snippet has been automatically generated and should be regarded as a
+# code template only.
+# It will require modifications to work:
+# - It may require correct/in-range values for request initialization.
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dataplex_v1 + + +def sample_update_entry_type(): + # Create a client + client = dataplex_v1.CatalogServiceClient() + + # Initialize request argument(s) + request = dataplex_v1.UpdateEntryTypeRequest( + ) + + # Make the request + operation = client.update_entry_type(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + +# [END dataplex_v1_generated_CatalogService_UpdateEntryType_sync] diff --git a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_content_service_create_content_async.py b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_content_service_create_content_async.py new file mode 100644 index 000000000000..353f1664db39 --- /dev/null +++ b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_content_service_create_content_async.py @@ -0,0 +1,58 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for CreateContent +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dataplex + + +# [START dataplex_v1_generated_ContentService_CreateContent_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dataplex_v1 + + +async def sample_create_content(): + # Create a client + client = dataplex_v1.ContentServiceAsyncClient() + + # Initialize request argument(s) + content = dataplex_v1.Content() + content.data_text = "data_text_value" + content.sql_script.engine = "SPARK" + content.path = "path_value" + + request = dataplex_v1.CreateContentRequest( + parent="parent_value", + content=content, + ) + + # Make the request + response = await client.create_content(request=request) + + # Handle the response + print(response) + +# [END dataplex_v1_generated_ContentService_CreateContent_async] diff --git a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_content_service_create_content_sync.py b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_content_service_create_content_sync.py new file mode 100644 index 000000000000..80aa3386d3ae --- /dev/null +++ b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_content_service_create_content_sync.py @@ -0,0 +1,58 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for CreateContent +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dataplex + + +# [START dataplex_v1_generated_ContentService_CreateContent_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dataplex_v1 + + +def sample_create_content(): + # Create a client + client = dataplex_v1.ContentServiceClient() + + # Initialize request argument(s) + content = dataplex_v1.Content() + content.data_text = "data_text_value" + content.sql_script.engine = "SPARK" + content.path = "path_value" + + request = dataplex_v1.CreateContentRequest( + parent="parent_value", + content=content, + ) + + # Make the request + response = client.create_content(request=request) + + # Handle the response + print(response) + +# [END dataplex_v1_generated_ContentService_CreateContent_sync] diff --git a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_content_service_delete_content_async.py b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_content_service_delete_content_async.py new file mode 100644 index 000000000000..195baf10d664 --- /dev/null +++ b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_content_service_delete_content_async.py @@ -0,0 +1,50 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for DeleteContent +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dataplex + + +# [START dataplex_v1_generated_ContentService_DeleteContent_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dataplex_v1 + + +async def sample_delete_content(): + # Create a client + client = dataplex_v1.ContentServiceAsyncClient() + + # Initialize request argument(s) + request = dataplex_v1.DeleteContentRequest( + name="name_value", + ) + + # Make the request + await client.delete_content(request=request) + + +# [END dataplex_v1_generated_ContentService_DeleteContent_async] diff --git a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_content_service_delete_content_sync.py b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_content_service_delete_content_sync.py new file mode 100644 index 000000000000..c56872e3a3f1 --- /dev/null +++ b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_content_service_delete_content_sync.py @@ -0,0 +1,50 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for DeleteContent +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dataplex + + +# [START dataplex_v1_generated_ContentService_DeleteContent_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dataplex_v1 + + +def sample_delete_content(): + # Create a client + client = dataplex_v1.ContentServiceClient() + + # Initialize request argument(s) + request = dataplex_v1.DeleteContentRequest( + name="name_value", + ) + + # Make the request + client.delete_content(request=request) + + +# [END dataplex_v1_generated_ContentService_DeleteContent_sync] diff --git a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_content_service_get_content_async.py b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_content_service_get_content_async.py new file mode 100644 index 000000000000..3c82c57a70ad --- /dev/null +++ b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_content_service_get_content_async.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetContent +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dataplex + + +# [START dataplex_v1_generated_ContentService_GetContent_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dataplex_v1 + + +async def sample_get_content(): + # Create a client + client = dataplex_v1.ContentServiceAsyncClient() + + # Initialize request argument(s) + request = dataplex_v1.GetContentRequest( + name="name_value", + ) + + # Make the request + response = await client.get_content(request=request) + + # Handle the response + print(response) + +# [END dataplex_v1_generated_ContentService_GetContent_async] diff --git a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_content_service_get_content_sync.py b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_content_service_get_content_sync.py new file mode 100644 index 000000000000..dc25faccfde3 --- /dev/null +++ b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_content_service_get_content_sync.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetContent +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dataplex + + +# [START dataplex_v1_generated_ContentService_GetContent_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dataplex_v1 + + +def sample_get_content(): + # Create a client + client = dataplex_v1.ContentServiceClient() + + # Initialize request argument(s) + request = dataplex_v1.GetContentRequest( + name="name_value", + ) + + # Make the request + response = client.get_content(request=request) + + # Handle the response + print(response) + +# [END dataplex_v1_generated_ContentService_GetContent_sync] diff --git a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_content_service_get_iam_policy_async.py b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_content_service_get_iam_policy_async.py new file mode 100644 index 000000000000..b9f3c8d527cf --- /dev/null +++ b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_content_service_get_iam_policy_async.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetIamPolicy +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dataplex + + +# [START dataplex_v1_generated_ContentService_GetIamPolicy_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dataplex_v1 +from google.iam.v1 import iam_policy_pb2 # type: ignore + + +async def sample_get_iam_policy(): + # Create a client + client = dataplex_v1.ContentServiceAsyncClient() + + # Initialize request argument(s) + request = iam_policy_pb2.GetIamPolicyRequest( + resource="resource_value", + ) + + # Make the request + response = await client.get_iam_policy(request=request) + + # Handle the response + print(response) + +# [END dataplex_v1_generated_ContentService_GetIamPolicy_async] diff --git a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_content_service_get_iam_policy_sync.py b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_content_service_get_iam_policy_sync.py new file mode 100644 index 000000000000..1b6ca2879922 --- /dev/null +++ b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_content_service_get_iam_policy_sync.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetIamPolicy +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dataplex + + +# [START dataplex_v1_generated_ContentService_GetIamPolicy_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dataplex_v1 +from google.iam.v1 import iam_policy_pb2 # type: ignore + + +def sample_get_iam_policy(): + # Create a client + client = dataplex_v1.ContentServiceClient() + + # Initialize request argument(s) + request = iam_policy_pb2.GetIamPolicyRequest( + resource="resource_value", + ) + + # Make the request + response = client.get_iam_policy(request=request) + + # Handle the response + print(response) + +# [END dataplex_v1_generated_ContentService_GetIamPolicy_sync] diff --git a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_content_service_list_content_async.py b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_content_service_list_content_async.py new file mode 100644 index 000000000000..1d591e9fc2c6 --- /dev/null +++ b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_content_service_list_content_async.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ListContent +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dataplex + + +# [START dataplex_v1_generated_ContentService_ListContent_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service
+# client as shown in:
+# https://googleapis.dev/python/google-api-core/latest/client_options.html
+from google.cloud import dataplex_v1
+
+
+async def sample_list_content():
+    # Create a client
+    client = dataplex_v1.ContentServiceAsyncClient()
+
+    # Initialize request argument(s)
+    request = dataplex_v1.ListContentRequest(
+        parent="parent_value",
+    )
+
+    # Make the request (the async client method is a coroutine, so it must
+    # be awaited to obtain the pager)
+    page_result = await client.list_content(request=request)
+
+    # Handle the response
+    async for response in page_result:
+        print(response)
+
+# [END dataplex_v1_generated_ContentService_ListContent_async]
diff --git a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_content_service_list_content_sync.py b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_content_service_list_content_sync.py
new file mode 100644
index 000000000000..ad07990ff1ce
--- /dev/null
+++ b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_content_service_list_content_sync.py
@@ -0,0 +1,53 @@
+# -*- coding: utf-8 -*-
+# Copyright 2024 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+# Generated code. DO NOT EDIT!
+#
+# Snippet for ListContent
+# NOTE: This snippet has been automatically generated for illustrative purposes only.
+# It may require modifications to work in your environment.
+
+# To install the latest published package dependency, execute the following:
+#   python3 -m pip install google-cloud-dataplex
+
+
+# [START dataplex_v1_generated_ContentService_ListContent_sync]
+# This snippet has been automatically generated and should be regarded as a
+# code template only.
+# It will require modifications to work:
+# - It may require correct/in-range values for request initialization.
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dataplex_v1 + + +def sample_list_content(): + # Create a client + client = dataplex_v1.ContentServiceClient() + + # Initialize request argument(s) + request = dataplex_v1.ListContentRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_content(request=request) + + # Handle the response + for response in page_result: + print(response) + +# [END dataplex_v1_generated_ContentService_ListContent_sync] diff --git a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_content_service_set_iam_policy_async.py b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_content_service_set_iam_policy_async.py new file mode 100644 index 000000000000..c85e40228b20 --- /dev/null +++ b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_content_service_set_iam_policy_async.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for SetIamPolicy +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dataplex + + +# [START dataplex_v1_generated_ContentService_SetIamPolicy_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dataplex_v1 +from google.iam.v1 import iam_policy_pb2 # type: ignore + + +async def sample_set_iam_policy(): + # Create a client + client = dataplex_v1.ContentServiceAsyncClient() + + # Initialize request argument(s) + request = iam_policy_pb2.SetIamPolicyRequest( + resource="resource_value", + ) + + # Make the request + response = await client.set_iam_policy(request=request) + + # Handle the response + print(response) + +# [END dataplex_v1_generated_ContentService_SetIamPolicy_async] diff --git a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_content_service_set_iam_policy_sync.py b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_content_service_set_iam_policy_sync.py new file mode 100644 index 000000000000..be43e3cb4138 --- /dev/null +++ b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_content_service_set_iam_policy_sync.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for SetIamPolicy +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dataplex + + +# [START dataplex_v1_generated_ContentService_SetIamPolicy_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dataplex_v1 +from google.iam.v1 import iam_policy_pb2 # type: ignore + + +def sample_set_iam_policy(): + # Create a client + client = dataplex_v1.ContentServiceClient() + + # Initialize request argument(s) + request = iam_policy_pb2.SetIamPolicyRequest( + resource="resource_value", + ) + + # Make the request + response = client.set_iam_policy(request=request) + + # Handle the response + print(response) + +# [END dataplex_v1_generated_ContentService_SetIamPolicy_sync] diff --git a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_content_service_test_iam_permissions_async.py b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_content_service_test_iam_permissions_async.py new file mode 100644 index 000000000000..f7791f437bd3 --- /dev/null +++ b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_content_service_test_iam_permissions_async.py @@ -0,0 +1,54 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for TestIamPermissions +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dataplex + + +# [START dataplex_v1_generated_ContentService_TestIamPermissions_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dataplex_v1 +from google.iam.v1 import iam_policy_pb2 # type: ignore + + +async def sample_test_iam_permissions(): + # Create a client + client = dataplex_v1.ContentServiceAsyncClient() + + # Initialize request argument(s) + request = iam_policy_pb2.TestIamPermissionsRequest( + resource="resource_value", + permissions=['permissions_value1', 'permissions_value2'], + ) + + # Make the request + response = await client.test_iam_permissions(request=request) + + # Handle the response + print(response) + +# [END dataplex_v1_generated_ContentService_TestIamPermissions_async] diff --git a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_content_service_test_iam_permissions_sync.py b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_content_service_test_iam_permissions_sync.py new file mode 100644 index 000000000000..6133c0d7cf57 --- /dev/null +++ b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_content_service_test_iam_permissions_sync.py @@ -0,0 +1,54 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for TestIamPermissions +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dataplex + + +# [START dataplex_v1_generated_ContentService_TestIamPermissions_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dataplex_v1 +from google.iam.v1 import iam_policy_pb2 # type: ignore + + +def sample_test_iam_permissions(): + # Create a client + client = dataplex_v1.ContentServiceClient() + + # Initialize request argument(s) + request = iam_policy_pb2.TestIamPermissionsRequest( + resource="resource_value", + permissions=['permissions_value1', 'permissions_value2'], + ) + + # Make the request + response = client.test_iam_permissions(request=request) + + # Handle the response + print(response) + +# [END dataplex_v1_generated_ContentService_TestIamPermissions_sync] diff --git a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_content_service_update_content_async.py b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_content_service_update_content_async.py new file mode 100644 index 000000000000..bef234c8b411 --- /dev/null +++ b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_content_service_update_content_async.py @@ -0,0 +1,57 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for UpdateContent +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dataplex + + +# [START dataplex_v1_generated_ContentService_UpdateContent_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dataplex_v1 + + +async def sample_update_content(): + # Create a client + client = dataplex_v1.ContentServiceAsyncClient() + + # Initialize request argument(s) + content = dataplex_v1.Content() + content.data_text = "data_text_value" + content.sql_script.engine = "SPARK" + content.path = "path_value" + + request = dataplex_v1.UpdateContentRequest( + content=content, + ) + + # Make the request + response = await client.update_content(request=request) + + # Handle the response + print(response) + +# [END dataplex_v1_generated_ContentService_UpdateContent_async] diff --git a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_content_service_update_content_sync.py b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_content_service_update_content_sync.py new file mode 100644 index 000000000000..5883d944b68c --- /dev/null +++ b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_content_service_update_content_sync.py @@ -0,0 +1,57 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for UpdateContent +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dataplex + + +# [START dataplex_v1_generated_ContentService_UpdateContent_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dataplex_v1 + + +def sample_update_content(): + # Create a client + client = dataplex_v1.ContentServiceClient() + + # Initialize request argument(s) + content = dataplex_v1.Content() + content.data_text = "data_text_value" + content.sql_script.engine = "SPARK" + content.path = "path_value" + + request = dataplex_v1.UpdateContentRequest( + content=content, + ) + + # Make the request + response = client.update_content(request=request) + + # Handle the response + print(response) + +# [END dataplex_v1_generated_ContentService_UpdateContent_sync] diff --git a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_data_scan_service_create_data_scan_async.py b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_data_scan_service_create_data_scan_async.py new file mode 100644 index 000000000000..21651df34ae5 --- /dev/null +++ b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_data_scan_service_create_data_scan_async.py @@ -0,0 +1,62 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for CreateDataScan +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dataplex + + +# [START dataplex_v1_generated_DataScanService_CreateDataScan_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service
+# client as shown in:
+# https://googleapis.dev/python/google-api-core/latest/client_options.html
+from google.cloud import dataplex_v1


+async def sample_create_data_scan():
+    # Create a client
+    client = dataplex_v1.DataScanServiceAsyncClient()
+
+    # Initialize request argument(s)
+    data_scan = dataplex_v1.DataScan()
+    data_scan.data_quality_spec.rules = [dataplex_v1.DataQualityRule(dimension="dimension_value")]
+    data_scan.data.entity = "entity_value"
+
+    request = dataplex_v1.CreateDataScanRequest(
+        parent="parent_value",
+        data_scan=data_scan,
+        data_scan_id="data_scan_id_value",
+    )
+
+    # Make the request
+    operation = await client.create_data_scan(request=request)
+
+    print("Waiting for operation to complete...")
+
+    response = await operation.result()
+
+    # Handle the response
+    print(response)
+
+# [END dataplex_v1_generated_DataScanService_CreateDataScan_async]
diff --git a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_data_scan_service_create_data_scan_sync.py b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_data_scan_service_create_data_scan_sync.py
new file mode 100644
index 000000000000..77776f6960df
--- /dev/null
+++ b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_data_scan_service_create_data_scan_sync.py
@@ -0,0 +1,62 @@
+# -*- coding: utf-8 -*-
+# Copyright 2024 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+# Generated code. DO NOT EDIT!
+#
+# Snippet for CreateDataScan
+# NOTE: This snippet has been automatically generated for illustrative purposes only.
+# It may require modifications to work in your environment.

+# To install the latest published package dependency, execute the following:
+#   python3 -m pip install google-cloud-dataplex


+# [START dataplex_v1_generated_DataScanService_CreateDataScan_sync]
+# This snippet has been automatically generated and should be regarded as a
+# code template only.
+# It will require modifications to work:
+# - It may require correct/in-range values for request initialization.
+# - It may require specifying regional endpoints when creating the service
+# client as shown in:
+# https://googleapis.dev/python/google-api-core/latest/client_options.html
+from google.cloud import dataplex_v1


+def sample_create_data_scan():
+    # Create a client
+    client = dataplex_v1.DataScanServiceClient()
+
+    # Initialize request argument(s)
+    data_scan = dataplex_v1.DataScan()
+    data_scan.data_quality_spec.rules = [dataplex_v1.DataQualityRule(dimension="dimension_value")]
+    data_scan.data.entity = "entity_value"
+
+    request = dataplex_v1.CreateDataScanRequest(
+        parent="parent_value",
+        data_scan=data_scan,
+        data_scan_id="data_scan_id_value",
+    )
+
+    # Make the request
+    operation = client.create_data_scan(request=request)
+
+    print("Waiting for operation to complete...")
+
+    response = operation.result()
+
+    # Handle the response
+    print(response)
+
+# [END dataplex_v1_generated_DataScanService_CreateDataScan_sync]
diff --git a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_data_scan_service_delete_data_scan_async.py b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_data_scan_service_delete_data_scan_async.py
new file mode 100644
index 000000000000..06728d347dcc
--- /dev/null
+++ b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_data_scan_service_delete_data_scan_async.py
@@ -0,0 +1,56 @@
+# -*- coding: utf-8 -*-
+# Copyright 2024 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+# Generated code. DO NOT EDIT!
+#
+# Snippet for DeleteDataScan
+# NOTE: This snippet has been automatically generated for illustrative purposes only.
+# It may require modifications to work in your environment.

+# To install the latest published package dependency, execute the following:
+#   python3 -m pip install google-cloud-dataplex


+# [START dataplex_v1_generated_DataScanService_DeleteDataScan_async]
+# This snippet has been automatically generated and should be regarded as a
+# code template only.
+# It will require modifications to work:
+# - It may require correct/in-range values for request initialization.
+# - It may require specifying regional endpoints when creating the service
+# client as shown in:
+# https://googleapis.dev/python/google-api-core/latest/client_options.html
+from google.cloud import dataplex_v1


+async def sample_delete_data_scan():
+    # Create a client
+    client = dataplex_v1.DataScanServiceAsyncClient()
+
+    # Initialize request argument(s)
+    request = dataplex_v1.DeleteDataScanRequest(
+        name="name_value",
+    )
+
+    # Make the request
+    operation = await client.delete_data_scan(request=request)
+
+    print("Waiting for operation to complete...")
+
+    response = await operation.result()
+
+    # Handle the response
+    print(response)
+
+# [END dataplex_v1_generated_DataScanService_DeleteDataScan_async]
diff --git a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_data_scan_service_delete_data_scan_sync.py b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_data_scan_service_delete_data_scan_sync.py
new file mode 100644
index 000000000000..d3b6b57e56cc
--- /dev/null
+++ b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_data_scan_service_delete_data_scan_sync.py
@@ -0,0 +1,56 @@
+# -*- coding: utf-8 -*-
+# Copyright 2024 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+# Generated code. DO NOT EDIT!
+#
+# Snippet for DeleteDataScan
+# NOTE: This snippet has been automatically generated for illustrative purposes only.
+# It may require modifications to work in your environment.

+# To install the latest published package dependency, execute the following:
+#   python3 -m pip install google-cloud-dataplex


+# [START dataplex_v1_generated_DataScanService_DeleteDataScan_sync]
+# This snippet has been automatically generated and should be regarded as a
+# code template only.
+# It will require modifications to work:
+# - It may require correct/in-range values for request initialization.
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dataplex_v1 + + +def sample_delete_data_scan(): + # Create a client + client = dataplex_v1.DataScanServiceClient() + + # Initialize request argument(s) + request = dataplex_v1.DeleteDataScanRequest( + name="name_value", + ) + + # Make the request + operation = client.delete_data_scan(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + +# [END dataplex_v1_generated_DataScanService_DeleteDataScan_sync] diff --git a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_data_scan_service_generate_data_quality_rules_async.py b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_data_scan_service_generate_data_quality_rules_async.py new file mode 100644 index 000000000000..cb75e6e697af --- /dev/null +++ b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_data_scan_service_generate_data_quality_rules_async.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GenerateDataQualityRules +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dataplex + + +# [START dataplex_v1_generated_DataScanService_GenerateDataQualityRules_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dataplex_v1 + + +async def sample_generate_data_quality_rules(): + # Create a client + client = dataplex_v1.DataScanServiceAsyncClient() + + # Initialize request argument(s) + request = dataplex_v1.GenerateDataQualityRulesRequest( + name="name_value", + ) + + # Make the request + response = await client.generate_data_quality_rules(request=request) + + # Handle the response + print(response) + +# [END dataplex_v1_generated_DataScanService_GenerateDataQualityRules_async] diff --git a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_data_scan_service_generate_data_quality_rules_sync.py b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_data_scan_service_generate_data_quality_rules_sync.py new file mode 100644 index 000000000000..ad0deadc75f9 --- /dev/null +++ b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_data_scan_service_generate_data_quality_rules_sync.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GenerateDataQualityRules +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dataplex + + +# [START dataplex_v1_generated_DataScanService_GenerateDataQualityRules_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dataplex_v1 + + +def sample_generate_data_quality_rules(): + # Create a client + client = dataplex_v1.DataScanServiceClient() + + # Initialize request argument(s) + request = dataplex_v1.GenerateDataQualityRulesRequest( + name="name_value", + ) + + # Make the request + response = client.generate_data_quality_rules(request=request) + + # Handle the response + print(response) + +# [END dataplex_v1_generated_DataScanService_GenerateDataQualityRules_sync] diff --git a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_data_scan_service_get_data_scan_async.py b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_data_scan_service_get_data_scan_async.py new file mode 100644 index 000000000000..676de83d3706 --- /dev/null +++ b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_data_scan_service_get_data_scan_async.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetDataScan +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dataplex + + +# [START dataplex_v1_generated_DataScanService_GetDataScan_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dataplex_v1 + + +async def sample_get_data_scan(): + # Create a client + client = dataplex_v1.DataScanServiceAsyncClient() + + # Initialize request argument(s) + request = dataplex_v1.GetDataScanRequest( + name="name_value", + ) + + # Make the request + response = await client.get_data_scan(request=request) + + # Handle the response + print(response) + +# [END dataplex_v1_generated_DataScanService_GetDataScan_async] diff --git a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_data_scan_service_get_data_scan_job_async.py b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_data_scan_service_get_data_scan_job_async.py new file mode 100644 index 000000000000..e835b0ca591a --- /dev/null +++ b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_data_scan_service_get_data_scan_job_async.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetDataScanJob +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dataplex + + +# [START dataplex_v1_generated_DataScanService_GetDataScanJob_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dataplex_v1 + + +async def sample_get_data_scan_job(): + # Create a client + client = dataplex_v1.DataScanServiceAsyncClient() + + # Initialize request argument(s) + request = dataplex_v1.GetDataScanJobRequest( + name="name_value", + ) + + # Make the request + response = await client.get_data_scan_job(request=request) + + # Handle the response + print(response) + +# [END dataplex_v1_generated_DataScanService_GetDataScanJob_async] diff --git a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_data_scan_service_get_data_scan_job_sync.py b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_data_scan_service_get_data_scan_job_sync.py new file mode 100644 index 000000000000..bddec8d6189d --- /dev/null +++ b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_data_scan_service_get_data_scan_job_sync.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetDataScanJob +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dataplex + + +# [START dataplex_v1_generated_DataScanService_GetDataScanJob_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dataplex_v1 + + +def sample_get_data_scan_job(): + # Create a client + client = dataplex_v1.DataScanServiceClient() + + # Initialize request argument(s) + request = dataplex_v1.GetDataScanJobRequest( + name="name_value", + ) + + # Make the request + response = client.get_data_scan_job(request=request) + + # Handle the response + print(response) + +# [END dataplex_v1_generated_DataScanService_GetDataScanJob_sync] diff --git a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_data_scan_service_get_data_scan_sync.py b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_data_scan_service_get_data_scan_sync.py new file mode 100644 index 000000000000..fe36809dbf5b --- /dev/null +++ b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_data_scan_service_get_data_scan_sync.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetDataScan +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dataplex + + +# [START dataplex_v1_generated_DataScanService_GetDataScan_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dataplex_v1 + + +def sample_get_data_scan(): + # Create a client + client = dataplex_v1.DataScanServiceClient() + + # Initialize request argument(s) + request = dataplex_v1.GetDataScanRequest( + name="name_value", + ) + + # Make the request + response = client.get_data_scan(request=request) + + # Handle the response + print(response) + +# [END dataplex_v1_generated_DataScanService_GetDataScan_sync] diff --git a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_data_scan_service_list_data_scan_jobs_async.py b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_data_scan_service_list_data_scan_jobs_async.py new file mode 100644 index 000000000000..9232f30a6311 --- /dev/null +++ b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_data_scan_service_list_data_scan_jobs_async.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ListDataScanJobs +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dataplex + + +# [START dataplex_v1_generated_DataScanService_ListDataScanJobs_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service
+# client as shown in:
+# https://googleapis.dev/python/google-api-core/latest/client_options.html
+from google.cloud import dataplex_v1


+async def sample_list_data_scan_jobs():
+    # Create a client
+    client = dataplex_v1.DataScanServiceAsyncClient()
+
+    # Initialize request argument(s)
+    request = dataplex_v1.ListDataScanJobsRequest(
+        parent="parent_value",
+    )
+
+    # Make the request
+    page_result = await client.list_data_scan_jobs(request=request)
+
+    # Handle the response
+    async for response in page_result:
+        print(response)
+
+# [END dataplex_v1_generated_DataScanService_ListDataScanJobs_async]
diff --git a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_data_scan_service_list_data_scan_jobs_sync.py b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_data_scan_service_list_data_scan_jobs_sync.py
new file mode 100644
index 000000000000..e7cb0443ec86
--- /dev/null
+++ b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_data_scan_service_list_data_scan_jobs_sync.py
@@ -0,0 +1,53 @@
+# -*- coding: utf-8 -*-
+# Copyright 2024 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+# Generated code. DO NOT EDIT!
+#
+# Snippet for ListDataScanJobs
+# NOTE: This snippet has been automatically generated for illustrative purposes only.
+# It may require modifications to work in your environment.

+# To install the latest published package dependency, execute the following:
+#   python3 -m pip install google-cloud-dataplex


+# [START dataplex_v1_generated_DataScanService_ListDataScanJobs_sync]
+# This snippet has been automatically generated and should be regarded as a
+# code template only.
+# It will require modifications to work:
+# - It may require correct/in-range values for request initialization.
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dataplex_v1 + + +def sample_list_data_scan_jobs(): + # Create a client + client = dataplex_v1.DataScanServiceClient() + + # Initialize request argument(s) + request = dataplex_v1.ListDataScanJobsRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_data_scan_jobs(request=request) + + # Handle the response + for response in page_result: + print(response) + +# [END dataplex_v1_generated_DataScanService_ListDataScanJobs_sync] diff --git a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_data_scan_service_list_data_scans_async.py b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_data_scan_service_list_data_scans_async.py new file mode 100644 index 000000000000..51b2f86cfdd9 --- /dev/null +++ b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_data_scan_service_list_data_scans_async.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ListDataScans +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dataplex + + +# [START dataplex_v1_generated_DataScanService_ListDataScans_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service
+# client as shown in:
+# https://googleapis.dev/python/google-api-core/latest/client_options.html
+from google.cloud import dataplex_v1


+async def sample_list_data_scans():
+    # Create a client
+    client = dataplex_v1.DataScanServiceAsyncClient()
+
+    # Initialize request argument(s)
+    request = dataplex_v1.ListDataScansRequest(
+        parent="parent_value",
+    )
+
+    # Make the request
+    page_result = await client.list_data_scans(request=request)
+
+    # Handle the response
+    async for response in page_result:
+        print(response)
+
+# [END dataplex_v1_generated_DataScanService_ListDataScans_async]
diff --git a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_data_scan_service_list_data_scans_sync.py b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_data_scan_service_list_data_scans_sync.py
new file mode 100644
index 000000000000..7325c27fda56
--- /dev/null
+++ b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_data_scan_service_list_data_scans_sync.py
@@ -0,0 +1,53 @@
+# -*- coding: utf-8 -*-
+# Copyright 2024 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+# Generated code. DO NOT EDIT!
+#
+# Snippet for ListDataScans
+# NOTE: This snippet has been automatically generated for illustrative purposes only.
+# It may require modifications to work in your environment.

+# To install the latest published package dependency, execute the following:
+#   python3 -m pip install google-cloud-dataplex


+# [START dataplex_v1_generated_DataScanService_ListDataScans_sync]
+# This snippet has been automatically generated and should be regarded as a
+# code template only.
+# It will require modifications to work:
+# - It may require correct/in-range values for request initialization.
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dataplex_v1 + + +def sample_list_data_scans(): + # Create a client + client = dataplex_v1.DataScanServiceClient() + + # Initialize request argument(s) + request = dataplex_v1.ListDataScansRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_data_scans(request=request) + + # Handle the response + for response in page_result: + print(response) + +# [END dataplex_v1_generated_DataScanService_ListDataScans_sync] diff --git a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_data_scan_service_run_data_scan_async.py b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_data_scan_service_run_data_scan_async.py new file mode 100644 index 000000000000..98a6b2322fcf --- /dev/null +++ b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_data_scan_service_run_data_scan_async.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for RunDataScan +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dataplex + + +# [START dataplex_v1_generated_DataScanService_RunDataScan_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dataplex_v1 + + +async def sample_run_data_scan(): + # Create a client + client = dataplex_v1.DataScanServiceAsyncClient() + + # Initialize request argument(s) + request = dataplex_v1.RunDataScanRequest( + name="name_value", + ) + + # Make the request + response = await client.run_data_scan(request=request) + + # Handle the response + print(response) + +# [END dataplex_v1_generated_DataScanService_RunDataScan_async] diff --git a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_data_scan_service_run_data_scan_sync.py b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_data_scan_service_run_data_scan_sync.py new file mode 100644 index 000000000000..e895451b7053 --- /dev/null +++ b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_data_scan_service_run_data_scan_sync.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for RunDataScan +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dataplex + + +# [START dataplex_v1_generated_DataScanService_RunDataScan_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dataplex_v1 + + +def sample_run_data_scan(): + # Create a client + client = dataplex_v1.DataScanServiceClient() + + # Initialize request argument(s) + request = dataplex_v1.RunDataScanRequest( + name="name_value", + ) + + # Make the request + response = client.run_data_scan(request=request) + + # Handle the response + print(response) + +# [END dataplex_v1_generated_DataScanService_RunDataScan_sync] diff --git a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_data_scan_service_update_data_scan_async.py b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_data_scan_service_update_data_scan_async.py new file mode 100644 index 000000000000..124a401a492f --- /dev/null +++ b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_data_scan_service_update_data_scan_async.py @@ -0,0 +1,60 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for UpdateDataScan +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dataplex + + +# [START dataplex_v1_generated_DataScanService_UpdateDataScan_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service
+# client as shown in:
+# https://googleapis.dev/python/google-api-core/latest/client_options.html
+from google.cloud import dataplex_v1


+async def sample_update_data_scan():
+    # Create a client
+    client = dataplex_v1.DataScanServiceAsyncClient()
+
+    # Initialize request argument(s)
+    data_scan = dataplex_v1.DataScan()
+    data_scan.data_quality_spec.rules = [dataplex_v1.DataQualityRule(dimension="dimension_value")]
+    data_scan.data.entity = "entity_value"
+
+    request = dataplex_v1.UpdateDataScanRequest(
+        data_scan=data_scan,
+    )
+
+    # Make the request
+    operation = await client.update_data_scan(request=request)
+
+    print("Waiting for operation to complete...")
+
+    response = await operation.result()
+
+    # Handle the response
+    print(response)
+
+# [END dataplex_v1_generated_DataScanService_UpdateDataScan_async]
diff --git a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_data_scan_service_update_data_scan_sync.py b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_data_scan_service_update_data_scan_sync.py
new file mode 100644
index 000000000000..558f26403249
--- /dev/null
+++ b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_data_scan_service_update_data_scan_sync.py
@@ -0,0 +1,60 @@
+# -*- coding: utf-8 -*-
+# Copyright 2024 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+# Generated code. DO NOT EDIT!
+#
+# Snippet for UpdateDataScan
+# NOTE: This snippet has been automatically generated for illustrative purposes only.
+# It may require modifications to work in your environment.

+# To install the latest published package dependency, execute the following:
+#   python3 -m pip install google-cloud-dataplex


+# [START dataplex_v1_generated_DataScanService_UpdateDataScan_sync]
+# This snippet has been automatically generated and should be regarded as a
+# code template only.
+# It will require modifications to work:
+# - It may require correct/in-range values for request initialization.
+# - It may require specifying regional endpoints when creating the service
+# client as shown in:
+# https://googleapis.dev/python/google-api-core/latest/client_options.html
+from google.cloud import dataplex_v1


+def sample_update_data_scan():
+    # Create a client
+    client = dataplex_v1.DataScanServiceClient()
+
+    # Initialize request argument(s)
+    data_scan = dataplex_v1.DataScan()
+    data_scan.data_quality_spec.rules = [dataplex_v1.DataQualityRule(dimension="dimension_value")]
+    data_scan.data.entity = "entity_value"
+
+    request = dataplex_v1.UpdateDataScanRequest(
+        data_scan=data_scan,
+    )
+
+    # Make the request
+    operation = client.update_data_scan(request=request)
+
+    print("Waiting for operation to complete...")
+
+    response = operation.result()
+
+    # Handle the response
+    print(response)
+
+# [END dataplex_v1_generated_DataScanService_UpdateDataScan_sync]
diff --git a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_data_taxonomy_service_create_data_attribute_async.py b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_data_taxonomy_service_create_data_attribute_async.py
new file mode 100644
index 000000000000..a005a033674c
--- /dev/null
+++ b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_data_taxonomy_service_create_data_attribute_async.py
@@ -0,0 +1,57 @@
+# -*- coding: utf-8 -*-
+# Copyright 2024 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+# Generated code. DO NOT EDIT!
+#
+# Snippet for CreateDataAttribute
+# NOTE: This snippet has been automatically generated for illustrative purposes only.
+# It may require modifications to work in your environment.

+# To install the latest published package dependency, execute the following:
+#   python3 -m pip install google-cloud-dataplex


+# [START dataplex_v1_generated_DataTaxonomyService_CreateDataAttribute_async]
+# This snippet has been automatically generated and should be regarded as a
+# code template only.
+# It will require modifications to work:
+# - It may require correct/in-range values for request initialization.
+# - It may require specifying regional endpoints when creating the service
+# client as shown in:
+# https://googleapis.dev/python/google-api-core/latest/client_options.html
+from google.cloud import dataplex_v1


+async def sample_create_data_attribute():
+    # Create a client
+    client = dataplex_v1.DataTaxonomyServiceAsyncClient()
+
+    # Initialize request argument(s)
+    request = dataplex_v1.CreateDataAttributeRequest(
+        parent="parent_value",
+        data_attribute_id="data_attribute_id_value",
+    )
+
+    # Make the request
+    operation = await client.create_data_attribute(request=request)
+
+    print("Waiting for operation to complete...")
+
+    response = await operation.result()
+
+    # Handle the response
+    print(response)
+
+# [END dataplex_v1_generated_DataTaxonomyService_CreateDataAttribute_async]
diff --git a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_data_taxonomy_service_create_data_attribute_binding_async.py b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_data_taxonomy_service_create_data_attribute_binding_async.py
new file mode 100644
index 000000000000..d182498546d6
--- /dev/null
+++ b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_data_taxonomy_service_create_data_attribute_binding_async.py
@@ -0,0 +1,61 @@
+# -*- coding: utf-8 -*-
+# Copyright 2024 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+# Generated code. DO NOT EDIT!
+#
+# Snippet for CreateDataAttributeBinding
+# NOTE: This snippet has been automatically generated for illustrative purposes only.
+# It may require modifications to work in your environment.

+# To install the latest published package dependency, execute the following:
+#   python3 -m pip install google-cloud-dataplex


+# [START dataplex_v1_generated_DataTaxonomyService_CreateDataAttributeBinding_async]
+# This snippet has been automatically generated and should be regarded as a
+# code template only.
+# It will require modifications to work:
+# - It may require correct/in-range values for request initialization.
+# - It may require specifying regional endpoints when creating the service
+#   client as shown in:
+#   https://googleapis.dev/python/google-api-core/latest/client_options.html
+from google.cloud import dataplex_v1
+
+
+async def sample_create_data_attribute_binding():
+    # Create a client
+    client = dataplex_v1.DataTaxonomyServiceAsyncClient()
+
+    # Initialize request argument(s)
+    data_attribute_binding = dataplex_v1.DataAttributeBinding()
+    data_attribute_binding.resource = "resource_value"
+
+    request = dataplex_v1.CreateDataAttributeBindingRequest(
+        parent="parent_value",
+        data_attribute_binding_id="data_attribute_binding_id_value",
+        data_attribute_binding=data_attribute_binding,
+    )
+
+    # Make the request
+    operation = await client.create_data_attribute_binding(request=request)
+
+    print("Waiting for operation to complete...")
+
+    response = await operation.result()
+
+    # Handle the response
+    print(response)
+
+# [END dataplex_v1_generated_DataTaxonomyService_CreateDataAttributeBinding_async]
diff --git a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_data_taxonomy_service_create_data_attribute_binding_sync.py b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_data_taxonomy_service_create_data_attribute_binding_sync.py
new file mode 100644
index 000000000000..cab78ee20922
--- /dev/null
+++ b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_data_taxonomy_service_create_data_attribute_binding_sync.py
@@ -0,0 +1,61 @@
+# -*- coding: utf-8 -*-
+# Copyright 2024 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+# Generated code. DO NOT EDIT!
+#
+# Snippet for CreateDataAttributeBinding
+# NOTE: This snippet has been automatically generated for illustrative purposes only.
+# It may require modifications to work in your environment.
+
+# To install the latest published package dependency, execute the following:
+#   python3 -m pip install google-cloud-dataplex
+
+
+# [START dataplex_v1_generated_DataTaxonomyService_CreateDataAttributeBinding_sync]
+# This snippet has been automatically generated and should be regarded as a
+# code template only.
+# It will require modifications to work:
+# - It may require correct/in-range values for request initialization.
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dataplex_v1 + + +def sample_create_data_attribute_binding(): + # Create a client + client = dataplex_v1.DataTaxonomyServiceClient() + + # Initialize request argument(s) + data_attribute_binding = dataplex_v1.DataAttributeBinding() + data_attribute_binding.resource = "resource_value" + + request = dataplex_v1.CreateDataAttributeBindingRequest( + parent="parent_value", + data_attribute_binding_id="data_attribute_binding_id_value", + data_attribute_binding=data_attribute_binding, + ) + + # Make the request + operation = client.create_data_attribute_binding(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + +# [END dataplex_v1_generated_DataTaxonomyService_CreateDataAttributeBinding_sync] diff --git a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_data_taxonomy_service_create_data_attribute_sync.py b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_data_taxonomy_service_create_data_attribute_sync.py new file mode 100644 index 000000000000..d560872c9a07 --- /dev/null +++ b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_data_taxonomy_service_create_data_attribute_sync.py @@ -0,0 +1,57 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for CreateDataAttribute +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dataplex + + +# [START dataplex_v1_generated_DataTaxonomyService_CreateDataAttribute_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dataplex_v1 + + +def sample_create_data_attribute(): + # Create a client + client = dataplex_v1.DataTaxonomyServiceClient() + + # Initialize request argument(s) + request = dataplex_v1.CreateDataAttributeRequest( + parent="parent_value", + data_attribute_id="data_attribute_id_value", + ) + + # Make the request + operation = client.create_data_attribute(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + +# [END dataplex_v1_generated_DataTaxonomyService_CreateDataAttribute_sync] diff --git a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_data_taxonomy_service_create_data_taxonomy_async.py b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_data_taxonomy_service_create_data_taxonomy_async.py new file mode 100644 index 000000000000..1217b679c08e --- /dev/null +++ b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_data_taxonomy_service_create_data_taxonomy_async.py @@ -0,0 +1,57 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for CreateDataTaxonomy +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dataplex + + +# [START dataplex_v1_generated_DataTaxonomyService_CreateDataTaxonomy_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service
+#   client as shown in:
+#   https://googleapis.dev/python/google-api-core/latest/client_options.html
+from google.cloud import dataplex_v1
+
+
+async def sample_create_data_taxonomy():
+    # Create a client
+    client = dataplex_v1.DataTaxonomyServiceAsyncClient()
+
+    # Initialize request argument(s)
+    request = dataplex_v1.CreateDataTaxonomyRequest(
+        parent="parent_value",
+        data_taxonomy_id="data_taxonomy_id_value",
+    )
+
+    # Make the request
+    operation = await client.create_data_taxonomy(request=request)
+
+    print("Waiting for operation to complete...")
+
+    response = await operation.result()
+
+    # Handle the response
+    print(response)
+
+# [END dataplex_v1_generated_DataTaxonomyService_CreateDataTaxonomy_async]
diff --git a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_data_taxonomy_service_create_data_taxonomy_sync.py b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_data_taxonomy_service_create_data_taxonomy_sync.py
new file mode 100644
index 000000000000..a6978897a170
--- /dev/null
+++ b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_data_taxonomy_service_create_data_taxonomy_sync.py
@@ -0,0 +1,57 @@
+# -*- coding: utf-8 -*-
+# Copyright 2024 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+# Generated code. DO NOT EDIT!
+#
+# Snippet for CreateDataTaxonomy
+# NOTE: This snippet has been automatically generated for illustrative purposes only.
+# It may require modifications to work in your environment.
+
+# To install the latest published package dependency, execute the following:
+#   python3 -m pip install google-cloud-dataplex
+
+
+# [START dataplex_v1_generated_DataTaxonomyService_CreateDataTaxonomy_sync]
+# This snippet has been automatically generated and should be regarded as a
+# code template only.
+# It will require modifications to work:
+# - It may require correct/in-range values for request initialization.
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dataplex_v1 + + +def sample_create_data_taxonomy(): + # Create a client + client = dataplex_v1.DataTaxonomyServiceClient() + + # Initialize request argument(s) + request = dataplex_v1.CreateDataTaxonomyRequest( + parent="parent_value", + data_taxonomy_id="data_taxonomy_id_value", + ) + + # Make the request + operation = client.create_data_taxonomy(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + +# [END dataplex_v1_generated_DataTaxonomyService_CreateDataTaxonomy_sync] diff --git a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_data_taxonomy_service_delete_data_attribute_async.py b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_data_taxonomy_service_delete_data_attribute_async.py new file mode 100644 index 000000000000..22ddcc5604ff --- /dev/null +++ b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_data_taxonomy_service_delete_data_attribute_async.py @@ -0,0 +1,56 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for DeleteDataAttribute +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dataplex + + +# [START dataplex_v1_generated_DataTaxonomyService_DeleteDataAttribute_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service
+#   client as shown in:
+#   https://googleapis.dev/python/google-api-core/latest/client_options.html
+from google.cloud import dataplex_v1
+
+
+async def sample_delete_data_attribute():
+    # Create a client
+    client = dataplex_v1.DataTaxonomyServiceAsyncClient()
+
+    # Initialize request argument(s)
+    request = dataplex_v1.DeleteDataAttributeRequest(
+        name="name_value",
+    )
+
+    # Make the request
+    operation = await client.delete_data_attribute(request=request)
+
+    print("Waiting for operation to complete...")
+
+    response = await operation.result()
+
+    # Handle the response
+    print(response)
+
+# [END dataplex_v1_generated_DataTaxonomyService_DeleteDataAttribute_async]
diff --git a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_data_taxonomy_service_delete_data_attribute_binding_async.py b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_data_taxonomy_service_delete_data_attribute_binding_async.py
new file mode 100644
index 000000000000..4576cb8067f2
--- /dev/null
+++ b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_data_taxonomy_service_delete_data_attribute_binding_async.py
@@ -0,0 +1,57 @@
+# -*- coding: utf-8 -*-
+# Copyright 2024 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+# Generated code. DO NOT EDIT!
+#
+# Snippet for DeleteDataAttributeBinding
+# NOTE: This snippet has been automatically generated for illustrative purposes only.
+# It may require modifications to work in your environment.
+
+# To install the latest published package dependency, execute the following:
+#   python3 -m pip install google-cloud-dataplex
+
+
+# [START dataplex_v1_generated_DataTaxonomyService_DeleteDataAttributeBinding_async]
+# This snippet has been automatically generated and should be regarded as a
+# code template only.
+# It will require modifications to work:
+# - It may require correct/in-range values for request initialization.
+# - It may require specifying regional endpoints when creating the service
+#   client as shown in:
+#   https://googleapis.dev/python/google-api-core/latest/client_options.html
+from google.cloud import dataplex_v1
+
+
+async def sample_delete_data_attribute_binding():
+    # Create a client
+    client = dataplex_v1.DataTaxonomyServiceAsyncClient()
+
+    # Initialize request argument(s)
+    request = dataplex_v1.DeleteDataAttributeBindingRequest(
+        name="name_value",
+        etag="etag_value",
+    )
+
+    # Make the request
+    operation = await client.delete_data_attribute_binding(request=request)
+
+    print("Waiting for operation to complete...")
+
+    response = await operation.result()
+
+    # Handle the response
+    print(response)
+
+# [END dataplex_v1_generated_DataTaxonomyService_DeleteDataAttributeBinding_async]
diff --git a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_data_taxonomy_service_delete_data_attribute_binding_sync.py b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_data_taxonomy_service_delete_data_attribute_binding_sync.py
new file mode 100644
index 000000000000..84d600eb7954
--- /dev/null
+++ b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_data_taxonomy_service_delete_data_attribute_binding_sync.py
@@ -0,0 +1,57 @@
+# -*- coding: utf-8 -*-
+# Copyright 2024 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+# Generated code. DO NOT EDIT!
+#
+# Snippet for DeleteDataAttributeBinding
+# NOTE: This snippet has been automatically generated for illustrative purposes only.
+# It may require modifications to work in your environment.
+
+# To install the latest published package dependency, execute the following:
+#   python3 -m pip install google-cloud-dataplex
+
+
+# [START dataplex_v1_generated_DataTaxonomyService_DeleteDataAttributeBinding_sync]
+# This snippet has been automatically generated and should be regarded as a
+# code template only.
+# It will require modifications to work:
+# - It may require correct/in-range values for request initialization.
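+# - It may require the binding's current ``etag``; one way to obtain it is
+#   to read the binding first (an illustrative sketch reusing the
+#   "name_value" placeholder):
+#
+#       binding = client.get_data_attribute_binding(name="name_value")
+#       request.etag = binding.etag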
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dataplex_v1 + + +def sample_delete_data_attribute_binding(): + # Create a client + client = dataplex_v1.DataTaxonomyServiceClient() + + # Initialize request argument(s) + request = dataplex_v1.DeleteDataAttributeBindingRequest( + name="name_value", + etag="etag_value", + ) + + # Make the request + operation = client.delete_data_attribute_binding(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + +# [END dataplex_v1_generated_DataTaxonomyService_DeleteDataAttributeBinding_sync] diff --git a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_data_taxonomy_service_delete_data_attribute_sync.py b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_data_taxonomy_service_delete_data_attribute_sync.py new file mode 100644 index 000000000000..80ed777ff441 --- /dev/null +++ b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_data_taxonomy_service_delete_data_attribute_sync.py @@ -0,0 +1,56 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for DeleteDataAttribute +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dataplex + + +# [START dataplex_v1_generated_DataTaxonomyService_DeleteDataAttribute_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dataplex_v1 + + +def sample_delete_data_attribute(): + # Create a client + client = dataplex_v1.DataTaxonomyServiceClient() + + # Initialize request argument(s) + request = dataplex_v1.DeleteDataAttributeRequest( + name="name_value", + ) + + # Make the request + operation = client.delete_data_attribute(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + +# [END dataplex_v1_generated_DataTaxonomyService_DeleteDataAttribute_sync] diff --git a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_data_taxonomy_service_delete_data_taxonomy_async.py b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_data_taxonomy_service_delete_data_taxonomy_async.py new file mode 100644 index 000000000000..4c5ef9e4b313 --- /dev/null +++ b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_data_taxonomy_service_delete_data_taxonomy_async.py @@ -0,0 +1,56 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for DeleteDataTaxonomy +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dataplex + + +# [START dataplex_v1_generated_DataTaxonomyService_DeleteDataTaxonomy_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service
+#   client as shown in:
+#   https://googleapis.dev/python/google-api-core/latest/client_options.html
+from google.cloud import dataplex_v1
+
+
+async def sample_delete_data_taxonomy():
+    # Create a client
+    client = dataplex_v1.DataTaxonomyServiceAsyncClient()
+
+    # Initialize request argument(s)
+    request = dataplex_v1.DeleteDataTaxonomyRequest(
+        name="name_value",
+    )
+
+    # Make the request
+    operation = await client.delete_data_taxonomy(request=request)
+
+    print("Waiting for operation to complete...")
+
+    response = await operation.result()
+
+    # Handle the response
+    print(response)
+
+# [END dataplex_v1_generated_DataTaxonomyService_DeleteDataTaxonomy_async]
diff --git a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_data_taxonomy_service_delete_data_taxonomy_sync.py b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_data_taxonomy_service_delete_data_taxonomy_sync.py
new file mode 100644
index 000000000000..08883b505729
--- /dev/null
+++ b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_data_taxonomy_service_delete_data_taxonomy_sync.py
@@ -0,0 +1,56 @@
+# -*- coding: utf-8 -*-
+# Copyright 2024 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+# Generated code. DO NOT EDIT!
+#
+# Snippet for DeleteDataTaxonomy
+# NOTE: This snippet has been automatically generated for illustrative purposes only.
+# It may require modifications to work in your environment.
+
+# To install the latest published package dependency, execute the following:
+#   python3 -m pip install google-cloud-dataplex
+
+
+# [START dataplex_v1_generated_DataTaxonomyService_DeleteDataTaxonomy_sync]
+# This snippet has been automatically generated and should be regarded as a
+# code template only.
+# It will require modifications to work:
+# - It may require correct/in-range values for request initialization.
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dataplex_v1 + + +def sample_delete_data_taxonomy(): + # Create a client + client = dataplex_v1.DataTaxonomyServiceClient() + + # Initialize request argument(s) + request = dataplex_v1.DeleteDataTaxonomyRequest( + name="name_value", + ) + + # Make the request + operation = client.delete_data_taxonomy(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + +# [END dataplex_v1_generated_DataTaxonomyService_DeleteDataTaxonomy_sync] diff --git a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_data_taxonomy_service_get_data_attribute_async.py b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_data_taxonomy_service_get_data_attribute_async.py new file mode 100644 index 000000000000..350639322e18 --- /dev/null +++ b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_data_taxonomy_service_get_data_attribute_async.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetDataAttribute +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dataplex + + +# [START dataplex_v1_generated_DataTaxonomyService_GetDataAttribute_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
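+# - It may require a fully qualified resource name for ``name``; for a
+#   DataAttribute this typically follows the pattern (an illustrative
+#   format, not a value to use verbatim):
+#   projects/{project}/locations/{location}/dataTaxonomies/{dataTaxonomy}/attributes/{data_attribute_id}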
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dataplex_v1 + + +async def sample_get_data_attribute(): + # Create a client + client = dataplex_v1.DataTaxonomyServiceAsyncClient() + + # Initialize request argument(s) + request = dataplex_v1.GetDataAttributeRequest( + name="name_value", + ) + + # Make the request + response = await client.get_data_attribute(request=request) + + # Handle the response + print(response) + +# [END dataplex_v1_generated_DataTaxonomyService_GetDataAttribute_async] diff --git a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_data_taxonomy_service_get_data_attribute_binding_async.py b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_data_taxonomy_service_get_data_attribute_binding_async.py new file mode 100644 index 000000000000..94a95946fcd5 --- /dev/null +++ b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_data_taxonomy_service_get_data_attribute_binding_async.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetDataAttributeBinding +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dataplex + + +# [START dataplex_v1_generated_DataTaxonomyService_GetDataAttributeBinding_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dataplex_v1 + + +async def sample_get_data_attribute_binding(): + # Create a client + client = dataplex_v1.DataTaxonomyServiceAsyncClient() + + # Initialize request argument(s) + request = dataplex_v1.GetDataAttributeBindingRequest( + name="name_value", + ) + + # Make the request + response = await client.get_data_attribute_binding(request=request) + + # Handle the response + print(response) + +# [END dataplex_v1_generated_DataTaxonomyService_GetDataAttributeBinding_async] diff --git a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_data_taxonomy_service_get_data_attribute_binding_sync.py b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_data_taxonomy_service_get_data_attribute_binding_sync.py new file mode 100644 index 000000000000..94f2288be733 --- /dev/null +++ b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_data_taxonomy_service_get_data_attribute_binding_sync.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetDataAttributeBinding +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dataplex + + +# [START dataplex_v1_generated_DataTaxonomyService_GetDataAttributeBinding_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dataplex_v1 + + +def sample_get_data_attribute_binding(): + # Create a client + client = dataplex_v1.DataTaxonomyServiceClient() + + # Initialize request argument(s) + request = dataplex_v1.GetDataAttributeBindingRequest( + name="name_value", + ) + + # Make the request + response = client.get_data_attribute_binding(request=request) + + # Handle the response + print(response) + +# [END dataplex_v1_generated_DataTaxonomyService_GetDataAttributeBinding_sync] diff --git a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_data_taxonomy_service_get_data_attribute_sync.py b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_data_taxonomy_service_get_data_attribute_sync.py new file mode 100644 index 000000000000..2a1adb57e6df --- /dev/null +++ b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_data_taxonomy_service_get_data_attribute_sync.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetDataAttribute +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dataplex + + +# [START dataplex_v1_generated_DataTaxonomyService_GetDataAttribute_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dataplex_v1 + + +def sample_get_data_attribute(): + # Create a client + client = dataplex_v1.DataTaxonomyServiceClient() + + # Initialize request argument(s) + request = dataplex_v1.GetDataAttributeRequest( + name="name_value", + ) + + # Make the request + response = client.get_data_attribute(request=request) + + # Handle the response + print(response) + +# [END dataplex_v1_generated_DataTaxonomyService_GetDataAttribute_sync] diff --git a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_data_taxonomy_service_get_data_taxonomy_async.py b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_data_taxonomy_service_get_data_taxonomy_async.py new file mode 100644 index 000000000000..f66f77ea22a3 --- /dev/null +++ b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_data_taxonomy_service_get_data_taxonomy_async.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetDataTaxonomy +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dataplex + + +# [START dataplex_v1_generated_DataTaxonomyService_GetDataTaxonomy_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dataplex_v1 + + +async def sample_get_data_taxonomy(): + # Create a client + client = dataplex_v1.DataTaxonomyServiceAsyncClient() + + # Initialize request argument(s) + request = dataplex_v1.GetDataTaxonomyRequest( + name="name_value", + ) + + # Make the request + response = await client.get_data_taxonomy(request=request) + + # Handle the response + print(response) + +# [END dataplex_v1_generated_DataTaxonomyService_GetDataTaxonomy_async] diff --git a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_data_taxonomy_service_get_data_taxonomy_sync.py b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_data_taxonomy_service_get_data_taxonomy_sync.py new file mode 100644 index 000000000000..15f085db3c20 --- /dev/null +++ b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_data_taxonomy_service_get_data_taxonomy_sync.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetDataTaxonomy +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dataplex + + +# [START dataplex_v1_generated_DataTaxonomyService_GetDataTaxonomy_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dataplex_v1 + + +def sample_get_data_taxonomy(): + # Create a client + client = dataplex_v1.DataTaxonomyServiceClient() + + # Initialize request argument(s) + request = dataplex_v1.GetDataTaxonomyRequest( + name="name_value", + ) + + # Make the request + response = client.get_data_taxonomy(request=request) + + # Handle the response + print(response) + +# [END dataplex_v1_generated_DataTaxonomyService_GetDataTaxonomy_sync] diff --git a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_data_taxonomy_service_list_data_attribute_bindings_async.py b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_data_taxonomy_service_list_data_attribute_bindings_async.py new file mode 100644 index 000000000000..207e67a7215a --- /dev/null +++ b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_data_taxonomy_service_list_data_attribute_bindings_async.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ListDataAttributeBindings +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dataplex + + +# [START dataplex_v1_generated_DataTaxonomyService_ListDataAttributeBindings_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
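+# - It may be adapted to consume results page-by-page instead of
+#   item-by-item; the returned async pager also exposes whole pages
+#   (an illustrative sketch):
+#
+#       async for page in page_result.pages:
+#           print(page.data_attribute_bindings)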
+# - It may require specifying regional endpoints when creating the service
+#   client as shown in:
+#   https://googleapis.dev/python/google-api-core/latest/client_options.html
+from google.cloud import dataplex_v1
+
+
+async def sample_list_data_attribute_bindings():
+    # Create a client
+    client = dataplex_v1.DataTaxonomyServiceAsyncClient()
+
+    # Initialize request argument(s)
+    request = dataplex_v1.ListDataAttributeBindingsRequest(
+        parent="parent_value",
+    )
+
+    # Make the request: the async client method must be awaited to obtain
+    # the pager before iterating over it.
+    page_result = await client.list_data_attribute_bindings(request=request)
+
+    # Handle the response
+    async for response in page_result:
+        print(response)
+
+# [END dataplex_v1_generated_DataTaxonomyService_ListDataAttributeBindings_async]
diff --git a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_data_taxonomy_service_list_data_attribute_bindings_sync.py b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_data_taxonomy_service_list_data_attribute_bindings_sync.py
new file mode 100644
index 000000000000..9139b66840ed
--- /dev/null
+++ b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_data_taxonomy_service_list_data_attribute_bindings_sync.py
@@ -0,0 +1,53 @@
+# -*- coding: utf-8 -*-
+# Copyright 2024 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+# Generated code. DO NOT EDIT!
+#
+# Snippet for ListDataAttributeBindings
+# NOTE: This snippet has been automatically generated for illustrative purposes only.
+# It may require modifications to work in your environment.
+
+# To install the latest published package dependency, execute the following:
+#   python3 -m pip install google-cloud-dataplex
+
+
+# [START dataplex_v1_generated_DataTaxonomyService_ListDataAttributeBindings_sync]
+# This snippet has been automatically generated and should be regarded as a
+# code template only.
+# It will require modifications to work:
+# - It may require correct/in-range values for request initialization.
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dataplex_v1 + + +def sample_list_data_attribute_bindings(): + # Create a client + client = dataplex_v1.DataTaxonomyServiceClient() + + # Initialize request argument(s) + request = dataplex_v1.ListDataAttributeBindingsRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_data_attribute_bindings(request=request) + + # Handle the response + for response in page_result: + print(response) + +# [END dataplex_v1_generated_DataTaxonomyService_ListDataAttributeBindings_sync] diff --git a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_data_taxonomy_service_list_data_attributes_async.py b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_data_taxonomy_service_list_data_attributes_async.py new file mode 100644 index 000000000000..6e9569b3ea57 --- /dev/null +++ b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_data_taxonomy_service_list_data_attributes_async.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ListDataAttributes +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dataplex + + +# [START dataplex_v1_generated_DataTaxonomyService_ListDataAttributes_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service
+#   client as shown in:
+#   https://googleapis.dev/python/google-api-core/latest/client_options.html
+from google.cloud import dataplex_v1
+
+
+async def sample_list_data_attributes():
+    # Create a client
+    client = dataplex_v1.DataTaxonomyServiceAsyncClient()
+
+    # Initialize request argument(s)
+    request = dataplex_v1.ListDataAttributesRequest(
+        parent="parent_value",
+    )
+
+    # Make the request
+    page_result = await client.list_data_attributes(request=request)
+
+    # Handle the response
+    async for response in page_result:
+        print(response)
+
+# [END dataplex_v1_generated_DataTaxonomyService_ListDataAttributes_async]
diff --git a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_data_taxonomy_service_list_data_attributes_sync.py b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_data_taxonomy_service_list_data_attributes_sync.py
new file mode 100644
index 000000000000..16454380d76d
--- /dev/null
+++ b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_data_taxonomy_service_list_data_attributes_sync.py
@@ -0,0 +1,53 @@
+# -*- coding: utf-8 -*-
+# Copyright 2024 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+# Generated code. DO NOT EDIT!
+#
+# Snippet for ListDataAttributes
+# NOTE: This snippet has been automatically generated for illustrative purposes only.
+# It may require modifications to work in your environment.
+
+# To install the latest published package dependency, execute the following:
+#   python3 -m pip install google-cloud-dataplex
+
+
+# [START dataplex_v1_generated_DataTaxonomyService_ListDataAttributes_sync]
+# This snippet has been automatically generated and should be regarded as a
+# code template only.
+# It will require modifications to work:
+# - It may require correct/in-range values for request initialization.
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dataplex_v1 + + +def sample_list_data_attributes(): + # Create a client + client = dataplex_v1.DataTaxonomyServiceClient() + + # Initialize request argument(s) + request = dataplex_v1.ListDataAttributesRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_data_attributes(request=request) + + # Handle the response + for response in page_result: + print(response) + +# [END dataplex_v1_generated_DataTaxonomyService_ListDataAttributes_sync] diff --git a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_data_taxonomy_service_list_data_taxonomies_async.py b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_data_taxonomy_service_list_data_taxonomies_async.py new file mode 100644 index 000000000000..6a1b36361816 --- /dev/null +++ b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_data_taxonomy_service_list_data_taxonomies_async.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ListDataTaxonomies +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dataplex + + +# [START dataplex_v1_generated_DataTaxonomyService_ListDataTaxonomies_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service
+#   client as shown in:
+#   https://googleapis.dev/python/google-api-core/latest/client_options.html
+from google.cloud import dataplex_v1
+
+
+async def sample_list_data_taxonomies():
+    # Create a client
+    client = dataplex_v1.DataTaxonomyServiceAsyncClient()
+
+    # Initialize request argument(s)
+    request = dataplex_v1.ListDataTaxonomiesRequest(
+        parent="parent_value",
+    )
+
+    # Make the request
+    page_result = await client.list_data_taxonomies(request=request)
+
+    # Handle the response
+    async for response in page_result:
+        print(response)
+
+# [END dataplex_v1_generated_DataTaxonomyService_ListDataTaxonomies_async]
diff --git a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_data_taxonomy_service_list_data_taxonomies_sync.py b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_data_taxonomy_service_list_data_taxonomies_sync.py
new file mode 100644
index 000000000000..cba0a7942e3b
--- /dev/null
+++ b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_data_taxonomy_service_list_data_taxonomies_sync.py
@@ -0,0 +1,53 @@
+# -*- coding: utf-8 -*-
+# Copyright 2024 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+# Generated code. DO NOT EDIT!
+#
+# Snippet for ListDataTaxonomies
+# NOTE: This snippet has been automatically generated for illustrative purposes only.
+# It may require modifications to work in your environment.
+
+# To install the latest published package dependency, execute the following:
+#   python3 -m pip install google-cloud-dataplex
+
+
+# [START dataplex_v1_generated_DataTaxonomyService_ListDataTaxonomies_sync]
+# This snippet has been automatically generated and should be regarded as a
+# code template only.
+# It will require modifications to work:
+# - It may require correct/in-range values for request initialization.
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dataplex_v1 + + +def sample_list_data_taxonomies(): + # Create a client + client = dataplex_v1.DataTaxonomyServiceClient() + + # Initialize request argument(s) + request = dataplex_v1.ListDataTaxonomiesRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_data_taxonomies(request=request) + + # Handle the response + for response in page_result: + print(response) + +# [END dataplex_v1_generated_DataTaxonomyService_ListDataTaxonomies_sync] diff --git a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_data_taxonomy_service_update_data_attribute_async.py b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_data_taxonomy_service_update_data_attribute_async.py new file mode 100644 index 000000000000..ceb29efa2ffa --- /dev/null +++ b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_data_taxonomy_service_update_data_attribute_async.py @@ -0,0 +1,55 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for UpdateDataAttribute +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dataplex + + +# [START dataplex_v1_generated_DataTaxonomyService_UpdateDataAttribute_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
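+# - It may require the ``data_attribute`` and ``update_mask`` request
+#   fields, which the API requires but this template leaves unset; for
+#   example (an illustrative sketch):
+#
+#       request = dataplex_v1.UpdateDataAttributeRequest(
+#           data_attribute=dataplex_v1.DataAttribute(),
+#           update_mask={"paths": ["description"]},
+#       )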
+# - It may require specifying regional endpoints when creating the service
+#   client as shown in:
+#   https://googleapis.dev/python/google-api-core/latest/client_options.html
+from google.cloud import dataplex_v1
+
+
+async def sample_update_data_attribute():
+    # Create a client
+    client = dataplex_v1.DataTaxonomyServiceAsyncClient()
+
+    # Initialize request argument(s)
+    request = dataplex_v1.UpdateDataAttributeRequest(
+    )
+
+    # Make the request
+    operation = client.update_data_attribute(request=request)
+
+    print("Waiting for operation to complete...")
+
+    response = await (await operation).result()
+
+    # Handle the response
+    print(response)
+
+# [END dataplex_v1_generated_DataTaxonomyService_UpdateDataAttribute_async]
diff --git a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_data_taxonomy_service_update_data_attribute_binding_async.py b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_data_taxonomy_service_update_data_attribute_binding_async.py
new file mode 100644
index 000000000000..a0a822815f7f
--- /dev/null
+++ b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_data_taxonomy_service_update_data_attribute_binding_async.py
@@ -0,0 +1,59 @@
+# -*- coding: utf-8 -*-
+# Copyright 2024 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+# Generated code. DO NOT EDIT!
+#
+# Snippet for UpdateDataAttributeBinding
+# NOTE: This snippet has been automatically generated for illustrative purposes only.
+# It may require modifications to work in your environment.

+# To install the latest published package dependency, execute the following:
+#   python3 -m pip install google-cloud-dataplex
+
+
+# [START dataplex_v1_generated_DataTaxonomyService_UpdateDataAttributeBinding_async]
+# This snippet has been automatically generated and should be regarded as a
+# code template only.
+# It will require modifications to work:
+# - It may require correct/in-range values for request initialization.
+# - It may require specifying regional endpoints when creating the service
+#   client as shown in:
+#   https://googleapis.dev/python/google-api-core/latest/client_options.html
+from google.cloud import dataplex_v1
+
+
+async def sample_update_data_attribute_binding():
+    # Create a client
+    client = dataplex_v1.DataTaxonomyServiceAsyncClient()
+
+    # Initialize request argument(s)
+    data_attribute_binding = dataplex_v1.DataAttributeBinding()
+    data_attribute_binding.resource = "resource_value"
+
+    request = dataplex_v1.UpdateDataAttributeBindingRequest(
+        data_attribute_binding=data_attribute_binding,
+    )
+
+    # Make the request
+    operation = client.update_data_attribute_binding(request=request)
+
+    print("Waiting for operation to complete...")
+
+    response = await (await operation).result()
+
+    # Handle the response
+    print(response)
+
+# [END dataplex_v1_generated_DataTaxonomyService_UpdateDataAttributeBinding_async]
diff --git a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_data_taxonomy_service_update_data_attribute_binding_sync.py b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_data_taxonomy_service_update_data_attribute_binding_sync.py
new file mode 100644
index 000000000000..39fbd39776d8
--- /dev/null
+++ b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_data_taxonomy_service_update_data_attribute_binding_sync.py
@@ -0,0 +1,59 @@
+# -*- coding: utf-8 -*-
+# Copyright 2024 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+# Generated code. DO NOT EDIT!
+#
+# Snippet for UpdateDataAttributeBinding
+# NOTE: This snippet has been automatically generated for illustrative purposes only.
+# It may require modifications to work in your environment.

+# To install the latest published package dependency, execute the following:
+#   python3 -m pip install google-cloud-dataplex
+
+
+# [START dataplex_v1_generated_DataTaxonomyService_UpdateDataAttributeBinding_sync]
+# This snippet has been automatically generated and should be regarded as a
+# code template only.
+# It will require modifications to work:
+# - It may require correct/in-range values for request initialization.
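+#   Note: DataAttributeBinding updates are typically guarded by optimistic
+#   concurrency, so the service may also expect the binding's current etag
+#   to be set (an assumed sketch, with an illustrative value):
+#
+#       data_attribute_binding.etag = "current_etag_value"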
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dataplex_v1 + + +def sample_update_data_attribute_binding(): + # Create a client + client = dataplex_v1.DataTaxonomyServiceClient() + + # Initialize request argument(s) + data_attribute_binding = dataplex_v1.DataAttributeBinding() + data_attribute_binding.resource = "resource_value" + + request = dataplex_v1.UpdateDataAttributeBindingRequest( + data_attribute_binding=data_attribute_binding, + ) + + # Make the request + operation = client.update_data_attribute_binding(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + +# [END dataplex_v1_generated_DataTaxonomyService_UpdateDataAttributeBinding_sync] diff --git a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_data_taxonomy_service_update_data_attribute_sync.py b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_data_taxonomy_service_update_data_attribute_sync.py new file mode 100644 index 000000000000..0174b1fe11cd --- /dev/null +++ b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_data_taxonomy_service_update_data_attribute_sync.py @@ -0,0 +1,55 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for UpdateDataAttribute +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dataplex + + +# [START dataplex_v1_generated_DataTaxonomyService_UpdateDataAttribute_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dataplex_v1 + + +def sample_update_data_attribute(): + # Create a client + client = dataplex_v1.DataTaxonomyServiceClient() + + # Initialize request argument(s) + request = dataplex_v1.UpdateDataAttributeRequest( + ) + + # Make the request + operation = client.update_data_attribute(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + +# [END dataplex_v1_generated_DataTaxonomyService_UpdateDataAttribute_sync] diff --git a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_data_taxonomy_service_update_data_taxonomy_async.py b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_data_taxonomy_service_update_data_taxonomy_async.py new file mode 100644 index 000000000000..cf45b97b2582 --- /dev/null +++ b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_data_taxonomy_service_update_data_taxonomy_async.py @@ -0,0 +1,55 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for UpdateDataTaxonomy +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dataplex + + +# [START dataplex_v1_generated_DataTaxonomyService_UpdateDataTaxonomy_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service
+#   client as shown in:
+#   https://googleapis.dev/python/google-api-core/latest/client_options.html
+from google.cloud import dataplex_v1
+
+
+async def sample_update_data_taxonomy():
+    # Create a client
+    client = dataplex_v1.DataTaxonomyServiceAsyncClient()
+
+    # Initialize request argument(s)
+    request = dataplex_v1.UpdateDataTaxonomyRequest(
+    )
+
+    # Make the request
+    operation = client.update_data_taxonomy(request=request)
+
+    print("Waiting for operation to complete...")
+
+    response = await (await operation).result()
+
+    # Handle the response
+    print(response)
+
+# [END dataplex_v1_generated_DataTaxonomyService_UpdateDataTaxonomy_async]
diff --git a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_data_taxonomy_service_update_data_taxonomy_sync.py b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_data_taxonomy_service_update_data_taxonomy_sync.py
new file mode 100644
index 000000000000..c1ff22cfad5b
--- /dev/null
+++ b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_data_taxonomy_service_update_data_taxonomy_sync.py
@@ -0,0 +1,55 @@
+# -*- coding: utf-8 -*-
+# Copyright 2024 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+# Generated code. DO NOT EDIT!
+#
+# Snippet for UpdateDataTaxonomy
+# NOTE: This snippet has been automatically generated for illustrative purposes only.
+# It may require modifications to work in your environment.

+# To install the latest published package dependency, execute the following:
+#   python3 -m pip install google-cloud-dataplex
+
+
+# [START dataplex_v1_generated_DataTaxonomyService_UpdateDataTaxonomy_sync]
+# This snippet has been automatically generated and should be regarded as a
+# code template only.
+# It will require modifications to work:
+# - It may require correct/in-range values for request initialization.
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dataplex_v1 + + +def sample_update_data_taxonomy(): + # Create a client + client = dataplex_v1.DataTaxonomyServiceClient() + + # Initialize request argument(s) + request = dataplex_v1.UpdateDataTaxonomyRequest( + ) + + # Make the request + operation = client.update_data_taxonomy(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + +# [END dataplex_v1_generated_DataTaxonomyService_UpdateDataTaxonomy_sync] diff --git a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_cancel_job_async.py b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_cancel_job_async.py new file mode 100644 index 000000000000..3c62de319a83 --- /dev/null +++ b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_cancel_job_async.py @@ -0,0 +1,50 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for CancelJob +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dataplex + + +# [START dataplex_v1_generated_DataplexService_CancelJob_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
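+#   For example, the job name is expected to be fully qualified, down to the
+#   task that owns the job (all segment values below are assumed
+#   placeholders):
+#
+#       request = dataplex_v1.CancelJobRequest(
+#           name="projects/my-project/locations/us-central1/lakes/my-lake/tasks/my-task/jobs/my-job",
+#       )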
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dataplex_v1 + + +async def sample_cancel_job(): + # Create a client + client = dataplex_v1.DataplexServiceAsyncClient() + + # Initialize request argument(s) + request = dataplex_v1.CancelJobRequest( + name="name_value", + ) + + # Make the request + await client.cancel_job(request=request) + + +# [END dataplex_v1_generated_DataplexService_CancelJob_async] diff --git a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_cancel_job_sync.py b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_cancel_job_sync.py new file mode 100644 index 000000000000..0f03e83feca8 --- /dev/null +++ b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_cancel_job_sync.py @@ -0,0 +1,50 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for CancelJob +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dataplex + + +# [START dataplex_v1_generated_DataplexService_CancelJob_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dataplex_v1 + + +def sample_cancel_job(): + # Create a client + client = dataplex_v1.DataplexServiceClient() + + # Initialize request argument(s) + request = dataplex_v1.CancelJobRequest( + name="name_value", + ) + + # Make the request + client.cancel_job(request=request) + + +# [END dataplex_v1_generated_DataplexService_CancelJob_sync] diff --git a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_create_asset_async.py b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_create_asset_async.py new file mode 100644 index 000000000000..22703812444a --- /dev/null +++ b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_create_asset_async.py @@ -0,0 +1,61 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+# Generated code. DO NOT EDIT!
+#
+# Snippet for CreateAsset
+# NOTE: This snippet has been automatically generated for illustrative purposes only.
+# It may require modifications to work in your environment.

+# To install the latest published package dependency, execute the following:
+#   python3 -m pip install google-cloud-dataplex
+
+
+# [START dataplex_v1_generated_DataplexService_CreateAsset_async]
+# This snippet has been automatically generated and should be regarded as a
+# code template only.
+# It will require modifications to work:
+# - It may require correct/in-range values for request initialization.
+# - It may require specifying regional endpoints when creating the service
+#   client as shown in:
+#   https://googleapis.dev/python/google-api-core/latest/client_options.html
+from google.cloud import dataplex_v1
+
+
+async def sample_create_asset():
+    # Create a client
+    client = dataplex_v1.DataplexServiceAsyncClient()
+
+    # Initialize request argument(s)
+    asset = dataplex_v1.Asset()
+    asset.resource_spec.type_ = "BIGQUERY_DATASET"
+
+    request = dataplex_v1.CreateAssetRequest(
+        parent="parent_value",
+        asset_id="asset_id_value",
+        asset=asset,
+    )
+
+    # Make the request
+    operation = client.create_asset(request=request)
+
+    print("Waiting for operation to complete...")
+
+    response = await (await operation).result()
+
+    # Handle the response
+    print(response)
+
+# [END dataplex_v1_generated_DataplexService_CreateAsset_async]
diff --git a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_create_asset_sync.py b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_create_asset_sync.py
new file mode 100644
index 000000000000..d11ffbfbfb6a
--- /dev/null
+++ b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_create_asset_sync.py
@@ -0,0 +1,61 @@
+# -*- coding: utf-8 -*-
+# Copyright 2024 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+# Generated code. DO NOT EDIT!
+#
+# Snippet for CreateAsset
+# NOTE: This snippet has been automatically generated for illustrative purposes only.
+# It may require modifications to work in your environment.

+# To install the latest published package dependency, execute the following:
+#   python3 -m pip install google-cloud-dataplex
+
+
+# [START dataplex_v1_generated_DataplexService_CreateAsset_sync]
+# This snippet has been automatically generated and should be regarded as a
+# code template only.
+# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dataplex_v1 + + +def sample_create_asset(): + # Create a client + client = dataplex_v1.DataplexServiceClient() + + # Initialize request argument(s) + asset = dataplex_v1.Asset() + asset.resource_spec.type_ = "BIGQUERY_DATASET" + + request = dataplex_v1.CreateAssetRequest( + parent="parent_value", + asset_id="asset_id_value", + asset=asset, + ) + + # Make the request + operation = client.create_asset(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + +# [END dataplex_v1_generated_DataplexService_CreateAsset_sync] diff --git a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_create_environment_async.py b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_create_environment_async.py new file mode 100644 index 000000000000..04e8b56fc7f5 --- /dev/null +++ b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_create_environment_async.py @@ -0,0 +1,61 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for CreateEnvironment +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dataplex + + +# [START dataplex_v1_generated_DataplexService_CreateEnvironment_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
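+#   For example, environments are created under a lake, so the parent is a
+#   full lake resource name (values below are assumed placeholders):
+#
+#       request = dataplex_v1.CreateEnvironmentRequest(
+#           parent="projects/my-project/locations/us-central1/lakes/my-lake",
+#           environment_id="my-environment",
+#           environment=environment,
+#       )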
+# - It may require specifying regional endpoints when creating the service
+#   client as shown in:
+#   https://googleapis.dev/python/google-api-core/latest/client_options.html
+from google.cloud import dataplex_v1
+
+
+async def sample_create_environment():
+    # Create a client
+    client = dataplex_v1.DataplexServiceAsyncClient()
+
+    # Initialize request argument(s)
+    environment = dataplex_v1.Environment()
+    environment.infrastructure_spec.os_image.image_version = "image_version_value"
+
+    request = dataplex_v1.CreateEnvironmentRequest(
+        parent="parent_value",
+        environment_id="environment_id_value",
+        environment=environment,
+    )
+
+    # Make the request
+    operation = client.create_environment(request=request)
+
+    print("Waiting for operation to complete...")
+
+    response = await (await operation).result()
+
+    # Handle the response
+    print(response)
+
+# [END dataplex_v1_generated_DataplexService_CreateEnvironment_async]
diff --git a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_create_environment_sync.py b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_create_environment_sync.py
new file mode 100644
index 000000000000..572c041e342b
--- /dev/null
+++ b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_create_environment_sync.py
@@ -0,0 +1,61 @@
+# -*- coding: utf-8 -*-
+# Copyright 2024 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+# Generated code. DO NOT EDIT!
+#
+# Snippet for CreateEnvironment
+# NOTE: This snippet has been automatically generated for illustrative purposes only.
+# It may require modifications to work in your environment.

+# To install the latest published package dependency, execute the following:
+#   python3 -m pip install google-cloud-dataplex
+
+
+# [START dataplex_v1_generated_DataplexService_CreateEnvironment_sync]
+# This snippet has been automatically generated and should be regarded as a
+# code template only.
+# It will require modifications to work:
+# - It may require correct/in-range values for request initialization.
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dataplex_v1 + + +def sample_create_environment(): + # Create a client + client = dataplex_v1.DataplexServiceClient() + + # Initialize request argument(s) + environment = dataplex_v1.Environment() + environment.infrastructure_spec.os_image.image_version = "image_version_value" + + request = dataplex_v1.CreateEnvironmentRequest( + parent="parent_value", + environment_id="environment_id_value", + environment=environment, + ) + + # Make the request + operation = client.create_environment(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + +# [END dataplex_v1_generated_DataplexService_CreateEnvironment_sync] diff --git a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_create_lake_async.py b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_create_lake_async.py new file mode 100644 index 000000000000..510fe8247499 --- /dev/null +++ b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_create_lake_async.py @@ -0,0 +1,57 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for CreateLake +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dataplex + + +# [START dataplex_v1_generated_DataplexService_CreateLake_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
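+#   For example, lakes are created under a location, so a well-formed parent
+#   follows the pattern "projects/{project}/locations/{location}" (values
+#   below are assumed placeholders):
+#
+#       request = dataplex_v1.CreateLakeRequest(
+#           parent="projects/my-project/locations/us-central1",
+#           lake_id="my-lake",
+#       )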
+# - It may require specifying regional endpoints when creating the service
+#   client as shown in:
+#   https://googleapis.dev/python/google-api-core/latest/client_options.html
+from google.cloud import dataplex_v1
+
+
+async def sample_create_lake():
+    # Create a client
+    client = dataplex_v1.DataplexServiceAsyncClient()
+
+    # Initialize request argument(s)
+    request = dataplex_v1.CreateLakeRequest(
+        parent="parent_value",
+        lake_id="lake_id_value",
+    )
+
+    # Make the request
+    operation = client.create_lake(request=request)
+
+    print("Waiting for operation to complete...")
+
+    response = await (await operation).result()
+
+    # Handle the response
+    print(response)
+
+# [END dataplex_v1_generated_DataplexService_CreateLake_async]
diff --git a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_create_lake_sync.py b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_create_lake_sync.py
new file mode 100644
index 000000000000..f18dc0a47c48
--- /dev/null
+++ b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_create_lake_sync.py
@@ -0,0 +1,57 @@
+# -*- coding: utf-8 -*-
+# Copyright 2024 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+# Generated code. DO NOT EDIT!
+#
+# Snippet for CreateLake
+# NOTE: This snippet has been automatically generated for illustrative purposes only.
+# It may require modifications to work in your environment.

+# To install the latest published package dependency, execute the following:
+#   python3 -m pip install google-cloud-dataplex
+
+
+# [START dataplex_v1_generated_DataplexService_CreateLake_sync]
+# This snippet has been automatically generated and should be regarded as a
+# code template only.
+# It will require modifications to work:
+# - It may require correct/in-range values for request initialization.
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dataplex_v1 + + +def sample_create_lake(): + # Create a client + client = dataplex_v1.DataplexServiceClient() + + # Initialize request argument(s) + request = dataplex_v1.CreateLakeRequest( + parent="parent_value", + lake_id="lake_id_value", + ) + + # Make the request + operation = client.create_lake(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + +# [END dataplex_v1_generated_DataplexService_CreateLake_sync] diff --git a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_create_task_async.py b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_create_task_async.py new file mode 100644 index 000000000000..2709109a6790 --- /dev/null +++ b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_create_task_async.py @@ -0,0 +1,64 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for CreateTask +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dataplex + + +# [START dataplex_v1_generated_DataplexService_CreateTask_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
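+#   For example, tasks are created under a lake, and the recurring trigger
+#   schedule is expected to be a cron expression (all values below are
+#   assumed placeholders):
+#
+#       task.trigger_spec.schedule = "0 * * * *"  # assumed: hourly, in cron syntax
+#       request = dataplex_v1.CreateTaskRequest(
+#           parent="projects/my-project/locations/us-central1/lakes/my-lake",
+#           task_id="my-task",
+#           task=task,
+#       )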
+# - It may require specifying regional endpoints when creating the service
+#   client as shown in:
+#   https://googleapis.dev/python/google-api-core/latest/client_options.html
+from google.cloud import dataplex_v1
+
+
+async def sample_create_task():
+    # Create a client
+    client = dataplex_v1.DataplexServiceAsyncClient()
+
+    # Initialize request argument(s)
+    task = dataplex_v1.Task()
+    task.spark.main_jar_file_uri = "main_jar_file_uri_value"
+    task.trigger_spec.schedule = "schedule_value"
+    task.trigger_spec.type_ = "RECURRING"
+    task.execution_spec.service_account = "service_account_value"
+
+    request = dataplex_v1.CreateTaskRequest(
+        parent="parent_value",
+        task_id="task_id_value",
+        task=task,
+    )
+
+    # Make the request
+    operation = client.create_task(request=request)
+
+    print("Waiting for operation to complete...")
+
+    response = await (await operation).result()
+
+    # Handle the response
+    print(response)
+
+# [END dataplex_v1_generated_DataplexService_CreateTask_async]
diff --git a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_create_task_sync.py b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_create_task_sync.py
new file mode 100644
index 000000000000..8bb4de8c071c
--- /dev/null
+++ b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_create_task_sync.py
@@ -0,0 +1,64 @@
+# -*- coding: utf-8 -*-
+# Copyright 2024 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+# Generated code. DO NOT EDIT!
+#
+# Snippet for CreateTask
+# NOTE: This snippet has been automatically generated for illustrative purposes only.
+# It may require modifications to work in your environment.

+# To install the latest published package dependency, execute the following:
+#   python3 -m pip install google-cloud-dataplex
+
+
+# [START dataplex_v1_generated_DataplexService_CreateTask_sync]
+# This snippet has been automatically generated and should be regarded as a
+# code template only.
+# It will require modifications to work:
+# - It may require correct/in-range values for request initialization.
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dataplex_v1 + + +def sample_create_task(): + # Create a client + client = dataplex_v1.DataplexServiceClient() + + # Initialize request argument(s) + task = dataplex_v1.Task() + task.spark.main_jar_file_uri = "main_jar_file_uri_value" + task.trigger_spec.schedule = "schedule_value" + task.trigger_spec.type_ = "RECURRING" + task.execution_spec.service_account = "service_account_value" + + request = dataplex_v1.CreateTaskRequest( + parent="parent_value", + task_id="task_id_value", + task=task, + ) + + # Make the request + operation = client.create_task(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + +# [END dataplex_v1_generated_DataplexService_CreateTask_sync] diff --git a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_create_zone_async.py b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_create_zone_async.py new file mode 100644 index 000000000000..0148ee8fc210 --- /dev/null +++ b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_create_zone_async.py @@ -0,0 +1,62 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for CreateZone +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dataplex + + +# [START dataplex_v1_generated_DataplexService_CreateZone_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
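+#   For example, zones are created under a lake (values below are assumed
+#   placeholders):
+#
+#       request = dataplex_v1.CreateZoneRequest(
+#           parent="projects/my-project/locations/us-central1/lakes/my-lake",
+#           zone_id="my-zone",
+#           zone=zone,
+#       )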
+# - It may require specifying regional endpoints when creating the service
+#   client as shown in:
+#   https://googleapis.dev/python/google-api-core/latest/client_options.html
+from google.cloud import dataplex_v1
+
+
+async def sample_create_zone():
+    # Create a client
+    client = dataplex_v1.DataplexServiceAsyncClient()
+
+    # Initialize request argument(s)
+    zone = dataplex_v1.Zone()
+    zone.type_ = "CURATED"
+    zone.resource_spec.location_type = "MULTI_REGION"
+
+    request = dataplex_v1.CreateZoneRequest(
+        parent="parent_value",
+        zone_id="zone_id_value",
+        zone=zone,
+    )
+
+    # Make the request
+    operation = client.create_zone(request=request)
+
+    print("Waiting for operation to complete...")
+
+    response = await (await operation).result()
+
+    # Handle the response
+    print(response)
+
+# [END dataplex_v1_generated_DataplexService_CreateZone_async]
diff --git a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_create_zone_sync.py b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_create_zone_sync.py
new file mode 100644
index 000000000000..734e213bee7f
--- /dev/null
+++ b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_create_zone_sync.py
@@ -0,0 +1,62 @@
+# -*- coding: utf-8 -*-
+# Copyright 2024 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+# Generated code. DO NOT EDIT!
+#
+# Snippet for CreateZone
+# NOTE: This snippet has been automatically generated for illustrative purposes only.
+# It may require modifications to work in your environment.

+# To install the latest published package dependency, execute the following:
+#   python3 -m pip install google-cloud-dataplex
+
+
+# [START dataplex_v1_generated_DataplexService_CreateZone_sync]
+# This snippet has been automatically generated and should be regarded as a
+# code template only.
+# It will require modifications to work:
+# - It may require correct/in-range values for request initialization.
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dataplex_v1 + + +def sample_create_zone(): + # Create a client + client = dataplex_v1.DataplexServiceClient() + + # Initialize request argument(s) + zone = dataplex_v1.Zone() + zone.type_ = "CURATED" + zone.resource_spec.location_type = "MULTI_REGION" + + request = dataplex_v1.CreateZoneRequest( + parent="parent_value", + zone_id="zone_id_value", + zone=zone, + ) + + # Make the request + operation = client.create_zone(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + +# [END dataplex_v1_generated_DataplexService_CreateZone_sync] diff --git a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_delete_asset_async.py b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_delete_asset_async.py new file mode 100644 index 000000000000..7fcc4e675180 --- /dev/null +++ b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_delete_asset_async.py @@ -0,0 +1,56 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for DeleteAsset +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dataplex + + +# [START dataplex_v1_generated_DataplexService_DeleteAsset_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
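+#   For example, the asset name is expected to be fully qualified, down to
+#   the zone that owns it (all segment values below are assumed
+#   placeholders):
+#
+#       request = dataplex_v1.DeleteAssetRequest(
+#           name="projects/my-project/locations/us-central1/lakes/my-lake/zones/my-zone/assets/my-asset",
+#       )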
+# - It may require specifying regional endpoints when creating the service
+#   client as shown in:
+#   https://googleapis.dev/python/google-api-core/latest/client_options.html
+from google.cloud import dataplex_v1
+
+
+async def sample_delete_asset():
+    # Create a client
+    client = dataplex_v1.DataplexServiceAsyncClient()
+
+    # Initialize request argument(s)
+    request = dataplex_v1.DeleteAssetRequest(
+        name="name_value",
+    )
+
+    # Make the request
+    operation = client.delete_asset(request=request)
+
+    print("Waiting for operation to complete...")
+
+    response = await (await operation).result()
+
+    # Handle the response
+    print(response)
+
+# [END dataplex_v1_generated_DataplexService_DeleteAsset_async]
diff --git a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_delete_asset_sync.py b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_delete_asset_sync.py
new file mode 100644
index 000000000000..45f0e36fbcb7
--- /dev/null
+++ b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_delete_asset_sync.py
@@ -0,0 +1,56 @@
+# -*- coding: utf-8 -*-
+# Copyright 2024 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+# Generated code. DO NOT EDIT!
+#
+# Snippet for DeleteAsset
+# NOTE: This snippet has been automatically generated for illustrative purposes only.
+# It may require modifications to work in your environment.

+# To install the latest published package dependency, execute the following:
+#   python3 -m pip install google-cloud-dataplex
+
+
+# [START dataplex_v1_generated_DataplexService_DeleteAsset_sync]
+# This snippet has been automatically generated and should be regarded as a
+# code template only.
+# It will require modifications to work:
+# - It may require correct/in-range values for request initialization.
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dataplex_v1 + + +def sample_delete_asset(): + # Create a client + client = dataplex_v1.DataplexServiceClient() + + # Initialize request argument(s) + request = dataplex_v1.DeleteAssetRequest( + name="name_value", + ) + + # Make the request + operation = client.delete_asset(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + +# [END dataplex_v1_generated_DataplexService_DeleteAsset_sync] diff --git a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_delete_environment_async.py b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_delete_environment_async.py new file mode 100644 index 000000000000..bb626ad110c9 --- /dev/null +++ b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_delete_environment_async.py @@ -0,0 +1,56 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for DeleteEnvironment +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dataplex + + +# [START dataplex_v1_generated_DataplexService_DeleteEnvironment_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service
+#   client as shown in:
+#   https://googleapis.dev/python/google-api-core/latest/client_options.html
+from google.cloud import dataplex_v1
+
+
+async def sample_delete_environment():
+    # Create a client
+    client = dataplex_v1.DataplexServiceAsyncClient()
+
+    # Initialize request argument(s)
+    request = dataplex_v1.DeleteEnvironmentRequest(
+        name="name_value",
+    )
+
+    # Make the request
+    operation = client.delete_environment(request=request)
+
+    print("Waiting for operation to complete...")
+
+    response = await (await operation).result()
+
+    # Handle the response
+    print(response)
+
+# [END dataplex_v1_generated_DataplexService_DeleteEnvironment_async]
diff --git a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_delete_environment_sync.py b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_delete_environment_sync.py
new file mode 100644
index 000000000000..f9fe6a103eea
--- /dev/null
+++ b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_delete_environment_sync.py
@@ -0,0 +1,56 @@
+# -*- coding: utf-8 -*-
+# Copyright 2024 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+# Generated code. DO NOT EDIT!
+#
+# Snippet for DeleteEnvironment
+# NOTE: This snippet has been automatically generated for illustrative purposes only.
+# It may require modifications to work in your environment.

+# To install the latest published package dependency, execute the following:
+#   python3 -m pip install google-cloud-dataplex
+
+
+# [START dataplex_v1_generated_DataplexService_DeleteEnvironment_sync]
+# This snippet has been automatically generated and should be regarded as a
+# code template only.
+# It will require modifications to work:
+# - It may require correct/in-range values for request initialization.
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dataplex_v1 + + +def sample_delete_environment(): + # Create a client + client = dataplex_v1.DataplexServiceClient() + + # Initialize request argument(s) + request = dataplex_v1.DeleteEnvironmentRequest( + name="name_value", + ) + + # Make the request + operation = client.delete_environment(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + +# [END dataplex_v1_generated_DataplexService_DeleteEnvironment_sync] diff --git a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_delete_lake_async.py b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_delete_lake_async.py new file mode 100644 index 000000000000..72b244c54958 --- /dev/null +++ b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_delete_lake_async.py @@ -0,0 +1,56 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for DeleteLake +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dataplex + + +# [START dataplex_v1_generated_DataplexService_DeleteLake_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service
+#   client as shown in:
+#   https://googleapis.dev/python/google-api-core/latest/client_options.html
+from google.cloud import dataplex_v1
+
+
+async def sample_delete_lake():
+    # Create a client
+    client = dataplex_v1.DataplexServiceAsyncClient()
+
+    # Initialize request argument(s)
+    request = dataplex_v1.DeleteLakeRequest(
+        name="name_value",
+    )
+
+    # Make the request
+    operation = client.delete_lake(request=request)
+
+    print("Waiting for operation to complete...")
+
+    response = await (await operation).result()
+
+    # Handle the response
+    print(response)
+
+# [END dataplex_v1_generated_DataplexService_DeleteLake_async]
diff --git a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_delete_lake_sync.py b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_delete_lake_sync.py
new file mode 100644
index 000000000000..bc60e3d779cf
--- /dev/null
+++ b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_delete_lake_sync.py
@@ -0,0 +1,56 @@
+# -*- coding: utf-8 -*-
+# Copyright 2024 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+# Generated code. DO NOT EDIT!
+#
+# Snippet for DeleteLake
+# NOTE: This snippet has been automatically generated for illustrative purposes only.
+# It may require modifications to work in your environment.

+# To install the latest published package dependency, execute the following:
+#   python3 -m pip install google-cloud-dataplex
+
+
+# [START dataplex_v1_generated_DataplexService_DeleteLake_sync]
+# This snippet has been automatically generated and should be regarded as a
+# code template only.
+# It will require modifications to work:
+# - It may require correct/in-range values for request initialization.
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dataplex_v1 + + +def sample_delete_lake(): + # Create a client + client = dataplex_v1.DataplexServiceClient() + + # Initialize request argument(s) + request = dataplex_v1.DeleteLakeRequest( + name="name_value", + ) + + # Make the request + operation = client.delete_lake(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + +# [END dataplex_v1_generated_DataplexService_DeleteLake_sync] diff --git a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_delete_task_async.py b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_delete_task_async.py new file mode 100644 index 000000000000..744bb671e334 --- /dev/null +++ b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_delete_task_async.py @@ -0,0 +1,56 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for DeleteTask +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dataplex + + +# [START dataplex_v1_generated_DataplexService_DeleteTask_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service
+#   client as shown in:
+#   https://googleapis.dev/python/google-api-core/latest/client_options.html
+from google.cloud import dataplex_v1
+
+
+async def sample_delete_task():
+    # Create a client
+    client = dataplex_v1.DataplexServiceAsyncClient()
+
+    # Initialize request argument(s)
+    request = dataplex_v1.DeleteTaskRequest(
+        name="name_value",
+    )
+
+    # Make the request (awaiting the call returns the long-running operation)
+    operation = await client.delete_task(request=request)
+
+    print("Waiting for operation to complete...")
+
+    response = await operation.result()
+
+    # Handle the response
+    print(response)
+
+# [END dataplex_v1_generated_DataplexService_DeleteTask_async]
diff --git a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_delete_task_sync.py b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_delete_task_sync.py
new file mode 100644
index 000000000000..346621184099
--- /dev/null
+++ b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_delete_task_sync.py
@@ -0,0 +1,56 @@
+# -*- coding: utf-8 -*-
+# Copyright 2024 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+# Generated code. DO NOT EDIT!
+#
+# Snippet for DeleteTask
+# NOTE: This snippet has been automatically generated for illustrative purposes only.
+# It may require modifications to work in your environment.

+# To install the latest published package dependency, execute the following:
+#   python3 -m pip install google-cloud-dataplex
+
+
+# [START dataplex_v1_generated_DataplexService_DeleteTask_sync]
+# This snippet has been automatically generated and should be regarded as a
+# code template only.
+# It will require modifications to work:
+# - It may require correct/in-range values for request initialization.
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dataplex_v1 + + +def sample_delete_task(): + # Create a client + client = dataplex_v1.DataplexServiceClient() + + # Initialize request argument(s) + request = dataplex_v1.DeleteTaskRequest( + name="name_value", + ) + + # Make the request + operation = client.delete_task(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + +# [END dataplex_v1_generated_DataplexService_DeleteTask_sync] diff --git a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_delete_zone_async.py b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_delete_zone_async.py new file mode 100644 index 000000000000..5c26d14108e9 --- /dev/null +++ b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_delete_zone_async.py @@ -0,0 +1,56 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for DeleteZone +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dataplex + + +# [START dataplex_v1_generated_DataplexService_DeleteZone_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service
+#   client as shown in:
+#   https://googleapis.dev/python/google-api-core/latest/client_options.html
+from google.cloud import dataplex_v1
+
+
+async def sample_delete_zone():
+    # Create a client
+    client = dataplex_v1.DataplexServiceAsyncClient()
+
+    # Initialize request argument(s)
+    request = dataplex_v1.DeleteZoneRequest(
+        name="name_value",
+    )
+
+    # Make the request (awaiting the call returns the long-running operation)
+    operation = await client.delete_zone(request=request)
+
+    print("Waiting for operation to complete...")
+
+    response = await operation.result()
+
+    # Handle the response
+    print(response)
+
+# [END dataplex_v1_generated_DataplexService_DeleteZone_async]
diff --git a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_delete_zone_sync.py b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_delete_zone_sync.py
new file mode 100644
index 000000000000..ca175cbc639e
--- /dev/null
+++ b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_delete_zone_sync.py
@@ -0,0 +1,56 @@
+# -*- coding: utf-8 -*-
+# Copyright 2024 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+# Generated code. DO NOT EDIT!
+#
+# Snippet for DeleteZone
+# NOTE: This snippet has been automatically generated for illustrative purposes only.
+# It may require modifications to work in your environment.

+# To install the latest published package dependency, execute the following:
+#   python3 -m pip install google-cloud-dataplex
+
+
+# [START dataplex_v1_generated_DataplexService_DeleteZone_sync]
+# This snippet has been automatically generated and should be regarded as a
+# code template only.
+# It will require modifications to work:
+# - It may require correct/in-range values for request initialization.
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dataplex_v1 + + +def sample_delete_zone(): + # Create a client + client = dataplex_v1.DataplexServiceClient() + + # Initialize request argument(s) + request = dataplex_v1.DeleteZoneRequest( + name="name_value", + ) + + # Make the request + operation = client.delete_zone(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + +# [END dataplex_v1_generated_DataplexService_DeleteZone_sync] diff --git a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_get_asset_async.py b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_get_asset_async.py new file mode 100644 index 000000000000..00d561665ef6 --- /dev/null +++ b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_get_asset_async.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetAsset +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dataplex + + +# [START dataplex_v1_generated_DataplexService_GetAsset_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dataplex_v1 + + +async def sample_get_asset(): + # Create a client + client = dataplex_v1.DataplexServiceAsyncClient() + + # Initialize request argument(s) + request = dataplex_v1.GetAssetRequest( + name="name_value", + ) + + # Make the request + response = await client.get_asset(request=request) + + # Handle the response + print(response) + +# [END dataplex_v1_generated_DataplexService_GetAsset_async] diff --git a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_get_asset_sync.py b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_get_asset_sync.py new file mode 100644 index 000000000000..b5e8b7ec2b41 --- /dev/null +++ b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_get_asset_sync.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetAsset +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dataplex + + +# [START dataplex_v1_generated_DataplexService_GetAsset_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dataplex_v1 + + +def sample_get_asset(): + # Create a client + client = dataplex_v1.DataplexServiceClient() + + # Initialize request argument(s) + request = dataplex_v1.GetAssetRequest( + name="name_value", + ) + + # Make the request + response = client.get_asset(request=request) + + # Handle the response + print(response) + +# [END dataplex_v1_generated_DataplexService_GetAsset_sync] diff --git a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_get_environment_async.py b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_get_environment_async.py new file mode 100644 index 000000000000..3188ccf8d140 --- /dev/null +++ b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_get_environment_async.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetEnvironment +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dataplex + + +# [START dataplex_v1_generated_DataplexService_GetEnvironment_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dataplex_v1 + + +async def sample_get_environment(): + # Create a client + client = dataplex_v1.DataplexServiceAsyncClient() + + # Initialize request argument(s) + request = dataplex_v1.GetEnvironmentRequest( + name="name_value", + ) + + # Make the request + response = await client.get_environment(request=request) + + # Handle the response + print(response) + +# [END dataplex_v1_generated_DataplexService_GetEnvironment_async] diff --git a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_get_environment_sync.py b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_get_environment_sync.py new file mode 100644 index 000000000000..73399effb4c6 --- /dev/null +++ b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_get_environment_sync.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetEnvironment +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dataplex + + +# [START dataplex_v1_generated_DataplexService_GetEnvironment_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dataplex_v1 + + +def sample_get_environment(): + # Create a client + client = dataplex_v1.DataplexServiceClient() + + # Initialize request argument(s) + request = dataplex_v1.GetEnvironmentRequest( + name="name_value", + ) + + # Make the request + response = client.get_environment(request=request) + + # Handle the response + print(response) + +# [END dataplex_v1_generated_DataplexService_GetEnvironment_sync] diff --git a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_get_job_async.py b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_get_job_async.py new file mode 100644 index 000000000000..cd8194f96353 --- /dev/null +++ b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_get_job_async.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetJob +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dataplex + + +# [START dataplex_v1_generated_DataplexService_GetJob_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dataplex_v1 + + +async def sample_get_job(): + # Create a client + client = dataplex_v1.DataplexServiceAsyncClient() + + # Initialize request argument(s) + request = dataplex_v1.GetJobRequest( + name="name_value", + ) + + # Make the request + response = await client.get_job(request=request) + + # Handle the response + print(response) + +# [END dataplex_v1_generated_DataplexService_GetJob_async] diff --git a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_get_job_sync.py b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_get_job_sync.py new file mode 100644 index 000000000000..79102a31e196 --- /dev/null +++ b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_get_job_sync.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetJob +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dataplex + + +# [START dataplex_v1_generated_DataplexService_GetJob_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dataplex_v1 + + +def sample_get_job(): + # Create a client + client = dataplex_v1.DataplexServiceClient() + + # Initialize request argument(s) + request = dataplex_v1.GetJobRequest( + name="name_value", + ) + + # Make the request + response = client.get_job(request=request) + + # Handle the response + print(response) + +# [END dataplex_v1_generated_DataplexService_GetJob_sync] diff --git a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_get_lake_async.py b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_get_lake_async.py new file mode 100644 index 000000000000..0d71549766aa --- /dev/null +++ b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_get_lake_async.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetLake +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dataplex + + +# [START dataplex_v1_generated_DataplexService_GetLake_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dataplex_v1 + + +async def sample_get_lake(): + # Create a client + client = dataplex_v1.DataplexServiceAsyncClient() + + # Initialize request argument(s) + request = dataplex_v1.GetLakeRequest( + name="name_value", + ) + + # Make the request + response = await client.get_lake(request=request) + + # Handle the response + print(response) + +# [END dataplex_v1_generated_DataplexService_GetLake_async] diff --git a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_get_lake_sync.py b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_get_lake_sync.py new file mode 100644 index 000000000000..6089d0fd3080 --- /dev/null +++ b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_get_lake_sync.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetLake +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dataplex + + +# [START dataplex_v1_generated_DataplexService_GetLake_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dataplex_v1 + + +def sample_get_lake(): + # Create a client + client = dataplex_v1.DataplexServiceClient() + + # Initialize request argument(s) + request = dataplex_v1.GetLakeRequest( + name="name_value", + ) + + # Make the request + response = client.get_lake(request=request) + + # Handle the response + print(response) + +# [END dataplex_v1_generated_DataplexService_GetLake_sync] diff --git a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_get_task_async.py b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_get_task_async.py new file mode 100644 index 000000000000..a80b63d8165b --- /dev/null +++ b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_get_task_async.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetTask +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dataplex + + +# [START dataplex_v1_generated_DataplexService_GetTask_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dataplex_v1 + + +async def sample_get_task(): + # Create a client + client = dataplex_v1.DataplexServiceAsyncClient() + + # Initialize request argument(s) + request = dataplex_v1.GetTaskRequest( + name="name_value", + ) + + # Make the request + response = await client.get_task(request=request) + + # Handle the response + print(response) + +# [END dataplex_v1_generated_DataplexService_GetTask_async] diff --git a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_get_task_sync.py b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_get_task_sync.py new file mode 100644 index 000000000000..ac88e7053402 --- /dev/null +++ b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_get_task_sync.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetTask +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dataplex + + +# [START dataplex_v1_generated_DataplexService_GetTask_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dataplex_v1 + + +def sample_get_task(): + # Create a client + client = dataplex_v1.DataplexServiceClient() + + # Initialize request argument(s) + request = dataplex_v1.GetTaskRequest( + name="name_value", + ) + + # Make the request + response = client.get_task(request=request) + + # Handle the response + print(response) + +# [END dataplex_v1_generated_DataplexService_GetTask_sync] diff --git a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_get_zone_async.py b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_get_zone_async.py new file mode 100644 index 000000000000..dae75bef30b9 --- /dev/null +++ b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_get_zone_async.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetZone +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dataplex + + +# [START dataplex_v1_generated_DataplexService_GetZone_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dataplex_v1 + + +async def sample_get_zone(): + # Create a client + client = dataplex_v1.DataplexServiceAsyncClient() + + # Initialize request argument(s) + request = dataplex_v1.GetZoneRequest( + name="name_value", + ) + + # Make the request + response = await client.get_zone(request=request) + + # Handle the response + print(response) + +# [END dataplex_v1_generated_DataplexService_GetZone_async] diff --git a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_get_zone_sync.py b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_get_zone_sync.py new file mode 100644 index 000000000000..bd09ecb2c2be --- /dev/null +++ b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_get_zone_sync.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetZone +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dataplex + + +# [START dataplex_v1_generated_DataplexService_GetZone_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dataplex_v1 + + +def sample_get_zone(): + # Create a client + client = dataplex_v1.DataplexServiceClient() + + # Initialize request argument(s) + request = dataplex_v1.GetZoneRequest( + name="name_value", + ) + + # Make the request + response = client.get_zone(request=request) + + # Handle the response + print(response) + +# [END dataplex_v1_generated_DataplexService_GetZone_sync] diff --git a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_list_asset_actions_async.py b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_list_asset_actions_async.py new file mode 100644 index 000000000000..90f53390f676 --- /dev/null +++ b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_list_asset_actions_async.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ListAssetActions +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dataplex + + +# [START dataplex_v1_generated_DataplexService_ListAssetActions_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service
+#   client as shown in:
+#   https://googleapis.dev/python/google-api-core/latest/client_options.html
+from google.cloud import dataplex_v1
+
+
+async def sample_list_asset_actions():
+    # Create a client
+    client = dataplex_v1.DataplexServiceAsyncClient()
+
+    # Initialize request argument(s)
+    request = dataplex_v1.ListAssetActionsRequest(
+        parent="parent_value",
+    )
+
+    # Make the request (awaiting the call returns the async pager)
+    page_result = await client.list_asset_actions(request=request)
+
+    # Handle the response
+    async for response in page_result:
+        print(response)
+
+# [END dataplex_v1_generated_DataplexService_ListAssetActions_async]
diff --git a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_list_asset_actions_sync.py b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_list_asset_actions_sync.py
new file mode 100644
index 000000000000..a449f1cb65db
--- /dev/null
+++ b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_list_asset_actions_sync.py
@@ -0,0 +1,53 @@
+# -*- coding: utf-8 -*-
+# Copyright 2024 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+# Generated code. DO NOT EDIT!
+#
+# Snippet for ListAssetActions
+# NOTE: This snippet has been automatically generated for illustrative purposes only.
+# It may require modifications to work in your environment.

+# To install the latest published package dependency, execute the following:
+#   python3 -m pip install google-cloud-dataplex
+
+
+# [START dataplex_v1_generated_DataplexService_ListAssetActions_sync]
+# This snippet has been automatically generated and should be regarded as a
+# code template only.
+# It will require modifications to work:
+# - It may require correct/in-range values for request initialization.
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dataplex_v1 + + +def sample_list_asset_actions(): + # Create a client + client = dataplex_v1.DataplexServiceClient() + + # Initialize request argument(s) + request = dataplex_v1.ListAssetActionsRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_asset_actions(request=request) + + # Handle the response + for response in page_result: + print(response) + +# [END dataplex_v1_generated_DataplexService_ListAssetActions_sync] diff --git a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_list_assets_async.py b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_list_assets_async.py new file mode 100644 index 000000000000..8e07a6a1e784 --- /dev/null +++ b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_list_assets_async.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ListAssets +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dataplex + + +# [START dataplex_v1_generated_DataplexService_ListAssets_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service
+#   client as shown in:
+#   https://googleapis.dev/python/google-api-core/latest/client_options.html
+from google.cloud import dataplex_v1
+
+
+async def sample_list_assets():
+    # Create a client
+    client = dataplex_v1.DataplexServiceAsyncClient()
+
+    # Initialize request argument(s)
+    request = dataplex_v1.ListAssetsRequest(
+        parent="parent_value",
+    )
+
+    # Make the request (awaiting the call returns the async pager)
+    page_result = await client.list_assets(request=request)
+
+    # Handle the response
+    async for response in page_result:
+        print(response)
+
+# [END dataplex_v1_generated_DataplexService_ListAssets_async]
diff --git a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_list_assets_sync.py b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_list_assets_sync.py
new file mode 100644
index 000000000000..484a7db1eaeb
--- /dev/null
+++ b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_list_assets_sync.py
@@ -0,0 +1,53 @@
+# -*- coding: utf-8 -*-
+# Copyright 2024 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+# Generated code. DO NOT EDIT!
+#
+# Snippet for ListAssets
+# NOTE: This snippet has been automatically generated for illustrative purposes only.
+# It may require modifications to work in your environment.

+# To install the latest published package dependency, execute the following:
+#   python3 -m pip install google-cloud-dataplex
+
+
+# [START dataplex_v1_generated_DataplexService_ListAssets_sync]
+# This snippet has been automatically generated and should be regarded as a
+# code template only.
+# It will require modifications to work:
+# - It may require correct/in-range values for request initialization.
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dataplex_v1 + + +def sample_list_assets(): + # Create a client + client = dataplex_v1.DataplexServiceClient() + + # Initialize request argument(s) + request = dataplex_v1.ListAssetsRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_assets(request=request) + + # Handle the response + for response in page_result: + print(response) + +# [END dataplex_v1_generated_DataplexService_ListAssets_sync] diff --git a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_list_environments_async.py b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_list_environments_async.py new file mode 100644 index 000000000000..164415a6498b --- /dev/null +++ b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_list_environments_async.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ListEnvironments +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dataplex + + +# [START dataplex_v1_generated_DataplexService_ListEnvironments_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service
+#   client as shown in:
+#   https://googleapis.dev/python/google-api-core/latest/client_options.html
+from google.cloud import dataplex_v1
+
+
+async def sample_list_environments():
+    # Create a client
+    client = dataplex_v1.DataplexServiceAsyncClient()
+
+    # Initialize request argument(s)
+    request = dataplex_v1.ListEnvironmentsRequest(
+        parent="parent_value",
+    )
+
+    # Make the request (awaiting the call returns the async pager)
+    page_result = await client.list_environments(request=request)
+
+    # Handle the response
+    async for response in page_result:
+        print(response)
+
+# [END dataplex_v1_generated_DataplexService_ListEnvironments_async]
diff --git a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_list_environments_sync.py b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_list_environments_sync.py
new file mode 100644
index 000000000000..79b7fd642ab6
--- /dev/null
+++ b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_list_environments_sync.py
@@ -0,0 +1,53 @@
+# -*- coding: utf-8 -*-
+# Copyright 2024 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+# Generated code. DO NOT EDIT!
+#
+# Snippet for ListEnvironments
+# NOTE: This snippet has been automatically generated for illustrative purposes only.
+# It may require modifications to work in your environment.

+# To install the latest published package dependency, execute the following:
+#   python3 -m pip install google-cloud-dataplex
+
+
+# [START dataplex_v1_generated_DataplexService_ListEnvironments_sync]
+# This snippet has been automatically generated and should be regarded as a
+# code template only.
+# It will require modifications to work:
+# - It may require correct/in-range values for request initialization.
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dataplex_v1 + + +def sample_list_environments(): + # Create a client + client = dataplex_v1.DataplexServiceClient() + + # Initialize request argument(s) + request = dataplex_v1.ListEnvironmentsRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_environments(request=request) + + # Handle the response + for response in page_result: + print(response) + +# [END dataplex_v1_generated_DataplexService_ListEnvironments_sync] diff --git a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_list_jobs_async.py b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_list_jobs_async.py new file mode 100644 index 000000000000..2de64cc90520 --- /dev/null +++ b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_list_jobs_async.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ListJobs +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dataplex + + +# [START dataplex_v1_generated_DataplexService_ListJobs_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service
+#   client as shown in:
+#   https://googleapis.dev/python/google-api-core/latest/client_options.html
+from google.cloud import dataplex_v1
+
+
+async def sample_list_jobs():
+    # Create a client
+    client = dataplex_v1.DataplexServiceAsyncClient()
+
+    # Initialize request argument(s)
+    request = dataplex_v1.ListJobsRequest(
+        parent="parent_value",
+    )
+
+    # Make the request (awaiting the call returns the async pager)
+    page_result = await client.list_jobs(request=request)
+
+    # Handle the response
+    async for response in page_result:
+        print(response)
+
+# [END dataplex_v1_generated_DataplexService_ListJobs_async]
diff --git a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_list_jobs_sync.py b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_list_jobs_sync.py
new file mode 100644
index 000000000000..708192b9bd03
--- /dev/null
+++ b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_list_jobs_sync.py
@@ -0,0 +1,53 @@
+# -*- coding: utf-8 -*-
+# Copyright 2024 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+# Generated code. DO NOT EDIT!
+#
+# Snippet for ListJobs
+# NOTE: This snippet has been automatically generated for illustrative purposes only.
+# It may require modifications to work in your environment.

+# To install the latest published package dependency, execute the following:
+#   python3 -m pip install google-cloud-dataplex
+
+
+# [START dataplex_v1_generated_DataplexService_ListJobs_sync]
+# This snippet has been automatically generated and should be regarded as a
+# code template only.
+# It will require modifications to work:
+# - It may require correct/in-range values for request initialization.
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dataplex_v1 + + +def sample_list_jobs(): + # Create a client + client = dataplex_v1.DataplexServiceClient() + + # Initialize request argument(s) + request = dataplex_v1.ListJobsRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_jobs(request=request) + + # Handle the response + for response in page_result: + print(response) + +# [END dataplex_v1_generated_DataplexService_ListJobs_sync] diff --git a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_list_lake_actions_async.py b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_list_lake_actions_async.py new file mode 100644 index 000000000000..3a9296185703 --- /dev/null +++ b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_list_lake_actions_async.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ListLakeActions +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dataplex + + +# [START dataplex_v1_generated_DataplexService_ListLakeActions_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service
+# client as shown in:
+# https://googleapis.dev/python/google-api-core/latest/client_options.html
+from google.cloud import dataplex_v1
+
+
+async def sample_list_lake_actions():
+    # Create a client
+    client = dataplex_v1.DataplexServiceAsyncClient()
+
+    # Initialize request argument(s)
+    request = dataplex_v1.ListLakeActionsRequest(
+        parent="parent_value",
+    )
+
+    # Make the request (the async client's list_lake_actions is a coroutine
+    # and must be awaited before iterating the returned pager)
+    page_result = await client.list_lake_actions(request=request)
+
+    # Handle the response
+    async for response in page_result:
+        print(response)
+
+# [END dataplex_v1_generated_DataplexService_ListLakeActions_async]
diff --git a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_list_lake_actions_sync.py b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_list_lake_actions_sync.py
new file mode 100644
index 000000000000..9b3333ce8555
--- /dev/null
+++ b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_list_lake_actions_sync.py
@@ -0,0 +1,53 @@
+# -*- coding: utf-8 -*-
+# Copyright 2024 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+# Generated code. DO NOT EDIT!
+#
+# Snippet for ListLakeActions
+# NOTE: This snippet has been automatically generated for illustrative purposes only.
+# It may require modifications to work in your environment.
+
+# To install the latest published package dependency, execute the following:
+# python3 -m pip install google-cloud-dataplex
+
+
+# [START dataplex_v1_generated_DataplexService_ListLakeActions_sync]
+# This snippet has been automatically generated and should be regarded as a
+# code template only.
+# It will require modifications to work:
+# - It may require correct/in-range values for request initialization.
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dataplex_v1 + + +def sample_list_lake_actions(): + # Create a client + client = dataplex_v1.DataplexServiceClient() + + # Initialize request argument(s) + request = dataplex_v1.ListLakeActionsRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_lake_actions(request=request) + + # Handle the response + for response in page_result: + print(response) + +# [END dataplex_v1_generated_DataplexService_ListLakeActions_sync] diff --git a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_list_lakes_async.py b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_list_lakes_async.py new file mode 100644 index 000000000000..b312e9544b9a --- /dev/null +++ b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_list_lakes_async.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ListLakes +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dataplex + + +# [START dataplex_v1_generated_DataplexService_ListLakes_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
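+#   (For ListLakes specifically, "parent" is the location to list lakes in; a
+#   plausible value, with hypothetical IDs, would be
+#   "projects/my-project/locations/us-central1".)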
+# - It may require specifying regional endpoints when creating the service
+# client as shown in:
+# https://googleapis.dev/python/google-api-core/latest/client_options.html
+from google.cloud import dataplex_v1
+
+
+async def sample_list_lakes():
+    # Create a client
+    client = dataplex_v1.DataplexServiceAsyncClient()
+
+    # Initialize request argument(s)
+    request = dataplex_v1.ListLakesRequest(
+        parent="parent_value",
+    )
+
+    # Make the request (the async client's list_lakes is a coroutine and must
+    # be awaited before iterating the returned pager)
+    page_result = await client.list_lakes(request=request)
+
+    # Handle the response
+    async for response in page_result:
+        print(response)
+
+# [END dataplex_v1_generated_DataplexService_ListLakes_async]
diff --git a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_list_lakes_sync.py b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_list_lakes_sync.py
new file mode 100644
index 000000000000..e2fc0d0e6b0d
--- /dev/null
+++ b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_list_lakes_sync.py
@@ -0,0 +1,53 @@
+# -*- coding: utf-8 -*-
+# Copyright 2024 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+# Generated code. DO NOT EDIT!
+#
+# Snippet for ListLakes
+# NOTE: This snippet has been automatically generated for illustrative purposes only.
+# It may require modifications to work in your environment.
+
+# To install the latest published package dependency, execute the following:
+# python3 -m pip install google-cloud-dataplex
+
+
+# [START dataplex_v1_generated_DataplexService_ListLakes_sync]
+# This snippet has been automatically generated and should be regarded as a
+# code template only.
+# It will require modifications to work:
+# - It may require correct/in-range values for request initialization.
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dataplex_v1 + + +def sample_list_lakes(): + # Create a client + client = dataplex_v1.DataplexServiceClient() + + # Initialize request argument(s) + request = dataplex_v1.ListLakesRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_lakes(request=request) + + # Handle the response + for response in page_result: + print(response) + +# [END dataplex_v1_generated_DataplexService_ListLakes_sync] diff --git a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_list_sessions_async.py b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_list_sessions_async.py new file mode 100644 index 000000000000..0b265969c08f --- /dev/null +++ b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_list_sessions_async.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ListSessions +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dataplex + + +# [START dataplex_v1_generated_DataplexService_ListSessions_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
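+#   (For ListSessions specifically, "parent" is the environment whose sessions
+#   are listed; a plausible value, with hypothetical IDs, would be
+#   "projects/my-project/locations/us-central1/lakes/my-lake/environments/my-env".)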
+# - It may require specifying regional endpoints when creating the service
+# client as shown in:
+# https://googleapis.dev/python/google-api-core/latest/client_options.html
+from google.cloud import dataplex_v1
+
+
+async def sample_list_sessions():
+    # Create a client
+    client = dataplex_v1.DataplexServiceAsyncClient()
+
+    # Initialize request argument(s)
+    request = dataplex_v1.ListSessionsRequest(
+        parent="parent_value",
+    )
+
+    # Make the request (the async client's list_sessions is a coroutine and
+    # must be awaited before iterating the returned pager)
+    page_result = await client.list_sessions(request=request)
+
+    # Handle the response
+    async for response in page_result:
+        print(response)
+
+# [END dataplex_v1_generated_DataplexService_ListSessions_async]
diff --git a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_list_sessions_sync.py b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_list_sessions_sync.py
new file mode 100644
index 000000000000..7fc0d0de9675
--- /dev/null
+++ b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_list_sessions_sync.py
@@ -0,0 +1,53 @@
+# -*- coding: utf-8 -*-
+# Copyright 2024 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+# Generated code. DO NOT EDIT!
+#
+# Snippet for ListSessions
+# NOTE: This snippet has been automatically generated for illustrative purposes only.
+# It may require modifications to work in your environment.
+
+# To install the latest published package dependency, execute the following:
+# python3 -m pip install google-cloud-dataplex
+
+
+# [START dataplex_v1_generated_DataplexService_ListSessions_sync]
+# This snippet has been automatically generated and should be regarded as a
+# code template only.
+# It will require modifications to work:
+# - It may require correct/in-range values for request initialization.
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dataplex_v1 + + +def sample_list_sessions(): + # Create a client + client = dataplex_v1.DataplexServiceClient() + + # Initialize request argument(s) + request = dataplex_v1.ListSessionsRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_sessions(request=request) + + # Handle the response + for response in page_result: + print(response) + +# [END dataplex_v1_generated_DataplexService_ListSessions_sync] diff --git a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_list_tasks_async.py b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_list_tasks_async.py new file mode 100644 index 000000000000..3ebe50888279 --- /dev/null +++ b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_list_tasks_async.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ListTasks +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dataplex + + +# [START dataplex_v1_generated_DataplexService_ListTasks_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
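+#   (For ListTasks specifically, "parent" is the lake whose tasks are listed;
+#   a plausible value, with hypothetical IDs, would be
+#   "projects/my-project/locations/us-central1/lakes/my-lake".)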
+# - It may require specifying regional endpoints when creating the service
+# client as shown in:
+# https://googleapis.dev/python/google-api-core/latest/client_options.html
+from google.cloud import dataplex_v1
+
+
+async def sample_list_tasks():
+    # Create a client
+    client = dataplex_v1.DataplexServiceAsyncClient()
+
+    # Initialize request argument(s)
+    request = dataplex_v1.ListTasksRequest(
+        parent="parent_value",
+    )
+
+    # Make the request (the async client's list_tasks is a coroutine and must
+    # be awaited before iterating the returned pager)
+    page_result = await client.list_tasks(request=request)
+
+    # Handle the response
+    async for response in page_result:
+        print(response)
+
+# [END dataplex_v1_generated_DataplexService_ListTasks_async]
diff --git a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_list_tasks_sync.py b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_list_tasks_sync.py
new file mode 100644
index 000000000000..b6a69abd53aa
--- /dev/null
+++ b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_list_tasks_sync.py
@@ -0,0 +1,53 @@
+# -*- coding: utf-8 -*-
+# Copyright 2024 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+# Generated code. DO NOT EDIT!
+#
+# Snippet for ListTasks
+# NOTE: This snippet has been automatically generated for illustrative purposes only.
+# It may require modifications to work in your environment.
+
+# To install the latest published package dependency, execute the following:
+# python3 -m pip install google-cloud-dataplex
+
+
+# [START dataplex_v1_generated_DataplexService_ListTasks_sync]
+# This snippet has been automatically generated and should be regarded as a
+# code template only.
+# It will require modifications to work:
+# - It may require correct/in-range values for request initialization.
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dataplex_v1 + + +def sample_list_tasks(): + # Create a client + client = dataplex_v1.DataplexServiceClient() + + # Initialize request argument(s) + request = dataplex_v1.ListTasksRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_tasks(request=request) + + # Handle the response + for response in page_result: + print(response) + +# [END dataplex_v1_generated_DataplexService_ListTasks_sync] diff --git a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_list_zone_actions_async.py b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_list_zone_actions_async.py new file mode 100644 index 000000000000..55bf491493c6 --- /dev/null +++ b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_list_zone_actions_async.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ListZoneActions +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dataplex + + +# [START dataplex_v1_generated_DataplexService_ListZoneActions_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
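+#   (For ListZoneActions specifically, "parent" is the zone whose actions are
+#   listed; a plausible value, with hypothetical IDs, would be
+#   "projects/my-project/locations/us-central1/lakes/my-lake/zones/my-zone".)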
+# - It may require specifying regional endpoints when creating the service
+# client as shown in:
+# https://googleapis.dev/python/google-api-core/latest/client_options.html
+from google.cloud import dataplex_v1
+
+
+async def sample_list_zone_actions():
+    # Create a client
+    client = dataplex_v1.DataplexServiceAsyncClient()
+
+    # Initialize request argument(s)
+    request = dataplex_v1.ListZoneActionsRequest(
+        parent="parent_value",
+    )
+
+    # Make the request (the async client's list_zone_actions is a coroutine
+    # and must be awaited before iterating the returned pager)
+    page_result = await client.list_zone_actions(request=request)
+
+    # Handle the response
+    async for response in page_result:
+        print(response)
+
+# [END dataplex_v1_generated_DataplexService_ListZoneActions_async]
diff --git a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_list_zone_actions_sync.py b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_list_zone_actions_sync.py
new file mode 100644
index 000000000000..b1a4cd7d39a9
--- /dev/null
+++ b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_list_zone_actions_sync.py
@@ -0,0 +1,53 @@
+# -*- coding: utf-8 -*-
+# Copyright 2024 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+# Generated code. DO NOT EDIT!
+#
+# Snippet for ListZoneActions
+# NOTE: This snippet has been automatically generated for illustrative purposes only.
+# It may require modifications to work in your environment.
+
+# To install the latest published package dependency, execute the following:
+# python3 -m pip install google-cloud-dataplex
+
+
+# [START dataplex_v1_generated_DataplexService_ListZoneActions_sync]
+# This snippet has been automatically generated and should be regarded as a
+# code template only.
+# It will require modifications to work:
+# - It may require correct/in-range values for request initialization.
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dataplex_v1 + + +def sample_list_zone_actions(): + # Create a client + client = dataplex_v1.DataplexServiceClient() + + # Initialize request argument(s) + request = dataplex_v1.ListZoneActionsRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_zone_actions(request=request) + + # Handle the response + for response in page_result: + print(response) + +# [END dataplex_v1_generated_DataplexService_ListZoneActions_sync] diff --git a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_list_zones_async.py b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_list_zones_async.py new file mode 100644 index 000000000000..4387ef13b0bf --- /dev/null +++ b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_list_zones_async.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ListZones +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dataplex + + +# [START dataplex_v1_generated_DataplexService_ListZones_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
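+#   (For ListZones specifically, "parent" is the lake whose zones are listed;
+#   a plausible value, with hypothetical IDs, would be
+#   "projects/my-project/locations/us-central1/lakes/my-lake".)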
+# - It may require specifying regional endpoints when creating the service
+# client as shown in:
+# https://googleapis.dev/python/google-api-core/latest/client_options.html
+from google.cloud import dataplex_v1
+
+
+async def sample_list_zones():
+    # Create a client
+    client = dataplex_v1.DataplexServiceAsyncClient()
+
+    # Initialize request argument(s)
+    request = dataplex_v1.ListZonesRequest(
+        parent="parent_value",
+    )
+
+    # Make the request (the async client's list_zones is a coroutine and must
+    # be awaited before iterating the returned pager)
+    page_result = await client.list_zones(request=request)
+
+    # Handle the response
+    async for response in page_result:
+        print(response)
+
+# [END dataplex_v1_generated_DataplexService_ListZones_async]
diff --git a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_list_zones_sync.py b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_list_zones_sync.py
new file mode 100644
index 000000000000..d27df0d9fee2
--- /dev/null
+++ b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_list_zones_sync.py
@@ -0,0 +1,53 @@
+# -*- coding: utf-8 -*-
+# Copyright 2024 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+# Generated code. DO NOT EDIT!
+#
+# Snippet for ListZones
+# NOTE: This snippet has been automatically generated for illustrative purposes only.
+# It may require modifications to work in your environment.
+
+# To install the latest published package dependency, execute the following:
+# python3 -m pip install google-cloud-dataplex
+
+
+# [START dataplex_v1_generated_DataplexService_ListZones_sync]
+# This snippet has been automatically generated and should be regarded as a
+# code template only.
+# It will require modifications to work:
+# - It may require correct/in-range values for request initialization.
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dataplex_v1 + + +def sample_list_zones(): + # Create a client + client = dataplex_v1.DataplexServiceClient() + + # Initialize request argument(s) + request = dataplex_v1.ListZonesRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_zones(request=request) + + # Handle the response + for response in page_result: + print(response) + +# [END dataplex_v1_generated_DataplexService_ListZones_sync] diff --git a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_run_task_async.py b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_run_task_async.py new file mode 100644 index 000000000000..9d9d81c26094 --- /dev/null +++ b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_run_task_async.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for RunTask +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dataplex + + +# [START dataplex_v1_generated_DataplexService_RunTask_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
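+#   (For RunTask specifically, "name" is the task to run on demand; a
+#   plausible value, with hypothetical IDs, would be
+#   "projects/my-project/locations/us-central1/lakes/my-lake/tasks/my-task".)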
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dataplex_v1 + + +async def sample_run_task(): + # Create a client + client = dataplex_v1.DataplexServiceAsyncClient() + + # Initialize request argument(s) + request = dataplex_v1.RunTaskRequest( + name="name_value", + ) + + # Make the request + response = await client.run_task(request=request) + + # Handle the response + print(response) + +# [END dataplex_v1_generated_DataplexService_RunTask_async] diff --git a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_run_task_sync.py b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_run_task_sync.py new file mode 100644 index 000000000000..53e68cc087c8 --- /dev/null +++ b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_run_task_sync.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for RunTask +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dataplex + + +# [START dataplex_v1_generated_DataplexService_RunTask_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dataplex_v1 + + +def sample_run_task(): + # Create a client + client = dataplex_v1.DataplexServiceClient() + + # Initialize request argument(s) + request = dataplex_v1.RunTaskRequest( + name="name_value", + ) + + # Make the request + response = client.run_task(request=request) + + # Handle the response + print(response) + +# [END dataplex_v1_generated_DataplexService_RunTask_sync] diff --git a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_update_asset_async.py b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_update_asset_async.py new file mode 100644 index 000000000000..2e52a26e6a0c --- /dev/null +++ b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_update_asset_async.py @@ -0,0 +1,59 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for UpdateAsset +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dataplex + + +# [START dataplex_v1_generated_DataplexService_UpdateAsset_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
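+#   (For UpdateAsset specifically, "asset.name" should identify an existing
+#   asset, and the request typically also needs an "update_mask" listing the
+#   fields being changed, e.g. a google.protobuf FieldMask such as
+#   FieldMask(paths=["display_name"]), where "display_name" is a hypothetical
+#   choice of field.)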
+# - It may require specifying regional endpoints when creating the service
+# client as shown in:
+# https://googleapis.dev/python/google-api-core/latest/client_options.html
+from google.cloud import dataplex_v1
+
+
+async def sample_update_asset():
+    # Create a client
+    client = dataplex_v1.DataplexServiceAsyncClient()
+
+    # Initialize request argument(s)
+    asset = dataplex_v1.Asset()
+    asset.resource_spec.type_ = "BIGQUERY_DATASET"
+
+    request = dataplex_v1.UpdateAssetRequest(
+        asset=asset,
+    )
+
+    # Make the request (the async client's update_asset is a coroutine that
+    # resolves to an AsyncOperation)
+    operation = await client.update_asset(request=request)
+
+    print("Waiting for operation to complete...")
+
+    # AsyncOperation.result() is itself a coroutine and must be awaited
+    response = await operation.result()
+
+    # Handle the response
+    print(response)
+
+# [END dataplex_v1_generated_DataplexService_UpdateAsset_async]
diff --git a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_update_asset_sync.py b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_update_asset_sync.py
new file mode 100644
index 000000000000..89edd8ed6676
--- /dev/null
+++ b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_update_asset_sync.py
@@ -0,0 +1,59 @@
+# -*- coding: utf-8 -*-
+# Copyright 2024 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+# Generated code. DO NOT EDIT!
+#
+# Snippet for UpdateAsset
+# NOTE: This snippet has been automatically generated for illustrative purposes only.
+# It may require modifications to work in your environment.
+
+# To install the latest published package dependency, execute the following:
+# python3 -m pip install google-cloud-dataplex
+
+
+# [START dataplex_v1_generated_DataplexService_UpdateAsset_sync]
+# This snippet has been automatically generated and should be regarded as a
+# code template only.
+# It will require modifications to work:
+# - It may require correct/in-range values for request initialization.
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dataplex_v1 + + +def sample_update_asset(): + # Create a client + client = dataplex_v1.DataplexServiceClient() + + # Initialize request argument(s) + asset = dataplex_v1.Asset() + asset.resource_spec.type_ = "BIGQUERY_DATASET" + + request = dataplex_v1.UpdateAssetRequest( + asset=asset, + ) + + # Make the request + operation = client.update_asset(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + +# [END dataplex_v1_generated_DataplexService_UpdateAsset_sync] diff --git a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_update_environment_async.py b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_update_environment_async.py new file mode 100644 index 000000000000..64b312afdd8b --- /dev/null +++ b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_update_environment_async.py @@ -0,0 +1,59 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for UpdateEnvironment +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dataplex + + +# [START dataplex_v1_generated_DataplexService_UpdateEnvironment_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
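+#   (For UpdateEnvironment specifically, "environment.name" should identify an
+#   existing environment, and the request typically also needs an
+#   "update_mask" naming the fields being changed.)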
+# - It may require specifying regional endpoints when creating the service
+# client as shown in:
+# https://googleapis.dev/python/google-api-core/latest/client_options.html
+from google.cloud import dataplex_v1
+
+
+async def sample_update_environment():
+    # Create a client
+    client = dataplex_v1.DataplexServiceAsyncClient()
+
+    # Initialize request argument(s)
+    environment = dataplex_v1.Environment()
+    environment.infrastructure_spec.os_image.image_version = "image_version_value"
+
+    request = dataplex_v1.UpdateEnvironmentRequest(
+        environment=environment,
+    )
+
+    # Make the request (the async client's update_environment is a coroutine
+    # that resolves to an AsyncOperation)
+    operation = await client.update_environment(request=request)
+
+    print("Waiting for operation to complete...")
+
+    # AsyncOperation.result() is itself a coroutine and must be awaited
+    response = await operation.result()
+
+    # Handle the response
+    print(response)
+
+# [END dataplex_v1_generated_DataplexService_UpdateEnvironment_async]
diff --git a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_update_environment_sync.py b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_update_environment_sync.py
new file mode 100644
index 000000000000..6475b2715973
--- /dev/null
+++ b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_update_environment_sync.py
@@ -0,0 +1,59 @@
+# -*- coding: utf-8 -*-
+# Copyright 2024 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+# Generated code. DO NOT EDIT!
+#
+# Snippet for UpdateEnvironment
+# NOTE: This snippet has been automatically generated for illustrative purposes only.
+# It may require modifications to work in your environment.
+
+# To install the latest published package dependency, execute the following:
+# python3 -m pip install google-cloud-dataplex
+
+
+# [START dataplex_v1_generated_DataplexService_UpdateEnvironment_sync]
+# This snippet has been automatically generated and should be regarded as a
+# code template only.
+# It will require modifications to work:
+# - It may require correct/in-range values for request initialization.
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dataplex_v1 + + +def sample_update_environment(): + # Create a client + client = dataplex_v1.DataplexServiceClient() + + # Initialize request argument(s) + environment = dataplex_v1.Environment() + environment.infrastructure_spec.os_image.image_version = "image_version_value" + + request = dataplex_v1.UpdateEnvironmentRequest( + environment=environment, + ) + + # Make the request + operation = client.update_environment(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + +# [END dataplex_v1_generated_DataplexService_UpdateEnvironment_sync] diff --git a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_update_lake_async.py b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_update_lake_async.py new file mode 100644 index 000000000000..2b5c336fa65e --- /dev/null +++ b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_update_lake_async.py @@ -0,0 +1,55 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for UpdateLake +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dataplex + + +# [START dataplex_v1_generated_DataplexService_UpdateLake_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
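+#   (For UpdateLake specifically, the empty request below is only a template
+#   placeholder; a working call typically sets "lake", with "lake.name"
+#   identifying an existing lake, plus an "update_mask" naming the fields
+#   being changed.)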
+# - It may require specifying regional endpoints when creating the service
+# client as shown in:
+# https://googleapis.dev/python/google-api-core/latest/client_options.html
+from google.cloud import dataplex_v1
+
+
+async def sample_update_lake():
+    # Create a client
+    client = dataplex_v1.DataplexServiceAsyncClient()
+
+    # Initialize request argument(s)
+    request = dataplex_v1.UpdateLakeRequest(
+    )
+
+    # Make the request (the async client's update_lake is a coroutine that
+    # resolves to an AsyncOperation)
+    operation = await client.update_lake(request=request)
+
+    print("Waiting for operation to complete...")
+
+    # AsyncOperation.result() is itself a coroutine and must be awaited
+    response = await operation.result()
+
+    # Handle the response
+    print(response)
+
+# [END dataplex_v1_generated_DataplexService_UpdateLake_async]
diff --git a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_update_lake_sync.py b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_update_lake_sync.py
new file mode 100644
index 000000000000..ab9737be9bd8
--- /dev/null
+++ b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_update_lake_sync.py
@@ -0,0 +1,55 @@
+# -*- coding: utf-8 -*-
+# Copyright 2024 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+# Generated code. DO NOT EDIT!
+#
+# Snippet for UpdateLake
+# NOTE: This snippet has been automatically generated for illustrative purposes only.
+# It may require modifications to work in your environment.
+
+# To install the latest published package dependency, execute the following:
+# python3 -m pip install google-cloud-dataplex
+
+
+# [START dataplex_v1_generated_DataplexService_UpdateLake_sync]
+# This snippet has been automatically generated and should be regarded as a
+# code template only.
+# It will require modifications to work:
+# - It may require correct/in-range values for request initialization.
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dataplex_v1 + + +def sample_update_lake(): + # Create a client + client = dataplex_v1.DataplexServiceClient() + + # Initialize request argument(s) + request = dataplex_v1.UpdateLakeRequest( + ) + + # Make the request + operation = client.update_lake(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + +# [END dataplex_v1_generated_DataplexService_UpdateLake_sync] diff --git a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_update_task_async.py b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_update_task_async.py new file mode 100644 index 000000000000..486735063aed --- /dev/null +++ b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_update_task_async.py @@ -0,0 +1,62 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for UpdateTask +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dataplex + + +# [START dataplex_v1_generated_DataplexService_UpdateTask_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
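+#   (For UpdateTask specifically, "task.name" should identify an existing
+#   task, and the request typically also needs an "update_mask" naming the
+#   fields being changed.)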
+# - It may require specifying regional endpoints when creating the service
+# client as shown in:
+# https://googleapis.dev/python/google-api-core/latest/client_options.html
+from google.cloud import dataplex_v1
+
+
+async def sample_update_task():
+    # Create a client
+    client = dataplex_v1.DataplexServiceAsyncClient()
+
+    # Initialize request argument(s)
+    task = dataplex_v1.Task()
+    task.spark.main_jar_file_uri = "main_jar_file_uri_value"
+    task.trigger_spec.schedule = "schedule_value"
+    task.trigger_spec.type_ = "RECURRING"
+    task.execution_spec.service_account = "service_account_value"
+
+    request = dataplex_v1.UpdateTaskRequest(
+        task=task,
+    )
+
+    # Make the request (the async client's update_task is a coroutine that
+    # resolves to an AsyncOperation)
+    operation = await client.update_task(request=request)
+
+    print("Waiting for operation to complete...")
+
+    # AsyncOperation.result() is itself a coroutine and must be awaited
+    response = await operation.result()
+
+    # Handle the response
+    print(response)
+
+# [END dataplex_v1_generated_DataplexService_UpdateTask_async]
diff --git a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_update_task_sync.py b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_update_task_sync.py
new file mode 100644
index 000000000000..245be1d6a4ee
--- /dev/null
+++ b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_update_task_sync.py
@@ -0,0 +1,62 @@
+# -*- coding: utf-8 -*-
+# Copyright 2024 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+# Generated code. DO NOT EDIT!
+#
+# Snippet for UpdateTask
+# NOTE: This snippet has been automatically generated for illustrative purposes only.
+# It may require modifications to work in your environment.
+
+# To install the latest published package dependency, execute the following:
+# python3 -m pip install google-cloud-dataplex
+
+
+# [START dataplex_v1_generated_DataplexService_UpdateTask_sync]
+# This snippet has been automatically generated and should be regarded as a
+# code template only.
+# It will require modifications to work:
+# - It may require correct/in-range values for request initialization.
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dataplex_v1 + + +def sample_update_task(): + # Create a client + client = dataplex_v1.DataplexServiceClient() + + # Initialize request argument(s) + task = dataplex_v1.Task() + task.spark.main_jar_file_uri = "main_jar_file_uri_value" + task.trigger_spec.schedule = "schedule_value" + task.trigger_spec.type_ = "RECURRING" + task.execution_spec.service_account = "service_account_value" + + request = dataplex_v1.UpdateTaskRequest( + task=task, + ) + + # Make the request + operation = client.update_task(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + +# [END dataplex_v1_generated_DataplexService_UpdateTask_sync] diff --git a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_update_zone_async.py b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_update_zone_async.py new file mode 100644 index 000000000000..fae1f8d4a5f0 --- /dev/null +++ b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_update_zone_async.py @@ -0,0 +1,60 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for UpdateZone +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dataplex + + +# [START dataplex_v1_generated_DataplexService_UpdateZone_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
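+#   (For UpdateZone specifically, "zone.name" should identify an existing
+#   zone, and the request typically also needs an "update_mask" naming the
+#   fields being changed.)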
+# - It may require specifying regional endpoints when creating the service
+# client as shown in:
+# https://googleapis.dev/python/google-api-core/latest/client_options.html
+from google.cloud import dataplex_v1
+
+
+async def sample_update_zone():
+    # Create a client
+    client = dataplex_v1.DataplexServiceAsyncClient()
+
+    # Initialize request argument(s)
+    zone = dataplex_v1.Zone()
+    zone.type_ = "CURATED"
+    zone.resource_spec.location_type = "MULTI_REGION"
+
+    request = dataplex_v1.UpdateZoneRequest(
+        zone=zone,
+    )
+
+    # Make the request (the async client's update_zone is a coroutine that
+    # resolves to an AsyncOperation)
+    operation = await client.update_zone(request=request)
+
+    print("Waiting for operation to complete...")
+
+    # AsyncOperation.result() is itself a coroutine and must be awaited
+    response = await operation.result()
+
+    # Handle the response
+    print(response)
+
+# [END dataplex_v1_generated_DataplexService_UpdateZone_async]
diff --git a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_update_zone_sync.py b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_update_zone_sync.py
new file mode 100644
index 000000000000..d12cf99292a0
--- /dev/null
+++ b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_update_zone_sync.py
@@ -0,0 +1,60 @@
+# -*- coding: utf-8 -*-
+# Copyright 2024 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+# Generated code. DO NOT EDIT!
+#
+# Snippet for UpdateZone
+# NOTE: This snippet has been automatically generated for illustrative purposes only.
+# It may require modifications to work in your environment.
+
+# To install the latest published package dependency, execute the following:
+# python3 -m pip install google-cloud-dataplex
+
+
+# [START dataplex_v1_generated_DataplexService_UpdateZone_sync]
+# This snippet has been automatically generated and should be regarded as a
+# code template only.
+# It will require modifications to work:
+# - It may require correct/in-range values for request initialization.
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dataplex_v1 + + +def sample_update_zone(): + # Create a client + client = dataplex_v1.DataplexServiceClient() + + # Initialize request argument(s) + zone = dataplex_v1.Zone() + zone.type_ = "CURATED" + zone.resource_spec.location_type = "MULTI_REGION" + + request = dataplex_v1.UpdateZoneRequest( + zone=zone, + ) + + # Make the request + operation = client.update_zone(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + +# [END dataplex_v1_generated_DataplexService_UpdateZone_sync] diff --git a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_metadata_service_create_entity_async.py b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_metadata_service_create_entity_async.py new file mode 100644 index 000000000000..646999836556 --- /dev/null +++ b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_metadata_service_create_entity_async.py @@ -0,0 +1,62 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for CreateEntity +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dataplex + + +# [START dataplex_v1_generated_MetadataService_CreateEntity_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
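+#   (For CreateEntity specifically, "parent" is the zone to create the entity
+#   in; a plausible value, with hypothetical IDs, would be
+#   "projects/my-project/locations/us-central1/lakes/my-lake/zones/my-zone".)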
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dataplex_v1 + + +async def sample_create_entity(): + # Create a client + client = dataplex_v1.MetadataServiceAsyncClient() + + # Initialize request argument(s) + entity = dataplex_v1.Entity() + entity.id = "id_value" + entity.type_ = "FILESET" + entity.asset = "asset_value" + entity.data_path = "data_path_value" + entity.system = "BIGQUERY" + entity.format_.mime_type = "mime_type_value" + entity.schema.user_managed = True + + request = dataplex_v1.CreateEntityRequest( + parent="parent_value", + entity=entity, + ) + + # Make the request + response = await client.create_entity(request=request) + + # Handle the response + print(response) + +# [END dataplex_v1_generated_MetadataService_CreateEntity_async] diff --git a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_metadata_service_create_entity_sync.py b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_metadata_service_create_entity_sync.py new file mode 100644 index 000000000000..93fa897714bf --- /dev/null +++ b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_metadata_service_create_entity_sync.py @@ -0,0 +1,62 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for CreateEntity +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dataplex + + +# [START dataplex_v1_generated_MetadataService_CreateEntity_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
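+# - `parent` is the full resource name of the zone that will own the entity;
+#   with hypothetical IDs it looks like:
+#       parent = "projects/my-project/locations/us-central1/lakes/my-lake/zones/my-zone"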
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dataplex_v1 + + +def sample_create_entity(): + # Create a client + client = dataplex_v1.MetadataServiceClient() + + # Initialize request argument(s) + entity = dataplex_v1.Entity() + entity.id = "id_value" + entity.type_ = "FILESET" + entity.asset = "asset_value" + entity.data_path = "data_path_value" + entity.system = "BIGQUERY" + entity.format_.mime_type = "mime_type_value" + entity.schema.user_managed = True + + request = dataplex_v1.CreateEntityRequest( + parent="parent_value", + entity=entity, + ) + + # Make the request + response = client.create_entity(request=request) + + # Handle the response + print(response) + +# [END dataplex_v1_generated_MetadataService_CreateEntity_sync] diff --git a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_metadata_service_create_partition_async.py b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_metadata_service_create_partition_async.py new file mode 100644 index 000000000000..b26600eefc21 --- /dev/null +++ b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_metadata_service_create_partition_async.py @@ -0,0 +1,57 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for CreatePartition +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dataplex + + +# [START dataplex_v1_generated_MetadataService_CreatePartition_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
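+# - `parent` here is the full resource name of the entity being partitioned,
+#   and `partition.values` supplies one value per partition key of that
+#   entity, in schema order; a hypothetical example for keys (year, month):
+#       partition.values = ['2024', '11']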
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dataplex_v1 + + +async def sample_create_partition(): + # Create a client + client = dataplex_v1.MetadataServiceAsyncClient() + + # Initialize request argument(s) + partition = dataplex_v1.Partition() + partition.values = ['values_value1', 'values_value2'] + partition.location = "location_value" + + request = dataplex_v1.CreatePartitionRequest( + parent="parent_value", + partition=partition, + ) + + # Make the request + response = await client.create_partition(request=request) + + # Handle the response + print(response) + +# [END dataplex_v1_generated_MetadataService_CreatePartition_async] diff --git a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_metadata_service_create_partition_sync.py b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_metadata_service_create_partition_sync.py new file mode 100644 index 000000000000..5b7dc07cd1cb --- /dev/null +++ b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_metadata_service_create_partition_sync.py @@ -0,0 +1,57 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for CreatePartition +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dataplex + + +# [START dataplex_v1_generated_MetadataService_CreatePartition_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
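+# - `partition.location` is the immutable location of the data within this
+#   partition, typically a Cloud Storage path (hypothetical bucket and keys):
+#       partition.location = "gs://my-bucket/my-entity/year=2024/month=11"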
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dataplex_v1 + + +def sample_create_partition(): + # Create a client + client = dataplex_v1.MetadataServiceClient() + + # Initialize request argument(s) + partition = dataplex_v1.Partition() + partition.values = ['values_value1', 'values_value2'] + partition.location = "location_value" + + request = dataplex_v1.CreatePartitionRequest( + parent="parent_value", + partition=partition, + ) + + # Make the request + response = client.create_partition(request=request) + + # Handle the response + print(response) + +# [END dataplex_v1_generated_MetadataService_CreatePartition_sync] diff --git a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_metadata_service_delete_entity_async.py b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_metadata_service_delete_entity_async.py new file mode 100644 index 000000000000..299fff1ade47 --- /dev/null +++ b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_metadata_service_delete_entity_async.py @@ -0,0 +1,51 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for DeleteEntity +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dataplex + + +# [START dataplex_v1_generated_MetadataService_DeleteEntity_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
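+# - `etag` must match the entity's current server-side etag, which a prior
+#   get_entity call returns; a minimal sketch of fetching it first:
+#       got = await client.get_entity(
+#           request=dataplex_v1.GetEntityRequest(name="name_value"))
+#       request = dataplex_v1.DeleteEntityRequest(name=got.name, etag=got.etag)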
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dataplex_v1 + + +async def sample_delete_entity(): + # Create a client + client = dataplex_v1.MetadataServiceAsyncClient() + + # Initialize request argument(s) + request = dataplex_v1.DeleteEntityRequest( + name="name_value", + etag="etag_value", + ) + + # Make the request + await client.delete_entity(request=request) + + +# [END dataplex_v1_generated_MetadataService_DeleteEntity_async] diff --git a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_metadata_service_delete_entity_sync.py b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_metadata_service_delete_entity_sync.py new file mode 100644 index 000000000000..c89c6dbeac1f --- /dev/null +++ b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_metadata_service_delete_entity_sync.py @@ -0,0 +1,51 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for DeleteEntity +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dataplex + + +# [START dataplex_v1_generated_MetadataService_DeleteEntity_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
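+# - `name` is the full resource name of the entity to delete; with
+#   hypothetical IDs:
+#       name = "projects/my-project/locations/us-central1/lakes/my-lake/zones/my-zone/entities/my-entity"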
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dataplex_v1 + + +def sample_delete_entity(): + # Create a client + client = dataplex_v1.MetadataServiceClient() + + # Initialize request argument(s) + request = dataplex_v1.DeleteEntityRequest( + name="name_value", + etag="etag_value", + ) + + # Make the request + client.delete_entity(request=request) + + +# [END dataplex_v1_generated_MetadataService_DeleteEntity_sync] diff --git a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_metadata_service_delete_partition_async.py b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_metadata_service_delete_partition_async.py new file mode 100644 index 000000000000..e3d17ce188a7 --- /dev/null +++ b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_metadata_service_delete_partition_async.py @@ -0,0 +1,50 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for DeletePartition +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dataplex + + +# [START dataplex_v1_generated_MetadataService_DeletePartition_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
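+# - For partitions, `name` ends in `partitions/{partition_value_path}`, an
+#   ordered sequence of partition values separated by "/"; a hypothetical name
+#   for the values ('2024', '11') would therefore end in:
+#       ".../zones/my-zone/entities/my-entity/partitions/2024/11"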
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dataplex_v1 + + +async def sample_delete_partition(): + # Create a client + client = dataplex_v1.MetadataServiceAsyncClient() + + # Initialize request argument(s) + request = dataplex_v1.DeletePartitionRequest( + name="name_value", + ) + + # Make the request + await client.delete_partition(request=request) + + +# [END dataplex_v1_generated_MetadataService_DeletePartition_async] diff --git a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_metadata_service_delete_partition_sync.py b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_metadata_service_delete_partition_sync.py new file mode 100644 index 000000000000..8bda087a6447 --- /dev/null +++ b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_metadata_service_delete_partition_sync.py @@ -0,0 +1,50 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for DeletePartition +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dataplex + + +# [START dataplex_v1_generated_MetadataService_DeletePartition_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dataplex_v1 + + +def sample_delete_partition(): + # Create a client + client = dataplex_v1.MetadataServiceClient() + + # Initialize request argument(s) + request = dataplex_v1.DeletePartitionRequest( + name="name_value", + ) + + # Make the request + client.delete_partition(request=request) + + +# [END dataplex_v1_generated_MetadataService_DeletePartition_sync] diff --git a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_metadata_service_get_entity_async.py b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_metadata_service_get_entity_async.py new file mode 100644 index 000000000000..040c39c0a4c5 --- /dev/null +++ b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_metadata_service_get_entity_async.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetEntity +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dataplex + + +# [START dataplex_v1_generated_MetadataService_GetEntity_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
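+# - `GetEntityRequest` also takes an optional `view` that controls how much of
+#   the entity is returned; a sketch requesting everything, including schema:
+#       request = dataplex_v1.GetEntityRequest(
+#           name="name_value",
+#           view=dataplex_v1.GetEntityRequest.EntityView.FULL,
+#       )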
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dataplex_v1 + + +async def sample_get_entity(): + # Create a client + client = dataplex_v1.MetadataServiceAsyncClient() + + # Initialize request argument(s) + request = dataplex_v1.GetEntityRequest( + name="name_value", + ) + + # Make the request + response = await client.get_entity(request=request) + + # Handle the response + print(response) + +# [END dataplex_v1_generated_MetadataService_GetEntity_async] diff --git a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_metadata_service_get_entity_sync.py b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_metadata_service_get_entity_sync.py new file mode 100644 index 000000000000..5ee30bf5fa57 --- /dev/null +++ b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_metadata_service_get_entity_sync.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetEntity +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dataplex + + +# [START dataplex_v1_generated_MetadataService_GetEntity_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dataplex_v1 + + +def sample_get_entity(): + # Create a client + client = dataplex_v1.MetadataServiceClient() + + # Initialize request argument(s) + request = dataplex_v1.GetEntityRequest( + name="name_value", + ) + + # Make the request + response = client.get_entity(request=request) + + # Handle the response + print(response) + +# [END dataplex_v1_generated_MetadataService_GetEntity_sync] diff --git a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_metadata_service_get_partition_async.py b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_metadata_service_get_partition_async.py new file mode 100644 index 000000000000..b8173acc5686 --- /dev/null +++ b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_metadata_service_get_partition_async.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetPartition +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dataplex + + +# [START dataplex_v1_generated_MetadataService_GetPartition_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
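+# - As with delete_partition, the partition `name` encodes the ordered
+#   partition values after `partitions/`; with hypothetical IDs and values:
+#       request = dataplex_v1.GetPartitionRequest(
+#           name="projects/my-project/locations/us-central1/lakes/my-lake"
+#                "/zones/my-zone/entities/my-entity/partitions/2024/11",
+#       )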
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dataplex_v1 + + +async def sample_get_partition(): + # Create a client + client = dataplex_v1.MetadataServiceAsyncClient() + + # Initialize request argument(s) + request = dataplex_v1.GetPartitionRequest( + name="name_value", + ) + + # Make the request + response = await client.get_partition(request=request) + + # Handle the response + print(response) + +# [END dataplex_v1_generated_MetadataService_GetPartition_async] diff --git a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_metadata_service_get_partition_sync.py b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_metadata_service_get_partition_sync.py new file mode 100644 index 000000000000..2545e37f20a7 --- /dev/null +++ b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_metadata_service_get_partition_sync.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetPartition +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dataplex + + +# [START dataplex_v1_generated_MetadataService_GetPartition_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dataplex_v1 + + +def sample_get_partition(): + # Create a client + client = dataplex_v1.MetadataServiceClient() + + # Initialize request argument(s) + request = dataplex_v1.GetPartitionRequest( + name="name_value", + ) + + # Make the request + response = client.get_partition(request=request) + + # Handle the response + print(response) + +# [END dataplex_v1_generated_MetadataService_GetPartition_sync] diff --git a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_metadata_service_list_entities_async.py b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_metadata_service_list_entities_async.py new file mode 100644 index 000000000000..515797203908 --- /dev/null +++ b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_metadata_service_list_entities_async.py @@ -0,0 +1,54 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ListEntities +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dataplex + + +# [START dataplex_v1_generated_MetadataService_ListEntities_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
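+# - On the async client, list_entities is a coroutine that resolves to an
+#   async pager (hence the `await` before the `async for` below); the pager
+#   fetches further pages transparently. `view` is required and selects the
+#   kind of entity listed, "TABLES" or "FILESETS"; `page_size` is optional:
+#       request = dataplex_v1.ListEntitiesRequest(
+#           parent="parent_value", view="TABLES", page_size=100)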
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dataplex_v1 + + +async def sample_list_entities(): + # Create a client + client = dataplex_v1.MetadataServiceAsyncClient() + + # Initialize request argument(s) + request = dataplex_v1.ListEntitiesRequest( + parent="parent_value", + view="FILESETS", + ) + + # Make the request (await resolves the coroutine to an async pager) + page_result = await client.list_entities(request=request) + + # Handle the response + async for response in page_result: + print(response) + +# [END dataplex_v1_generated_MetadataService_ListEntities_async] diff --git a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_metadata_service_list_entities_sync.py b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_metadata_service_list_entities_sync.py new file mode 100644 index 000000000000..ba07568fef8d --- /dev/null +++ b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_metadata_service_list_entities_sync.py @@ -0,0 +1,54 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ListEntities +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dataplex + + +# [START dataplex_v1_generated_MetadataService_ListEntities_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization.
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dataplex_v1 + + +def sample_list_entities(): + # Create a client + client = dataplex_v1.MetadataServiceClient() + + # Initialize request argument(s) + request = dataplex_v1.ListEntitiesRequest( + parent="parent_value", + view="FILESETS", + ) + + # Make the request + page_result = client.list_entities(request=request) + + # Handle the response + for response in page_result: + print(response) + +# [END dataplex_v1_generated_MetadataService_ListEntities_sync] diff --git a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_metadata_service_list_partitions_async.py b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_metadata_service_list_partitions_async.py new file mode 100644 index 000000000000..b768b015e2f4 --- /dev/null +++ b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_metadata_service_list_partitions_async.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ListPartitions +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dataplex + + +# [START dataplex_v1_generated_MetadataService_ListPartitions_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
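+# - As in list_entities, the coroutine result is awaited before iteration.
+#   `ListPartitionsRequest` also accepts an optional `filter` over partition
+#   values; the expression below is only a hypothetical placeholder, as the
+#   supported operators are service-defined:
+#       request = dataplex_v1.ListPartitionsRequest(
+#           parent="parent_value",
+#           filter='key1 < "value1"',
+#       )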
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dataplex_v1 + + +async def sample_list_partitions(): + # Create a client + client = dataplex_v1.MetadataServiceAsyncClient() + + # Initialize request argument(s) + request = dataplex_v1.ListPartitionsRequest( + parent="parent_value", + ) + + # Make the request (await resolves the coroutine to an async pager) + page_result = await client.list_partitions(request=request) + + # Handle the response + async for response in page_result: + print(response) + +# [END dataplex_v1_generated_MetadataService_ListPartitions_async] diff --git a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_metadata_service_list_partitions_sync.py b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_metadata_service_list_partitions_sync.py new file mode 100644 index 000000000000..ae571b8a8773 --- /dev/null +++ b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_metadata_service_list_partitions_sync.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ListPartitions +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dataplex + + +# [START dataplex_v1_generated_MetadataService_ListPartitions_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization.
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dataplex_v1 + + +def sample_list_partitions(): + # Create a client + client = dataplex_v1.MetadataServiceClient() + + # Initialize request argument(s) + request = dataplex_v1.ListPartitionsRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_partitions(request=request) + + # Handle the response + for response in page_result: + print(response) + +# [END dataplex_v1_generated_MetadataService_ListPartitions_sync] diff --git a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_metadata_service_update_entity_async.py b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_metadata_service_update_entity_async.py new file mode 100644 index 000000000000..bd878e0fb8e7 --- /dev/null +++ b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_metadata_service_update_entity_async.py @@ -0,0 +1,61 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for UpdateEntity +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dataplex + + +# [START dataplex_v1_generated_MetadataService_UpdateEntity_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
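+# - Unlike update_zone, `UpdateEntityRequest` carries no field mask: the
+#   entity passed in replaces the stored entity wholesale. Setting the
+#   optional `validate_only` flag dry-runs the update without persisting it:
+#       request = dataplex_v1.UpdateEntityRequest(
+#           entity=entity,
+#           validate_only=True,
+#       )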
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dataplex_v1 + + +async def sample_update_entity(): + # Create a client + client = dataplex_v1.MetadataServiceAsyncClient() + + # Initialize request argument(s) + entity = dataplex_v1.Entity() + entity.id = "id_value" + entity.type_ = "FILESET" + entity.asset = "asset_value" + entity.data_path = "data_path_value" + entity.system = "BIGQUERY" + entity.format_.mime_type = "mime_type_value" + entity.schema.user_managed = True + + request = dataplex_v1.UpdateEntityRequest( + entity=entity, + ) + + # Make the request + response = await client.update_entity(request=request) + + # Handle the response + print(response) + +# [END dataplex_v1_generated_MetadataService_UpdateEntity_async] diff --git a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_metadata_service_update_entity_sync.py b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_metadata_service_update_entity_sync.py new file mode 100644 index 000000000000..5362969247ef --- /dev/null +++ b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_metadata_service_update_entity_sync.py @@ -0,0 +1,61 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for UpdateEntity +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dataplex + + +# [START dataplex_v1_generated_MetadataService_UpdateEntity_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dataplex_v1 + + +def sample_update_entity(): + # Create a client + client = dataplex_v1.MetadataServiceClient() + + # Initialize request argument(s) + entity = dataplex_v1.Entity() + entity.id = "id_value" + entity.type_ = "FILESET" + entity.asset = "asset_value" + entity.data_path = "data_path_value" + entity.system = "BIGQUERY" + entity.format_.mime_type = "mime_type_value" + entity.schema.user_managed = True + + request = dataplex_v1.UpdateEntityRequest( + entity=entity, + ) + + # Make the request + response = client.update_entity(request=request) + + # Handle the response + print(response) + +# [END dataplex_v1_generated_MetadataService_UpdateEntity_sync] diff --git a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/snippet_metadata_google.cloud.dataplex.v1.json b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/snippet_metadata_google.cloud.dataplex.v1.json new file mode 100644 index 000000000000..a7eb15b2cc0d --- /dev/null +++ b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/snippet_metadata_google.cloud.dataplex.v1.json @@ -0,0 +1,16421 @@ +{ + "clientLibrary": { + "apis": [ + { + "id": "google.cloud.dataplex.v1", + "version": "v1" + } + ], + "language": "PYTHON", + "name": "google-cloud-dataplex", + "version": "0.1.0" + }, + "snippets": [ + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.dataplex_v1.CatalogServiceAsyncClient", + "shortName": "CatalogServiceAsyncClient" + }, + "fullName": "google.cloud.dataplex_v1.CatalogServiceAsyncClient.cancel_metadata_job", + "method": { + "fullName": "google.cloud.dataplex.v1.CatalogService.CancelMetadataJob", + "service": { + "fullName": "google.cloud.dataplex.v1.CatalogService", + "shortName": "CatalogService" + }, + "shortName": "CancelMetadataJob" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dataplex_v1.types.CancelMetadataJobRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "shortName": "cancel_metadata_job" + }, + "description": "Sample for CancelMetadataJob", + "file": "dataplex_v1_generated_catalog_service_cancel_metadata_job_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dataplex_v1_generated_CatalogService_CancelMetadataJob_async", + "segments": [ + { + "end": 49, + "start": 27, + "type": "FULL" + }, + { + "end": 49, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dataplex_v1_generated_catalog_service_cancel_metadata_job_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.dataplex_v1.CatalogServiceClient", + "shortName": "CatalogServiceClient" + }, + "fullName": "google.cloud.dataplex_v1.CatalogServiceClient.cancel_metadata_job", + "method": { + "fullName": "google.cloud.dataplex.v1.CatalogService.CancelMetadataJob", + "service": { + "fullName": 
"google.cloud.dataplex.v1.CatalogService", + "shortName": "CatalogService" + }, + "shortName": "CancelMetadataJob" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dataplex_v1.types.CancelMetadataJobRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "shortName": "cancel_metadata_job" + }, + "description": "Sample for CancelMetadataJob", + "file": "dataplex_v1_generated_catalog_service_cancel_metadata_job_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dataplex_v1_generated_CatalogService_CancelMetadataJob_sync", + "segments": [ + { + "end": 49, + "start": 27, + "type": "FULL" + }, + { + "end": 49, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dataplex_v1_generated_catalog_service_cancel_metadata_job_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.dataplex_v1.CatalogServiceAsyncClient", + "shortName": "CatalogServiceAsyncClient" + }, + "fullName": "google.cloud.dataplex_v1.CatalogServiceAsyncClient.create_aspect_type", + "method": { + "fullName": "google.cloud.dataplex.v1.CatalogService.CreateAspectType", + "service": { + "fullName": "google.cloud.dataplex.v1.CatalogService", + "shortName": "CatalogService" + }, + "shortName": "CreateAspectType" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dataplex_v1.types.CreateAspectTypeRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "aspect_type", + "type": "google.cloud.dataplex_v1.types.AspectType" + }, + { + "name": "aspect_type_id", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation_async.AsyncOperation", + "shortName": "create_aspect_type" + }, + "description": "Sample for CreateAspectType", + "file": "dataplex_v1_generated_catalog_service_create_aspect_type_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dataplex_v1_generated_CatalogService_CreateAspectType_async", + "segments": [ + { + "end": 61, + "start": 27, + "type": "FULL" + }, + { + "end": 61, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 51, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 58, + "start": 52, + "type": "REQUEST_EXECUTION" + }, + { + "end": 62, + "start": 59, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dataplex_v1_generated_catalog_service_create_aspect_type_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.dataplex_v1.CatalogServiceClient", + "shortName": "CatalogServiceClient" + }, + "fullName": "google.cloud.dataplex_v1.CatalogServiceClient.create_aspect_type", + "method": { + "fullName": "google.cloud.dataplex.v1.CatalogService.CreateAspectType", + "service": { + "fullName": "google.cloud.dataplex.v1.CatalogService", + "shortName": "CatalogService" + }, + "shortName": 
"CreateAspectType" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dataplex_v1.types.CreateAspectTypeRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "aspect_type", + "type": "google.cloud.dataplex_v1.types.AspectType" + }, + { + "name": "aspect_type_id", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation.Operation", + "shortName": "create_aspect_type" + }, + "description": "Sample for CreateAspectType", + "file": "dataplex_v1_generated_catalog_service_create_aspect_type_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dataplex_v1_generated_CatalogService_CreateAspectType_sync", + "segments": [ + { + "end": 61, + "start": 27, + "type": "FULL" + }, + { + "end": 61, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 51, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 58, + "start": 52, + "type": "REQUEST_EXECUTION" + }, + { + "end": 62, + "start": 59, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dataplex_v1_generated_catalog_service_create_aspect_type_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.dataplex_v1.CatalogServiceAsyncClient", + "shortName": "CatalogServiceAsyncClient" + }, + "fullName": "google.cloud.dataplex_v1.CatalogServiceAsyncClient.create_entry_group", + "method": { + "fullName": "google.cloud.dataplex.v1.CatalogService.CreateEntryGroup", + "service": { + "fullName": "google.cloud.dataplex.v1.CatalogService", + "shortName": "CatalogService" + }, + "shortName": "CreateEntryGroup" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dataplex_v1.types.CreateEntryGroupRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "entry_group", + "type": "google.cloud.dataplex_v1.types.EntryGroup" + }, + { + "name": "entry_group_id", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation_async.AsyncOperation", + "shortName": "create_entry_group" + }, + "description": "Sample for CreateEntryGroup", + "file": "dataplex_v1_generated_catalog_service_create_entry_group_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dataplex_v1_generated_CatalogService_CreateEntryGroup_async", + "segments": [ + { + "end": 56, + "start": 27, + "type": "FULL" + }, + { + "end": 56, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 46, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 53, + "start": 47, + "type": "REQUEST_EXECUTION" + }, + { + "end": 57, + "start": 54, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dataplex_v1_generated_catalog_service_create_entry_group_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.dataplex_v1.CatalogServiceClient", + "shortName": "CatalogServiceClient" + }, + "fullName": "google.cloud.dataplex_v1.CatalogServiceClient.create_entry_group", + "method": { + "fullName": "google.cloud.dataplex.v1.CatalogService.CreateEntryGroup", + 
"service": { + "fullName": "google.cloud.dataplex.v1.CatalogService", + "shortName": "CatalogService" + }, + "shortName": "CreateEntryGroup" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dataplex_v1.types.CreateEntryGroupRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "entry_group", + "type": "google.cloud.dataplex_v1.types.EntryGroup" + }, + { + "name": "entry_group_id", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation.Operation", + "shortName": "create_entry_group" + }, + "description": "Sample for CreateEntryGroup", + "file": "dataplex_v1_generated_catalog_service_create_entry_group_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dataplex_v1_generated_CatalogService_CreateEntryGroup_sync", + "segments": [ + { + "end": 56, + "start": 27, + "type": "FULL" + }, + { + "end": 56, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 46, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 53, + "start": 47, + "type": "REQUEST_EXECUTION" + }, + { + "end": 57, + "start": 54, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dataplex_v1_generated_catalog_service_create_entry_group_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.dataplex_v1.CatalogServiceAsyncClient", + "shortName": "CatalogServiceAsyncClient" + }, + "fullName": "google.cloud.dataplex_v1.CatalogServiceAsyncClient.create_entry_type", + "method": { + "fullName": "google.cloud.dataplex.v1.CatalogService.CreateEntryType", + "service": { + "fullName": "google.cloud.dataplex.v1.CatalogService", + "shortName": "CatalogService" + }, + "shortName": "CreateEntryType" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dataplex_v1.types.CreateEntryTypeRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "entry_type", + "type": "google.cloud.dataplex_v1.types.EntryType" + }, + { + "name": "entry_type_id", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation_async.AsyncOperation", + "shortName": "create_entry_type" + }, + "description": "Sample for CreateEntryType", + "file": "dataplex_v1_generated_catalog_service_create_entry_type_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dataplex_v1_generated_CatalogService_CreateEntryType_async", + "segments": [ + { + "end": 56, + "start": 27, + "type": "FULL" + }, + { + "end": 56, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 46, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 53, + "start": 47, + "type": "REQUEST_EXECUTION" + }, + { + "end": 57, + "start": 54, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dataplex_v1_generated_catalog_service_create_entry_type_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.dataplex_v1.CatalogServiceClient", + "shortName": "CatalogServiceClient" + }, + "fullName": 
"google.cloud.dataplex_v1.CatalogServiceClient.create_entry_type", + "method": { + "fullName": "google.cloud.dataplex.v1.CatalogService.CreateEntryType", + "service": { + "fullName": "google.cloud.dataplex.v1.CatalogService", + "shortName": "CatalogService" + }, + "shortName": "CreateEntryType" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dataplex_v1.types.CreateEntryTypeRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "entry_type", + "type": "google.cloud.dataplex_v1.types.EntryType" + }, + { + "name": "entry_type_id", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation.Operation", + "shortName": "create_entry_type" + }, + "description": "Sample for CreateEntryType", + "file": "dataplex_v1_generated_catalog_service_create_entry_type_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dataplex_v1_generated_CatalogService_CreateEntryType_sync", + "segments": [ + { + "end": 56, + "start": 27, + "type": "FULL" + }, + { + "end": 56, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 46, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 53, + "start": 47, + "type": "REQUEST_EXECUTION" + }, + { + "end": 57, + "start": 54, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dataplex_v1_generated_catalog_service_create_entry_type_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.dataplex_v1.CatalogServiceAsyncClient", + "shortName": "CatalogServiceAsyncClient" + }, + "fullName": "google.cloud.dataplex_v1.CatalogServiceAsyncClient.create_entry", + "method": { + "fullName": "google.cloud.dataplex.v1.CatalogService.CreateEntry", + "service": { + "fullName": "google.cloud.dataplex.v1.CatalogService", + "shortName": "CatalogService" + }, + "shortName": "CreateEntry" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dataplex_v1.types.CreateEntryRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "entry", + "type": "google.cloud.dataplex_v1.types.Entry" + }, + { + "name": "entry_id", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.dataplex_v1.types.Entry", + "shortName": "create_entry" + }, + "description": "Sample for CreateEntry", + "file": "dataplex_v1_generated_catalog_service_create_entry_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dataplex_v1_generated_CatalogService_CreateEntry_async", + "segments": [ + { + "end": 56, + "start": 27, + "type": "FULL" + }, + { + "end": 56, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 50, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 53, + "start": 51, + "type": "REQUEST_EXECUTION" + }, + { + "end": 57, + "start": 54, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dataplex_v1_generated_catalog_service_create_entry_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.dataplex_v1.CatalogServiceClient", + "shortName": 
"CatalogServiceClient" + }, + "fullName": "google.cloud.dataplex_v1.CatalogServiceClient.create_entry", + "method": { + "fullName": "google.cloud.dataplex.v1.CatalogService.CreateEntry", + "service": { + "fullName": "google.cloud.dataplex.v1.CatalogService", + "shortName": "CatalogService" + }, + "shortName": "CreateEntry" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dataplex_v1.types.CreateEntryRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "entry", + "type": "google.cloud.dataplex_v1.types.Entry" + }, + { + "name": "entry_id", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.dataplex_v1.types.Entry", + "shortName": "create_entry" + }, + "description": "Sample for CreateEntry", + "file": "dataplex_v1_generated_catalog_service_create_entry_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dataplex_v1_generated_CatalogService_CreateEntry_sync", + "segments": [ + { + "end": 56, + "start": 27, + "type": "FULL" + }, + { + "end": 56, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 50, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 53, + "start": 51, + "type": "REQUEST_EXECUTION" + }, + { + "end": 57, + "start": 54, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dataplex_v1_generated_catalog_service_create_entry_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.dataplex_v1.CatalogServiceAsyncClient", + "shortName": "CatalogServiceAsyncClient" + }, + "fullName": "google.cloud.dataplex_v1.CatalogServiceAsyncClient.create_metadata_job", + "method": { + "fullName": "google.cloud.dataplex.v1.CatalogService.CreateMetadataJob", + "service": { + "fullName": "google.cloud.dataplex.v1.CatalogService", + "shortName": "CatalogService" + }, + "shortName": "CreateMetadataJob" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dataplex_v1.types.CreateMetadataJobRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "metadata_job", + "type": "google.cloud.dataplex_v1.types.MetadataJob" + }, + { + "name": "metadata_job_id", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation_async.AsyncOperation", + "shortName": "create_metadata_job" + }, + "description": "Sample for CreateMetadataJob", + "file": "dataplex_v1_generated_catalog_service_create_metadata_job_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dataplex_v1_generated_CatalogService_CreateMetadataJob_async", + "segments": [ + { + "end": 63, + "start": 27, + "type": "FULL" + }, + { + "end": 63, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 53, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 60, + "start": 54, + "type": "REQUEST_EXECUTION" + }, + { + "end": 64, + "start": 61, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dataplex_v1_generated_catalog_service_create_metadata_job_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + 
"fullName": "google.cloud.dataplex_v1.CatalogServiceClient", + "shortName": "CatalogServiceClient" + }, + "fullName": "google.cloud.dataplex_v1.CatalogServiceClient.create_metadata_job", + "method": { + "fullName": "google.cloud.dataplex.v1.CatalogService.CreateMetadataJob", + "service": { + "fullName": "google.cloud.dataplex.v1.CatalogService", + "shortName": "CatalogService" + }, + "shortName": "CreateMetadataJob" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dataplex_v1.types.CreateMetadataJobRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "metadata_job", + "type": "google.cloud.dataplex_v1.types.MetadataJob" + }, + { + "name": "metadata_job_id", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation.Operation", + "shortName": "create_metadata_job" + }, + "description": "Sample for CreateMetadataJob", + "file": "dataplex_v1_generated_catalog_service_create_metadata_job_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dataplex_v1_generated_CatalogService_CreateMetadataJob_sync", + "segments": [ + { + "end": 63, + "start": 27, + "type": "FULL" + }, + { + "end": 63, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 53, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 60, + "start": 54, + "type": "REQUEST_EXECUTION" + }, + { + "end": 64, + "start": 61, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dataplex_v1_generated_catalog_service_create_metadata_job_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.dataplex_v1.CatalogServiceAsyncClient", + "shortName": "CatalogServiceAsyncClient" + }, + "fullName": "google.cloud.dataplex_v1.CatalogServiceAsyncClient.delete_aspect_type", + "method": { + "fullName": "google.cloud.dataplex.v1.CatalogService.DeleteAspectType", + "service": { + "fullName": "google.cloud.dataplex.v1.CatalogService", + "shortName": "CatalogService" + }, + "shortName": "DeleteAspectType" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dataplex_v1.types.DeleteAspectTypeRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation_async.AsyncOperation", + "shortName": "delete_aspect_type" + }, + "description": "Sample for DeleteAspectType", + "file": "dataplex_v1_generated_catalog_service_delete_aspect_type_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dataplex_v1_generated_CatalogService_DeleteAspectType_async", + "segments": [ + { + "end": 55, + "start": 27, + "type": "FULL" + }, + { + "end": 55, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 52, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 56, + "start": 53, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dataplex_v1_generated_catalog_service_delete_aspect_type_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": 
"google.cloud.dataplex_v1.CatalogServiceClient", + "shortName": "CatalogServiceClient" + }, + "fullName": "google.cloud.dataplex_v1.CatalogServiceClient.delete_aspect_type", + "method": { + "fullName": "google.cloud.dataplex.v1.CatalogService.DeleteAspectType", + "service": { + "fullName": "google.cloud.dataplex.v1.CatalogService", + "shortName": "CatalogService" + }, + "shortName": "DeleteAspectType" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dataplex_v1.types.DeleteAspectTypeRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation.Operation", + "shortName": "delete_aspect_type" + }, + "description": "Sample for DeleteAspectType", + "file": "dataplex_v1_generated_catalog_service_delete_aspect_type_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dataplex_v1_generated_CatalogService_DeleteAspectType_sync", + "segments": [ + { + "end": 55, + "start": 27, + "type": "FULL" + }, + { + "end": 55, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 52, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 56, + "start": 53, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dataplex_v1_generated_catalog_service_delete_aspect_type_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.dataplex_v1.CatalogServiceAsyncClient", + "shortName": "CatalogServiceAsyncClient" + }, + "fullName": "google.cloud.dataplex_v1.CatalogServiceAsyncClient.delete_entry_group", + "method": { + "fullName": "google.cloud.dataplex.v1.CatalogService.DeleteEntryGroup", + "service": { + "fullName": "google.cloud.dataplex.v1.CatalogService", + "shortName": "CatalogService" + }, + "shortName": "DeleteEntryGroup" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dataplex_v1.types.DeleteEntryGroupRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation_async.AsyncOperation", + "shortName": "delete_entry_group" + }, + "description": "Sample for DeleteEntryGroup", + "file": "dataplex_v1_generated_catalog_service_delete_entry_group_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dataplex_v1_generated_CatalogService_DeleteEntryGroup_async", + "segments": [ + { + "end": 55, + "start": 27, + "type": "FULL" + }, + { + "end": 55, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 52, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 56, + "start": 53, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dataplex_v1_generated_catalog_service_delete_entry_group_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.dataplex_v1.CatalogServiceClient", + "shortName": "CatalogServiceClient" + }, + "fullName": 
"google.cloud.dataplex_v1.CatalogServiceClient.delete_entry_group", + "method": { + "fullName": "google.cloud.dataplex.v1.CatalogService.DeleteEntryGroup", + "service": { + "fullName": "google.cloud.dataplex.v1.CatalogService", + "shortName": "CatalogService" + }, + "shortName": "DeleteEntryGroup" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dataplex_v1.types.DeleteEntryGroupRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation.Operation", + "shortName": "delete_entry_group" + }, + "description": "Sample for DeleteEntryGroup", + "file": "dataplex_v1_generated_catalog_service_delete_entry_group_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dataplex_v1_generated_CatalogService_DeleteEntryGroup_sync", + "segments": [ + { + "end": 55, + "start": 27, + "type": "FULL" + }, + { + "end": 55, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 52, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 56, + "start": 53, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dataplex_v1_generated_catalog_service_delete_entry_group_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.dataplex_v1.CatalogServiceAsyncClient", + "shortName": "CatalogServiceAsyncClient" + }, + "fullName": "google.cloud.dataplex_v1.CatalogServiceAsyncClient.delete_entry_type", + "method": { + "fullName": "google.cloud.dataplex.v1.CatalogService.DeleteEntryType", + "service": { + "fullName": "google.cloud.dataplex.v1.CatalogService", + "shortName": "CatalogService" + }, + "shortName": "DeleteEntryType" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dataplex_v1.types.DeleteEntryTypeRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation_async.AsyncOperation", + "shortName": "delete_entry_type" + }, + "description": "Sample for DeleteEntryType", + "file": "dataplex_v1_generated_catalog_service_delete_entry_type_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dataplex_v1_generated_CatalogService_DeleteEntryType_async", + "segments": [ + { + "end": 55, + "start": 27, + "type": "FULL" + }, + { + "end": 55, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 52, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 56, + "start": 53, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dataplex_v1_generated_catalog_service_delete_entry_type_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.dataplex_v1.CatalogServiceClient", + "shortName": "CatalogServiceClient" + }, + "fullName": "google.cloud.dataplex_v1.CatalogServiceClient.delete_entry_type", + "method": { + "fullName": "google.cloud.dataplex.v1.CatalogService.DeleteEntryType", + "service": { + 
"fullName": "google.cloud.dataplex.v1.CatalogService", + "shortName": "CatalogService" + }, + "shortName": "DeleteEntryType" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dataplex_v1.types.DeleteEntryTypeRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation.Operation", + "shortName": "delete_entry_type" + }, + "description": "Sample for DeleteEntryType", + "file": "dataplex_v1_generated_catalog_service_delete_entry_type_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dataplex_v1_generated_CatalogService_DeleteEntryType_sync", + "segments": [ + { + "end": 55, + "start": 27, + "type": "FULL" + }, + { + "end": 55, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 52, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 56, + "start": 53, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dataplex_v1_generated_catalog_service_delete_entry_type_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.dataplex_v1.CatalogServiceAsyncClient", + "shortName": "CatalogServiceAsyncClient" + }, + "fullName": "google.cloud.dataplex_v1.CatalogServiceAsyncClient.delete_entry", + "method": { + "fullName": "google.cloud.dataplex.v1.CatalogService.DeleteEntry", + "service": { + "fullName": "google.cloud.dataplex.v1.CatalogService", + "shortName": "CatalogService" + }, + "shortName": "DeleteEntry" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dataplex_v1.types.DeleteEntryRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.dataplex_v1.types.Entry", + "shortName": "delete_entry" + }, + "description": "Sample for DeleteEntry", + "file": "dataplex_v1_generated_catalog_service_delete_entry_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dataplex_v1_generated_CatalogService_DeleteEntry_async", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dataplex_v1_generated_catalog_service_delete_entry_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.dataplex_v1.CatalogServiceClient", + "shortName": "CatalogServiceClient" + }, + "fullName": "google.cloud.dataplex_v1.CatalogServiceClient.delete_entry", + "method": { + "fullName": "google.cloud.dataplex.v1.CatalogService.DeleteEntry", + "service": { + "fullName": "google.cloud.dataplex.v1.CatalogService", + "shortName": "CatalogService" + }, + "shortName": "DeleteEntry" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dataplex_v1.types.DeleteEntryRequest" + }, + { + 
"name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.dataplex_v1.types.Entry", + "shortName": "delete_entry" + }, + "description": "Sample for DeleteEntry", + "file": "dataplex_v1_generated_catalog_service_delete_entry_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dataplex_v1_generated_CatalogService_DeleteEntry_sync", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dataplex_v1_generated_catalog_service_delete_entry_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.dataplex_v1.CatalogServiceAsyncClient", + "shortName": "CatalogServiceAsyncClient" + }, + "fullName": "google.cloud.dataplex_v1.CatalogServiceAsyncClient.get_aspect_type", + "method": { + "fullName": "google.cloud.dataplex.v1.CatalogService.GetAspectType", + "service": { + "fullName": "google.cloud.dataplex.v1.CatalogService", + "shortName": "CatalogService" + }, + "shortName": "GetAspectType" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dataplex_v1.types.GetAspectTypeRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.dataplex_v1.types.AspectType", + "shortName": "get_aspect_type" + }, + "description": "Sample for GetAspectType", + "file": "dataplex_v1_generated_catalog_service_get_aspect_type_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dataplex_v1_generated_CatalogService_GetAspectType_async", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dataplex_v1_generated_catalog_service_get_aspect_type_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.dataplex_v1.CatalogServiceClient", + "shortName": "CatalogServiceClient" + }, + "fullName": "google.cloud.dataplex_v1.CatalogServiceClient.get_aspect_type", + "method": { + "fullName": "google.cloud.dataplex.v1.CatalogService.GetAspectType", + "service": { + "fullName": "google.cloud.dataplex.v1.CatalogService", + "shortName": "CatalogService" + }, + "shortName": "GetAspectType" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dataplex_v1.types.GetAspectTypeRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + 
"resultType": "google.cloud.dataplex_v1.types.AspectType", + "shortName": "get_aspect_type" + }, + "description": "Sample for GetAspectType", + "file": "dataplex_v1_generated_catalog_service_get_aspect_type_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dataplex_v1_generated_CatalogService_GetAspectType_sync", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dataplex_v1_generated_catalog_service_get_aspect_type_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.dataplex_v1.CatalogServiceAsyncClient", + "shortName": "CatalogServiceAsyncClient" + }, + "fullName": "google.cloud.dataplex_v1.CatalogServiceAsyncClient.get_entry_group", + "method": { + "fullName": "google.cloud.dataplex.v1.CatalogService.GetEntryGroup", + "service": { + "fullName": "google.cloud.dataplex.v1.CatalogService", + "shortName": "CatalogService" + }, + "shortName": "GetEntryGroup" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dataplex_v1.types.GetEntryGroupRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.dataplex_v1.types.EntryGroup", + "shortName": "get_entry_group" + }, + "description": "Sample for GetEntryGroup", + "file": "dataplex_v1_generated_catalog_service_get_entry_group_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dataplex_v1_generated_CatalogService_GetEntryGroup_async", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dataplex_v1_generated_catalog_service_get_entry_group_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.dataplex_v1.CatalogServiceClient", + "shortName": "CatalogServiceClient" + }, + "fullName": "google.cloud.dataplex_v1.CatalogServiceClient.get_entry_group", + "method": { + "fullName": "google.cloud.dataplex.v1.CatalogService.GetEntryGroup", + "service": { + "fullName": "google.cloud.dataplex.v1.CatalogService", + "shortName": "CatalogService" + }, + "shortName": "GetEntryGroup" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dataplex_v1.types.GetEntryGroupRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.dataplex_v1.types.EntryGroup", + "shortName": "get_entry_group" + }, + "description": "Sample for GetEntryGroup", + "file": "dataplex_v1_generated_catalog_service_get_entry_group_sync.py", + 
"language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dataplex_v1_generated_CatalogService_GetEntryGroup_sync", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dataplex_v1_generated_catalog_service_get_entry_group_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.dataplex_v1.CatalogServiceAsyncClient", + "shortName": "CatalogServiceAsyncClient" + }, + "fullName": "google.cloud.dataplex_v1.CatalogServiceAsyncClient.get_entry_type", + "method": { + "fullName": "google.cloud.dataplex.v1.CatalogService.GetEntryType", + "service": { + "fullName": "google.cloud.dataplex.v1.CatalogService", + "shortName": "CatalogService" + }, + "shortName": "GetEntryType" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dataplex_v1.types.GetEntryTypeRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.dataplex_v1.types.EntryType", + "shortName": "get_entry_type" + }, + "description": "Sample for GetEntryType", + "file": "dataplex_v1_generated_catalog_service_get_entry_type_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dataplex_v1_generated_CatalogService_GetEntryType_async", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dataplex_v1_generated_catalog_service_get_entry_type_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.dataplex_v1.CatalogServiceClient", + "shortName": "CatalogServiceClient" + }, + "fullName": "google.cloud.dataplex_v1.CatalogServiceClient.get_entry_type", + "method": { + "fullName": "google.cloud.dataplex.v1.CatalogService.GetEntryType", + "service": { + "fullName": "google.cloud.dataplex.v1.CatalogService", + "shortName": "CatalogService" + }, + "shortName": "GetEntryType" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dataplex_v1.types.GetEntryTypeRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.dataplex_v1.types.EntryType", + "shortName": "get_entry_type" + }, + "description": "Sample for GetEntryType", + "file": "dataplex_v1_generated_catalog_service_get_entry_type_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dataplex_v1_generated_CatalogService_GetEntryType_sync", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": 
"SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dataplex_v1_generated_catalog_service_get_entry_type_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.dataplex_v1.CatalogServiceAsyncClient", + "shortName": "CatalogServiceAsyncClient" + }, + "fullName": "google.cloud.dataplex_v1.CatalogServiceAsyncClient.get_entry", + "method": { + "fullName": "google.cloud.dataplex.v1.CatalogService.GetEntry", + "service": { + "fullName": "google.cloud.dataplex.v1.CatalogService", + "shortName": "CatalogService" + }, + "shortName": "GetEntry" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dataplex_v1.types.GetEntryRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.dataplex_v1.types.Entry", + "shortName": "get_entry" + }, + "description": "Sample for GetEntry", + "file": "dataplex_v1_generated_catalog_service_get_entry_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dataplex_v1_generated_CatalogService_GetEntry_async", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dataplex_v1_generated_catalog_service_get_entry_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.dataplex_v1.CatalogServiceClient", + "shortName": "CatalogServiceClient" + }, + "fullName": "google.cloud.dataplex_v1.CatalogServiceClient.get_entry", + "method": { + "fullName": "google.cloud.dataplex.v1.CatalogService.GetEntry", + "service": { + "fullName": "google.cloud.dataplex.v1.CatalogService", + "shortName": "CatalogService" + }, + "shortName": "GetEntry" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dataplex_v1.types.GetEntryRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.dataplex_v1.types.Entry", + "shortName": "get_entry" + }, + "description": "Sample for GetEntry", + "file": "dataplex_v1_generated_catalog_service_get_entry_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dataplex_v1_generated_CatalogService_GetEntry_sync", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": 
"dataplex_v1_generated_catalog_service_get_entry_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.dataplex_v1.CatalogServiceAsyncClient", + "shortName": "CatalogServiceAsyncClient" + }, + "fullName": "google.cloud.dataplex_v1.CatalogServiceAsyncClient.get_metadata_job", + "method": { + "fullName": "google.cloud.dataplex.v1.CatalogService.GetMetadataJob", + "service": { + "fullName": "google.cloud.dataplex.v1.CatalogService", + "shortName": "CatalogService" + }, + "shortName": "GetMetadataJob" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dataplex_v1.types.GetMetadataJobRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.dataplex_v1.types.MetadataJob", + "shortName": "get_metadata_job" + }, + "description": "Sample for GetMetadataJob", + "file": "dataplex_v1_generated_catalog_service_get_metadata_job_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dataplex_v1_generated_CatalogService_GetMetadataJob_async", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dataplex_v1_generated_catalog_service_get_metadata_job_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.dataplex_v1.CatalogServiceClient", + "shortName": "CatalogServiceClient" + }, + "fullName": "google.cloud.dataplex_v1.CatalogServiceClient.get_metadata_job", + "method": { + "fullName": "google.cloud.dataplex.v1.CatalogService.GetMetadataJob", + "service": { + "fullName": "google.cloud.dataplex.v1.CatalogService", + "shortName": "CatalogService" + }, + "shortName": "GetMetadataJob" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dataplex_v1.types.GetMetadataJobRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.dataplex_v1.types.MetadataJob", + "shortName": "get_metadata_job" + }, + "description": "Sample for GetMetadataJob", + "file": "dataplex_v1_generated_catalog_service_get_metadata_job_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dataplex_v1_generated_CatalogService_GetMetadataJob_sync", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dataplex_v1_generated_catalog_service_get_metadata_job_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": 
"google.cloud.dataplex_v1.CatalogServiceAsyncClient", + "shortName": "CatalogServiceAsyncClient" + }, + "fullName": "google.cloud.dataplex_v1.CatalogServiceAsyncClient.list_aspect_types", + "method": { + "fullName": "google.cloud.dataplex.v1.CatalogService.ListAspectTypes", + "service": { + "fullName": "google.cloud.dataplex.v1.CatalogService", + "shortName": "CatalogService" + }, + "shortName": "ListAspectTypes" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dataplex_v1.types.ListAspectTypesRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.dataplex_v1.services.catalog_service.pagers.ListAspectTypesAsyncPager", + "shortName": "list_aspect_types" + }, + "description": "Sample for ListAspectTypes", + "file": "dataplex_v1_generated_catalog_service_list_aspect_types_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dataplex_v1_generated_CatalogService_ListAspectTypes_async", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dataplex_v1_generated_catalog_service_list_aspect_types_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.dataplex_v1.CatalogServiceClient", + "shortName": "CatalogServiceClient" + }, + "fullName": "google.cloud.dataplex_v1.CatalogServiceClient.list_aspect_types", + "method": { + "fullName": "google.cloud.dataplex.v1.CatalogService.ListAspectTypes", + "service": { + "fullName": "google.cloud.dataplex.v1.CatalogService", + "shortName": "CatalogService" + }, + "shortName": "ListAspectTypes" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dataplex_v1.types.ListAspectTypesRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.dataplex_v1.services.catalog_service.pagers.ListAspectTypesPager", + "shortName": "list_aspect_types" + }, + "description": "Sample for ListAspectTypes", + "file": "dataplex_v1_generated_catalog_service_list_aspect_types_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dataplex_v1_generated_CatalogService_ListAspectTypes_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dataplex_v1_generated_catalog_service_list_aspect_types_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.dataplex_v1.CatalogServiceAsyncClient", + "shortName": "CatalogServiceAsyncClient" + }, 
+ "fullName": "google.cloud.dataplex_v1.CatalogServiceAsyncClient.list_entries", + "method": { + "fullName": "google.cloud.dataplex.v1.CatalogService.ListEntries", + "service": { + "fullName": "google.cloud.dataplex.v1.CatalogService", + "shortName": "CatalogService" + }, + "shortName": "ListEntries" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dataplex_v1.types.ListEntriesRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.dataplex_v1.services.catalog_service.pagers.ListEntriesAsyncPager", + "shortName": "list_entries" + }, + "description": "Sample for ListEntries", + "file": "dataplex_v1_generated_catalog_service_list_entries_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dataplex_v1_generated_CatalogService_ListEntries_async", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dataplex_v1_generated_catalog_service_list_entries_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.dataplex_v1.CatalogServiceClient", + "shortName": "CatalogServiceClient" + }, + "fullName": "google.cloud.dataplex_v1.CatalogServiceClient.list_entries", + "method": { + "fullName": "google.cloud.dataplex.v1.CatalogService.ListEntries", + "service": { + "fullName": "google.cloud.dataplex.v1.CatalogService", + "shortName": "CatalogService" + }, + "shortName": "ListEntries" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dataplex_v1.types.ListEntriesRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.dataplex_v1.services.catalog_service.pagers.ListEntriesPager", + "shortName": "list_entries" + }, + "description": "Sample for ListEntries", + "file": "dataplex_v1_generated_catalog_service_list_entries_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dataplex_v1_generated_CatalogService_ListEntries_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dataplex_v1_generated_catalog_service_list_entries_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.dataplex_v1.CatalogServiceAsyncClient", + "shortName": "CatalogServiceAsyncClient" + }, + "fullName": "google.cloud.dataplex_v1.CatalogServiceAsyncClient.list_entry_groups", + "method": { + "fullName": "google.cloud.dataplex.v1.CatalogService.ListEntryGroups", + "service": { + 
"fullName": "google.cloud.dataplex.v1.CatalogService", + "shortName": "CatalogService" + }, + "shortName": "ListEntryGroups" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dataplex_v1.types.ListEntryGroupsRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.dataplex_v1.services.catalog_service.pagers.ListEntryGroupsAsyncPager", + "shortName": "list_entry_groups" + }, + "description": "Sample for ListEntryGroups", + "file": "dataplex_v1_generated_catalog_service_list_entry_groups_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dataplex_v1_generated_CatalogService_ListEntryGroups_async", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dataplex_v1_generated_catalog_service_list_entry_groups_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.dataplex_v1.CatalogServiceClient", + "shortName": "CatalogServiceClient" + }, + "fullName": "google.cloud.dataplex_v1.CatalogServiceClient.list_entry_groups", + "method": { + "fullName": "google.cloud.dataplex.v1.CatalogService.ListEntryGroups", + "service": { + "fullName": "google.cloud.dataplex.v1.CatalogService", + "shortName": "CatalogService" + }, + "shortName": "ListEntryGroups" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dataplex_v1.types.ListEntryGroupsRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.dataplex_v1.services.catalog_service.pagers.ListEntryGroupsPager", + "shortName": "list_entry_groups" + }, + "description": "Sample for ListEntryGroups", + "file": "dataplex_v1_generated_catalog_service_list_entry_groups_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dataplex_v1_generated_CatalogService_ListEntryGroups_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dataplex_v1_generated_catalog_service_list_entry_groups_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.dataplex_v1.CatalogServiceAsyncClient", + "shortName": "CatalogServiceAsyncClient" + }, + "fullName": "google.cloud.dataplex_v1.CatalogServiceAsyncClient.list_entry_types", + "method": { + "fullName": "google.cloud.dataplex.v1.CatalogService.ListEntryTypes", + "service": { + "fullName": "google.cloud.dataplex.v1.CatalogService", + "shortName": "CatalogService" + }, + 
"shortName": "ListEntryTypes" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dataplex_v1.types.ListEntryTypesRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.dataplex_v1.services.catalog_service.pagers.ListEntryTypesAsyncPager", + "shortName": "list_entry_types" + }, + "description": "Sample for ListEntryTypes", + "file": "dataplex_v1_generated_catalog_service_list_entry_types_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dataplex_v1_generated_CatalogService_ListEntryTypes_async", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dataplex_v1_generated_catalog_service_list_entry_types_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.dataplex_v1.CatalogServiceClient", + "shortName": "CatalogServiceClient" + }, + "fullName": "google.cloud.dataplex_v1.CatalogServiceClient.list_entry_types", + "method": { + "fullName": "google.cloud.dataplex.v1.CatalogService.ListEntryTypes", + "service": { + "fullName": "google.cloud.dataplex.v1.CatalogService", + "shortName": "CatalogService" + }, + "shortName": "ListEntryTypes" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dataplex_v1.types.ListEntryTypesRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.dataplex_v1.services.catalog_service.pagers.ListEntryTypesPager", + "shortName": "list_entry_types" + }, + "description": "Sample for ListEntryTypes", + "file": "dataplex_v1_generated_catalog_service_list_entry_types_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dataplex_v1_generated_CatalogService_ListEntryTypes_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dataplex_v1_generated_catalog_service_list_entry_types_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.dataplex_v1.CatalogServiceAsyncClient", + "shortName": "CatalogServiceAsyncClient" + }, + "fullName": "google.cloud.dataplex_v1.CatalogServiceAsyncClient.list_metadata_jobs", + "method": { + "fullName": "google.cloud.dataplex.v1.CatalogService.ListMetadataJobs", + "service": { + "fullName": "google.cloud.dataplex.v1.CatalogService", + "shortName": "CatalogService" + }, + "shortName": "ListMetadataJobs" + }, + "parameters": [ + { + "name": "request", + "type": 
"google.cloud.dataplex_v1.types.ListMetadataJobsRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.dataplex_v1.services.catalog_service.pagers.ListMetadataJobsAsyncPager", + "shortName": "list_metadata_jobs" + }, + "description": "Sample for ListMetadataJobs", + "file": "dataplex_v1_generated_catalog_service_list_metadata_jobs_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dataplex_v1_generated_CatalogService_ListMetadataJobs_async", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dataplex_v1_generated_catalog_service_list_metadata_jobs_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.dataplex_v1.CatalogServiceClient", + "shortName": "CatalogServiceClient" + }, + "fullName": "google.cloud.dataplex_v1.CatalogServiceClient.list_metadata_jobs", + "method": { + "fullName": "google.cloud.dataplex.v1.CatalogService.ListMetadataJobs", + "service": { + "fullName": "google.cloud.dataplex.v1.CatalogService", + "shortName": "CatalogService" + }, + "shortName": "ListMetadataJobs" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dataplex_v1.types.ListMetadataJobsRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.dataplex_v1.services.catalog_service.pagers.ListMetadataJobsPager", + "shortName": "list_metadata_jobs" + }, + "description": "Sample for ListMetadataJobs", + "file": "dataplex_v1_generated_catalog_service_list_metadata_jobs_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dataplex_v1_generated_CatalogService_ListMetadataJobs_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dataplex_v1_generated_catalog_service_list_metadata_jobs_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.dataplex_v1.CatalogServiceAsyncClient", + "shortName": "CatalogServiceAsyncClient" + }, + "fullName": "google.cloud.dataplex_v1.CatalogServiceAsyncClient.lookup_entry", + "method": { + "fullName": "google.cloud.dataplex.v1.CatalogService.LookupEntry", + "service": { + "fullName": "google.cloud.dataplex.v1.CatalogService", + "shortName": "CatalogService" + }, + "shortName": "LookupEntry" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dataplex_v1.types.LookupEntryRequest" + }, + { + "name": "retry", + "type": 
"google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.dataplex_v1.types.Entry", + "shortName": "lookup_entry" + }, + "description": "Sample for LookupEntry", + "file": "dataplex_v1_generated_catalog_service_lookup_entry_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dataplex_v1_generated_CatalogService_LookupEntry_async", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 46, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 49, + "start": 47, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dataplex_v1_generated_catalog_service_lookup_entry_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.dataplex_v1.CatalogServiceClient", + "shortName": "CatalogServiceClient" + }, + "fullName": "google.cloud.dataplex_v1.CatalogServiceClient.lookup_entry", + "method": { + "fullName": "google.cloud.dataplex.v1.CatalogService.LookupEntry", + "service": { + "fullName": "google.cloud.dataplex.v1.CatalogService", + "shortName": "CatalogService" + }, + "shortName": "LookupEntry" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dataplex_v1.types.LookupEntryRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.dataplex_v1.types.Entry", + "shortName": "lookup_entry" + }, + "description": "Sample for LookupEntry", + "file": "dataplex_v1_generated_catalog_service_lookup_entry_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dataplex_v1_generated_CatalogService_LookupEntry_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 46, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 49, + "start": 47, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dataplex_v1_generated_catalog_service_lookup_entry_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.dataplex_v1.CatalogServiceAsyncClient", + "shortName": "CatalogServiceAsyncClient" + }, + "fullName": "google.cloud.dataplex_v1.CatalogServiceAsyncClient.search_entries", + "method": { + "fullName": "google.cloud.dataplex.v1.CatalogService.SearchEntries", + "service": { + "fullName": "google.cloud.dataplex.v1.CatalogService", + "shortName": "CatalogService" + }, + "shortName": "SearchEntries" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dataplex_v1.types.SearchEntriesRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "query", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.dataplex_v1.services.catalog_service.pagers.SearchEntriesAsyncPager", + 
"shortName": "search_entries" + }, + "description": "Sample for SearchEntries", + "file": "dataplex_v1_generated_catalog_service_search_entries_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dataplex_v1_generated_CatalogService_SearchEntries_async", + "segments": [ + { + "end": 53, + "start": 27, + "type": "FULL" + }, + { + "end": 53, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 46, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 49, + "start": 47, + "type": "REQUEST_EXECUTION" + }, + { + "end": 54, + "start": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dataplex_v1_generated_catalog_service_search_entries_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.dataplex_v1.CatalogServiceClient", + "shortName": "CatalogServiceClient" + }, + "fullName": "google.cloud.dataplex_v1.CatalogServiceClient.search_entries", + "method": { + "fullName": "google.cloud.dataplex.v1.CatalogService.SearchEntries", + "service": { + "fullName": "google.cloud.dataplex.v1.CatalogService", + "shortName": "CatalogService" + }, + "shortName": "SearchEntries" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dataplex_v1.types.SearchEntriesRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "query", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.dataplex_v1.services.catalog_service.pagers.SearchEntriesPager", + "shortName": "search_entries" + }, + "description": "Sample for SearchEntries", + "file": "dataplex_v1_generated_catalog_service_search_entries_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dataplex_v1_generated_CatalogService_SearchEntries_sync", + "segments": [ + { + "end": 53, + "start": 27, + "type": "FULL" + }, + { + "end": 53, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 46, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 49, + "start": 47, + "type": "REQUEST_EXECUTION" + }, + { + "end": 54, + "start": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dataplex_v1_generated_catalog_service_search_entries_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.dataplex_v1.CatalogServiceAsyncClient", + "shortName": "CatalogServiceAsyncClient" + }, + "fullName": "google.cloud.dataplex_v1.CatalogServiceAsyncClient.update_aspect_type", + "method": { + "fullName": "google.cloud.dataplex.v1.CatalogService.UpdateAspectType", + "service": { + "fullName": "google.cloud.dataplex.v1.CatalogService", + "shortName": "CatalogService" + }, + "shortName": "UpdateAspectType" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dataplex_v1.types.UpdateAspectTypeRequest" + }, + { + "name": "aspect_type", + "type": "google.cloud.dataplex_v1.types.AspectType" + }, + { + "name": "update_mask", + "type": "google.protobuf.field_mask_pb2.FieldMask" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation_async.AsyncOperation", + 
"shortName": "update_aspect_type" + }, + "description": "Sample for UpdateAspectType", + "file": "dataplex_v1_generated_catalog_service_update_aspect_type_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dataplex_v1_generated_CatalogService_UpdateAspectType_async", + "segments": [ + { + "end": 59, + "start": 27, + "type": "FULL" + }, + { + "end": 59, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 49, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 56, + "start": 50, + "type": "REQUEST_EXECUTION" + }, + { + "end": 60, + "start": 57, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dataplex_v1_generated_catalog_service_update_aspect_type_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.dataplex_v1.CatalogServiceClient", + "shortName": "CatalogServiceClient" + }, + "fullName": "google.cloud.dataplex_v1.CatalogServiceClient.update_aspect_type", + "method": { + "fullName": "google.cloud.dataplex.v1.CatalogService.UpdateAspectType", + "service": { + "fullName": "google.cloud.dataplex.v1.CatalogService", + "shortName": "CatalogService" + }, + "shortName": "UpdateAspectType" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dataplex_v1.types.UpdateAspectTypeRequest" + }, + { + "name": "aspect_type", + "type": "google.cloud.dataplex_v1.types.AspectType" + }, + { + "name": "update_mask", + "type": "google.protobuf.field_mask_pb2.FieldMask" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation.Operation", + "shortName": "update_aspect_type" + }, + "description": "Sample for UpdateAspectType", + "file": "dataplex_v1_generated_catalog_service_update_aspect_type_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dataplex_v1_generated_CatalogService_UpdateAspectType_sync", + "segments": [ + { + "end": 59, + "start": 27, + "type": "FULL" + }, + { + "end": 59, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 49, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 56, + "start": 50, + "type": "REQUEST_EXECUTION" + }, + { + "end": 60, + "start": 57, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dataplex_v1_generated_catalog_service_update_aspect_type_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.dataplex_v1.CatalogServiceAsyncClient", + "shortName": "CatalogServiceAsyncClient" + }, + "fullName": "google.cloud.dataplex_v1.CatalogServiceAsyncClient.update_entry_group", + "method": { + "fullName": "google.cloud.dataplex.v1.CatalogService.UpdateEntryGroup", + "service": { + "fullName": "google.cloud.dataplex.v1.CatalogService", + "shortName": "CatalogService" + }, + "shortName": "UpdateEntryGroup" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dataplex_v1.types.UpdateEntryGroupRequest" + }, + { + "name": "entry_group", + "type": "google.cloud.dataplex_v1.types.EntryGroup" + }, + { + "name": "update_mask", + "type": "google.protobuf.field_mask_pb2.FieldMask" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": 
"Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation_async.AsyncOperation", + "shortName": "update_entry_group" + }, + "description": "Sample for UpdateEntryGroup", + "file": "dataplex_v1_generated_catalog_service_update_entry_group_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dataplex_v1_generated_CatalogService_UpdateEntryGroup_async", + "segments": [ + { + "end": 54, + "start": 27, + "type": "FULL" + }, + { + "end": 54, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 44, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 51, + "start": 45, + "type": "REQUEST_EXECUTION" + }, + { + "end": 55, + "start": 52, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dataplex_v1_generated_catalog_service_update_entry_group_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.dataplex_v1.CatalogServiceClient", + "shortName": "CatalogServiceClient" + }, + "fullName": "google.cloud.dataplex_v1.CatalogServiceClient.update_entry_group", + "method": { + "fullName": "google.cloud.dataplex.v1.CatalogService.UpdateEntryGroup", + "service": { + "fullName": "google.cloud.dataplex.v1.CatalogService", + "shortName": "CatalogService" + }, + "shortName": "UpdateEntryGroup" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dataplex_v1.types.UpdateEntryGroupRequest" + }, + { + "name": "entry_group", + "type": "google.cloud.dataplex_v1.types.EntryGroup" + }, + { + "name": "update_mask", + "type": "google.protobuf.field_mask_pb2.FieldMask" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation.Operation", + "shortName": "update_entry_group" + }, + "description": "Sample for UpdateEntryGroup", + "file": "dataplex_v1_generated_catalog_service_update_entry_group_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dataplex_v1_generated_CatalogService_UpdateEntryGroup_sync", + "segments": [ + { + "end": 54, + "start": 27, + "type": "FULL" + }, + { + "end": 54, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 44, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 51, + "start": 45, + "type": "REQUEST_EXECUTION" + }, + { + "end": 55, + "start": 52, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dataplex_v1_generated_catalog_service_update_entry_group_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.dataplex_v1.CatalogServiceAsyncClient", + "shortName": "CatalogServiceAsyncClient" + }, + "fullName": "google.cloud.dataplex_v1.CatalogServiceAsyncClient.update_entry_type", + "method": { + "fullName": "google.cloud.dataplex.v1.CatalogService.UpdateEntryType", + "service": { + "fullName": "google.cloud.dataplex.v1.CatalogService", + "shortName": "CatalogService" + }, + "shortName": "UpdateEntryType" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dataplex_v1.types.UpdateEntryTypeRequest" + }, + { + "name": "entry_type", + "type": "google.cloud.dataplex_v1.types.EntryType" + }, + { + "name": "update_mask", + "type": "google.protobuf.field_mask_pb2.FieldMask" + }, + { + "name": "retry", + "type": 
"google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation_async.AsyncOperation", + "shortName": "update_entry_type" + }, + "description": "Sample for UpdateEntryType", + "file": "dataplex_v1_generated_catalog_service_update_entry_type_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dataplex_v1_generated_CatalogService_UpdateEntryType_async", + "segments": [ + { + "end": 54, + "start": 27, + "type": "FULL" + }, + { + "end": 54, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 44, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 51, + "start": 45, + "type": "REQUEST_EXECUTION" + }, + { + "end": 55, + "start": 52, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dataplex_v1_generated_catalog_service_update_entry_type_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.dataplex_v1.CatalogServiceClient", + "shortName": "CatalogServiceClient" + }, + "fullName": "google.cloud.dataplex_v1.CatalogServiceClient.update_entry_type", + "method": { + "fullName": "google.cloud.dataplex.v1.CatalogService.UpdateEntryType", + "service": { + "fullName": "google.cloud.dataplex.v1.CatalogService", + "shortName": "CatalogService" + }, + "shortName": "UpdateEntryType" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dataplex_v1.types.UpdateEntryTypeRequest" + }, + { + "name": "entry_type", + "type": "google.cloud.dataplex_v1.types.EntryType" + }, + { + "name": "update_mask", + "type": "google.protobuf.field_mask_pb2.FieldMask" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation.Operation", + "shortName": "update_entry_type" + }, + "description": "Sample for UpdateEntryType", + "file": "dataplex_v1_generated_catalog_service_update_entry_type_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dataplex_v1_generated_CatalogService_UpdateEntryType_sync", + "segments": [ + { + "end": 54, + "start": 27, + "type": "FULL" + }, + { + "end": 54, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 44, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 51, + "start": 45, + "type": "REQUEST_EXECUTION" + }, + { + "end": 55, + "start": 52, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dataplex_v1_generated_catalog_service_update_entry_type_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.dataplex_v1.CatalogServiceAsyncClient", + "shortName": "CatalogServiceAsyncClient" + }, + "fullName": "google.cloud.dataplex_v1.CatalogServiceAsyncClient.update_entry", + "method": { + "fullName": "google.cloud.dataplex.v1.CatalogService.UpdateEntry", + "service": { + "fullName": "google.cloud.dataplex.v1.CatalogService", + "shortName": "CatalogService" + }, + "shortName": "UpdateEntry" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dataplex_v1.types.UpdateEntryRequest" + }, + { + "name": "entry", + "type": "google.cloud.dataplex_v1.types.Entry" + }, + { + "name": "update_mask", + "type": 
"google.protobuf.field_mask_pb2.FieldMask" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.dataplex_v1.types.Entry", + "shortName": "update_entry" + }, + "description": "Sample for UpdateEntry", + "file": "dataplex_v1_generated_catalog_service_update_entry_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dataplex_v1_generated_CatalogService_UpdateEntry_async", + "segments": [ + { + "end": 54, + "start": 27, + "type": "FULL" + }, + { + "end": 54, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 48, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 51, + "start": 49, + "type": "REQUEST_EXECUTION" + }, + { + "end": 55, + "start": 52, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dataplex_v1_generated_catalog_service_update_entry_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.dataplex_v1.CatalogServiceClient", + "shortName": "CatalogServiceClient" + }, + "fullName": "google.cloud.dataplex_v1.CatalogServiceClient.update_entry", + "method": { + "fullName": "google.cloud.dataplex.v1.CatalogService.UpdateEntry", + "service": { + "fullName": "google.cloud.dataplex.v1.CatalogService", + "shortName": "CatalogService" + }, + "shortName": "UpdateEntry" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dataplex_v1.types.UpdateEntryRequest" + }, + { + "name": "entry", + "type": "google.cloud.dataplex_v1.types.Entry" + }, + { + "name": "update_mask", + "type": "google.protobuf.field_mask_pb2.FieldMask" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.dataplex_v1.types.Entry", + "shortName": "update_entry" + }, + "description": "Sample for UpdateEntry", + "file": "dataplex_v1_generated_catalog_service_update_entry_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dataplex_v1_generated_CatalogService_UpdateEntry_sync", + "segments": [ + { + "end": 54, + "start": 27, + "type": "FULL" + }, + { + "end": 54, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 48, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 51, + "start": 49, + "type": "REQUEST_EXECUTION" + }, + { + "end": 55, + "start": 52, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dataplex_v1_generated_catalog_service_update_entry_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.dataplex_v1.ContentServiceAsyncClient", + "shortName": "ContentServiceAsyncClient" + }, + "fullName": "google.cloud.dataplex_v1.ContentServiceAsyncClient.create_content", + "method": { + "fullName": "google.cloud.dataplex.v1.ContentService.CreateContent", + "service": { + "fullName": "google.cloud.dataplex.v1.ContentService", + "shortName": "ContentService" + }, + "shortName": "CreateContent" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dataplex_v1.types.CreateContentRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "content", + "type": "google.cloud.dataplex_v1.types.Content" + }, + { + "name": 
"retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.dataplex_v1.types.Content", + "shortName": "create_content" + }, + "description": "Sample for CreateContent", + "file": "dataplex_v1_generated_content_service_create_content_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dataplex_v1_generated_ContentService_CreateContent_async", + "segments": [ + { + "end": 57, + "start": 27, + "type": "FULL" + }, + { + "end": 57, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 51, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 54, + "start": 52, + "type": "REQUEST_EXECUTION" + }, + { + "end": 58, + "start": 55, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dataplex_v1_generated_content_service_create_content_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.dataplex_v1.ContentServiceClient", + "shortName": "ContentServiceClient" + }, + "fullName": "google.cloud.dataplex_v1.ContentServiceClient.create_content", + "method": { + "fullName": "google.cloud.dataplex.v1.ContentService.CreateContent", + "service": { + "fullName": "google.cloud.dataplex.v1.ContentService", + "shortName": "ContentService" + }, + "shortName": "CreateContent" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dataplex_v1.types.CreateContentRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "content", + "type": "google.cloud.dataplex_v1.types.Content" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.dataplex_v1.types.Content", + "shortName": "create_content" + }, + "description": "Sample for CreateContent", + "file": "dataplex_v1_generated_content_service_create_content_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dataplex_v1_generated_ContentService_CreateContent_sync", + "segments": [ + { + "end": 57, + "start": 27, + "type": "FULL" + }, + { + "end": 57, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 51, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 54, + "start": 52, + "type": "REQUEST_EXECUTION" + }, + { + "end": 58, + "start": 55, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dataplex_v1_generated_content_service_create_content_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.dataplex_v1.ContentServiceAsyncClient", + "shortName": "ContentServiceAsyncClient" + }, + "fullName": "google.cloud.dataplex_v1.ContentServiceAsyncClient.delete_content", + "method": { + "fullName": "google.cloud.dataplex.v1.ContentService.DeleteContent", + "service": { + "fullName": "google.cloud.dataplex.v1.ContentService", + "shortName": "ContentService" + }, + "shortName": "DeleteContent" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dataplex_v1.types.DeleteContentRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": 
"Sequence[Tuple[str, str]" + } + ], + "shortName": "delete_content" + }, + "description": "Sample for DeleteContent", + "file": "dataplex_v1_generated_content_service_delete_content_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dataplex_v1_generated_ContentService_DeleteContent_async", + "segments": [ + { + "end": 49, + "start": 27, + "type": "FULL" + }, + { + "end": 49, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dataplex_v1_generated_content_service_delete_content_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.dataplex_v1.ContentServiceClient", + "shortName": "ContentServiceClient" + }, + "fullName": "google.cloud.dataplex_v1.ContentServiceClient.delete_content", + "method": { + "fullName": "google.cloud.dataplex.v1.ContentService.DeleteContent", + "service": { + "fullName": "google.cloud.dataplex.v1.ContentService", + "shortName": "ContentService" + }, + "shortName": "DeleteContent" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dataplex_v1.types.DeleteContentRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "shortName": "delete_content" + }, + "description": "Sample for DeleteContent", + "file": "dataplex_v1_generated_content_service_delete_content_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dataplex_v1_generated_ContentService_DeleteContent_sync", + "segments": [ + { + "end": 49, + "start": 27, + "type": "FULL" + }, + { + "end": 49, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dataplex_v1_generated_content_service_delete_content_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.dataplex_v1.ContentServiceAsyncClient", + "shortName": "ContentServiceAsyncClient" + }, + "fullName": "google.cloud.dataplex_v1.ContentServiceAsyncClient.get_content", + "method": { + "fullName": "google.cloud.dataplex.v1.ContentService.GetContent", + "service": { + "fullName": "google.cloud.dataplex.v1.ContentService", + "shortName": "ContentService" + }, + "shortName": "GetContent" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dataplex_v1.types.GetContentRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.dataplex_v1.types.Content", + "shortName": "get_content" + }, + "description": "Sample for GetContent", + "file": "dataplex_v1_generated_content_service_get_content_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dataplex_v1_generated_ContentService_GetContent_async", + "segments": [ + { + "end": 51, + "start": 27, + 
"type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dataplex_v1_generated_content_service_get_content_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.dataplex_v1.ContentServiceClient", + "shortName": "ContentServiceClient" + }, + "fullName": "google.cloud.dataplex_v1.ContentServiceClient.get_content", + "method": { + "fullName": "google.cloud.dataplex.v1.ContentService.GetContent", + "service": { + "fullName": "google.cloud.dataplex.v1.ContentService", + "shortName": "ContentService" + }, + "shortName": "GetContent" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dataplex_v1.types.GetContentRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.dataplex_v1.types.Content", + "shortName": "get_content" + }, + "description": "Sample for GetContent", + "file": "dataplex_v1_generated_content_service_get_content_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dataplex_v1_generated_ContentService_GetContent_sync", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dataplex_v1_generated_content_service_get_content_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.dataplex_v1.ContentServiceAsyncClient", + "shortName": "ContentServiceAsyncClient" + }, + "fullName": "google.cloud.dataplex_v1.ContentServiceAsyncClient.get_iam_policy", + "method": { + "fullName": "google.cloud.dataplex.v1.ContentService.GetIamPolicy", + "service": { + "fullName": "google.cloud.dataplex.v1.ContentService", + "shortName": "ContentService" + }, + "shortName": "GetIamPolicy" + }, + "parameters": [ + { + "name": "request", + "type": "google.iam.v1.iam_policy_pb2.GetIamPolicyRequest" + }, + { + "name": "resource", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.iam.v1.policy_pb2.Policy", + "shortName": "get_iam_policy" + }, + "description": "Sample for GetIamPolicy", + "file": "dataplex_v1_generated_content_service_get_iam_policy_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dataplex_v1_generated_ContentService_GetIamPolicy_async", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 41, + "start": 39, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 46, + "start": 42, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 49, + "start": 47, + "type": 
"REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dataplex_v1_generated_content_service_get_iam_policy_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.dataplex_v1.ContentServiceClient", + "shortName": "ContentServiceClient" + }, + "fullName": "google.cloud.dataplex_v1.ContentServiceClient.get_iam_policy", + "method": { + "fullName": "google.cloud.dataplex.v1.ContentService.GetIamPolicy", + "service": { + "fullName": "google.cloud.dataplex.v1.ContentService", + "shortName": "ContentService" + }, + "shortName": "GetIamPolicy" + }, + "parameters": [ + { + "name": "request", + "type": "google.iam.v1.iam_policy_pb2.GetIamPolicyRequest" + }, + { + "name": "resource", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.iam.v1.policy_pb2.Policy", + "shortName": "get_iam_policy" + }, + "description": "Sample for GetIamPolicy", + "file": "dataplex_v1_generated_content_service_get_iam_policy_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dataplex_v1_generated_ContentService_GetIamPolicy_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 41, + "start": 39, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 46, + "start": 42, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 49, + "start": 47, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dataplex_v1_generated_content_service_get_iam_policy_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.dataplex_v1.ContentServiceAsyncClient", + "shortName": "ContentServiceAsyncClient" + }, + "fullName": "google.cloud.dataplex_v1.ContentServiceAsyncClient.list_content", + "method": { + "fullName": "google.cloud.dataplex.v1.ContentService.ListContent", + "service": { + "fullName": "google.cloud.dataplex.v1.ContentService", + "shortName": "ContentService" + }, + "shortName": "ListContent" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dataplex_v1.types.ListContentRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.dataplex_v1.services.content_service.pagers.ListContentAsyncPager", + "shortName": "list_content" + }, + "description": "Sample for ListContent", + "file": "dataplex_v1_generated_content_service_list_content_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dataplex_v1_generated_ContentService_ListContent_async", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dataplex_v1_generated_content_service_list_content_async.py" + }, + { + "canonical": true, + "clientMethod": 
{ + "client": { + "fullName": "google.cloud.dataplex_v1.ContentServiceClient", + "shortName": "ContentServiceClient" + }, + "fullName": "google.cloud.dataplex_v1.ContentServiceClient.list_content", + "method": { + "fullName": "google.cloud.dataplex.v1.ContentService.ListContent", + "service": { + "fullName": "google.cloud.dataplex.v1.ContentService", + "shortName": "ContentService" + }, + "shortName": "ListContent" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dataplex_v1.types.ListContentRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.dataplex_v1.services.content_service.pagers.ListContentPager", + "shortName": "list_content" + }, + "description": "Sample for ListContent", + "file": "dataplex_v1_generated_content_service_list_content_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dataplex_v1_generated_ContentService_ListContent_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dataplex_v1_generated_content_service_list_content_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.dataplex_v1.ContentServiceAsyncClient", + "shortName": "ContentServiceAsyncClient" + }, + "fullName": "google.cloud.dataplex_v1.ContentServiceAsyncClient.set_iam_policy", + "method": { + "fullName": "google.cloud.dataplex.v1.ContentService.SetIamPolicy", + "service": { + "fullName": "google.cloud.dataplex.v1.ContentService", + "shortName": "ContentService" + }, + "shortName": "SetIamPolicy" + }, + "parameters": [ + { + "name": "request", + "type": "google.iam.v1.iam_policy_pb2.SetIamPolicyRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.iam.v1.policy_pb2.Policy", + "shortName": "set_iam_policy" + }, + "description": "Sample for SetIamPolicy", + "file": "dataplex_v1_generated_content_service_set_iam_policy_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dataplex_v1_generated_ContentService_SetIamPolicy_async", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 41, + "start": 39, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 46, + "start": 42, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 49, + "start": 47, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dataplex_v1_generated_content_service_set_iam_policy_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.dataplex_v1.ContentServiceClient", + "shortName": "ContentServiceClient" + }, + "fullName": "google.cloud.dataplex_v1.ContentServiceClient.set_iam_policy", + "method": { + "fullName": 
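ListContent, just above, returns a ListContentPager (ListContentAsyncPager for the async client) instead of a raw response; iterating it yields Content messages and fetches pages on demand. Sketch with a hypothetical lake:

    from google.cloud import dataplex_v1

    client = dataplex_v1.ContentServiceClient()
    for content in client.list_content(
        parent="projects/my-project/locations/us-central1/lakes/my-lake"
    ):
        print(content.name)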
"google.cloud.dataplex.v1.ContentService.SetIamPolicy", + "service": { + "fullName": "google.cloud.dataplex.v1.ContentService", + "shortName": "ContentService" + }, + "shortName": "SetIamPolicy" + }, + "parameters": [ + { + "name": "request", + "type": "google.iam.v1.iam_policy_pb2.SetIamPolicyRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.iam.v1.policy_pb2.Policy", + "shortName": "set_iam_policy" + }, + "description": "Sample for SetIamPolicy", + "file": "dataplex_v1_generated_content_service_set_iam_policy_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dataplex_v1_generated_ContentService_SetIamPolicy_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 41, + "start": 39, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 46, + "start": 42, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 49, + "start": 47, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dataplex_v1_generated_content_service_set_iam_policy_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.dataplex_v1.ContentServiceAsyncClient", + "shortName": "ContentServiceAsyncClient" + }, + "fullName": "google.cloud.dataplex_v1.ContentServiceAsyncClient.test_iam_permissions", + "method": { + "fullName": "google.cloud.dataplex.v1.ContentService.TestIamPermissions", + "service": { + "fullName": "google.cloud.dataplex.v1.ContentService", + "shortName": "ContentService" + }, + "shortName": "TestIamPermissions" + }, + "parameters": [ + { + "name": "request", + "type": "google.iam.v1.iam_policy_pb2.TestIamPermissionsRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.iam.v1.iam_policy_pb2.TestIamPermissionsResponse", + "shortName": "test_iam_permissions" + }, + "description": "Sample for TestIamPermissions", + "file": "dataplex_v1_generated_content_service_test_iam_permissions_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dataplex_v1_generated_ContentService_TestIamPermissions_async", + "segments": [ + { + "end": 53, + "start": 27, + "type": "FULL" + }, + { + "end": 53, + "start": 27, + "type": "SHORT" + }, + { + "end": 41, + "start": 39, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 47, + "start": 42, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 50, + "start": 48, + "type": "REQUEST_EXECUTION" + }, + { + "end": 54, + "start": 51, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dataplex_v1_generated_content_service_test_iam_permissions_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.dataplex_v1.ContentServiceClient", + "shortName": "ContentServiceClient" + }, + "fullName": "google.cloud.dataplex_v1.ContentServiceClient.test_iam_permissions", + "method": { + "fullName": "google.cloud.dataplex.v1.ContentService.TestIamPermissions", + "service": { + "fullName": "google.cloud.dataplex.v1.ContentService", + "shortName": "ContentService" + }, + "shortName": "TestIamPermissions" + }, + "parameters": [ + { + "name": "request", + 
"type": "google.iam.v1.iam_policy_pb2.TestIamPermissionsRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.iam.v1.iam_policy_pb2.TestIamPermissionsResponse", + "shortName": "test_iam_permissions" + }, + "description": "Sample for TestIamPermissions", + "file": "dataplex_v1_generated_content_service_test_iam_permissions_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dataplex_v1_generated_ContentService_TestIamPermissions_sync", + "segments": [ + { + "end": 53, + "start": 27, + "type": "FULL" + }, + { + "end": 53, + "start": 27, + "type": "SHORT" + }, + { + "end": 41, + "start": 39, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 47, + "start": 42, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 50, + "start": 48, + "type": "REQUEST_EXECUTION" + }, + { + "end": 54, + "start": 51, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dataplex_v1_generated_content_service_test_iam_permissions_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.dataplex_v1.ContentServiceAsyncClient", + "shortName": "ContentServiceAsyncClient" + }, + "fullName": "google.cloud.dataplex_v1.ContentServiceAsyncClient.update_content", + "method": { + "fullName": "google.cloud.dataplex.v1.ContentService.UpdateContent", + "service": { + "fullName": "google.cloud.dataplex.v1.ContentService", + "shortName": "ContentService" + }, + "shortName": "UpdateContent" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dataplex_v1.types.UpdateContentRequest" + }, + { + "name": "content", + "type": "google.cloud.dataplex_v1.types.Content" + }, + { + "name": "update_mask", + "type": "google.protobuf.field_mask_pb2.FieldMask" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.dataplex_v1.types.Content", + "shortName": "update_content" + }, + "description": "Sample for UpdateContent", + "file": "dataplex_v1_generated_content_service_update_content_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dataplex_v1_generated_ContentService_UpdateContent_async", + "segments": [ + { + "end": 56, + "start": 27, + "type": "FULL" + }, + { + "end": 56, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 50, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 53, + "start": 51, + "type": "REQUEST_EXECUTION" + }, + { + "end": 57, + "start": 54, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dataplex_v1_generated_content_service_update_content_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.dataplex_v1.ContentServiceClient", + "shortName": "ContentServiceClient" + }, + "fullName": "google.cloud.dataplex_v1.ContentServiceClient.update_content", + "method": { + "fullName": "google.cloud.dataplex.v1.ContentService.UpdateContent", + "service": { + "fullName": "google.cloud.dataplex.v1.ContentService", + "shortName": "ContentService" + }, + "shortName": "UpdateContent" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dataplex_v1.types.UpdateContentRequest" + }, + { + "name": "content", + "type": 
"google.cloud.dataplex_v1.types.Content" + }, + { + "name": "update_mask", + "type": "google.protobuf.field_mask_pb2.FieldMask" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.dataplex_v1.types.Content", + "shortName": "update_content" + }, + "description": "Sample for UpdateContent", + "file": "dataplex_v1_generated_content_service_update_content_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dataplex_v1_generated_ContentService_UpdateContent_sync", + "segments": [ + { + "end": 56, + "start": 27, + "type": "FULL" + }, + { + "end": 56, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 50, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 53, + "start": 51, + "type": "REQUEST_EXECUTION" + }, + { + "end": 57, + "start": 54, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dataplex_v1_generated_content_service_update_content_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.dataplex_v1.DataScanServiceAsyncClient", + "shortName": "DataScanServiceAsyncClient" + }, + "fullName": "google.cloud.dataplex_v1.DataScanServiceAsyncClient.create_data_scan", + "method": { + "fullName": "google.cloud.dataplex.v1.DataScanService.CreateDataScan", + "service": { + "fullName": "google.cloud.dataplex.v1.DataScanService", + "shortName": "DataScanService" + }, + "shortName": "CreateDataScan" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dataplex_v1.types.CreateDataScanRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "data_scan", + "type": "google.cloud.dataplex_v1.types.DataScan" + }, + { + "name": "data_scan_id", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation_async.AsyncOperation", + "shortName": "create_data_scan" + }, + "description": "Sample for CreateDataScan", + "file": "dataplex_v1_generated_data_scan_service_create_data_scan_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dataplex_v1_generated_DataScanService_CreateDataScan_async", + "segments": [ + { + "end": 61, + "start": 27, + "type": "FULL" + }, + { + "end": 61, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 51, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 58, + "start": 52, + "type": "REQUEST_EXECUTION" + }, + { + "end": 62, + "start": 59, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dataplex_v1_generated_data_scan_service_create_data_scan_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.dataplex_v1.DataScanServiceClient", + "shortName": "DataScanServiceClient" + }, + "fullName": "google.cloud.dataplex_v1.DataScanServiceClient.create_data_scan", + "method": { + "fullName": "google.cloud.dataplex.v1.DataScanService.CreateDataScan", + "service": { + "fullName": "google.cloud.dataplex.v1.DataScanService", + "shortName": "DataScanService" + }, + "shortName": "CreateDataScan" + }, + "parameters": [ + { + "name": "request", + "type": 
"google.cloud.dataplex_v1.types.CreateDataScanRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "data_scan", + "type": "google.cloud.dataplex_v1.types.DataScan" + }, + { + "name": "data_scan_id", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation.Operation", + "shortName": "create_data_scan" + }, + "description": "Sample for CreateDataScan", + "file": "dataplex_v1_generated_data_scan_service_create_data_scan_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dataplex_v1_generated_DataScanService_CreateDataScan_sync", + "segments": [ + { + "end": 61, + "start": 27, + "type": "FULL" + }, + { + "end": 61, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 51, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 58, + "start": 52, + "type": "REQUEST_EXECUTION" + }, + { + "end": 62, + "start": 59, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dataplex_v1_generated_data_scan_service_create_data_scan_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.dataplex_v1.DataScanServiceAsyncClient", + "shortName": "DataScanServiceAsyncClient" + }, + "fullName": "google.cloud.dataplex_v1.DataScanServiceAsyncClient.delete_data_scan", + "method": { + "fullName": "google.cloud.dataplex.v1.DataScanService.DeleteDataScan", + "service": { + "fullName": "google.cloud.dataplex.v1.DataScanService", + "shortName": "DataScanService" + }, + "shortName": "DeleteDataScan" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dataplex_v1.types.DeleteDataScanRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation_async.AsyncOperation", + "shortName": "delete_data_scan" + }, + "description": "Sample for DeleteDataScan", + "file": "dataplex_v1_generated_data_scan_service_delete_data_scan_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dataplex_v1_generated_DataScanService_DeleteDataScan_async", + "segments": [ + { + "end": 55, + "start": 27, + "type": "FULL" + }, + { + "end": 55, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 52, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 56, + "start": 53, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dataplex_v1_generated_data_scan_service_delete_data_scan_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.dataplex_v1.DataScanServiceClient", + "shortName": "DataScanServiceClient" + }, + "fullName": "google.cloud.dataplex_v1.DataScanServiceClient.delete_data_scan", + "method": { + "fullName": "google.cloud.dataplex.v1.DataScanService.DeleteDataScan", + "service": { + "fullName": "google.cloud.dataplex.v1.DataScanService", + "shortName": "DataScanService" + }, + "shortName": "DeleteDataScan" + }, + "parameters": [ + { + "name": "request", + "type": 
"google.cloud.dataplex_v1.types.DeleteDataScanRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation.Operation", + "shortName": "delete_data_scan" + }, + "description": "Sample for DeleteDataScan", + "file": "dataplex_v1_generated_data_scan_service_delete_data_scan_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dataplex_v1_generated_DataScanService_DeleteDataScan_sync", + "segments": [ + { + "end": 55, + "start": 27, + "type": "FULL" + }, + { + "end": 55, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 52, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 56, + "start": 53, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dataplex_v1_generated_data_scan_service_delete_data_scan_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.dataplex_v1.DataScanServiceAsyncClient", + "shortName": "DataScanServiceAsyncClient" + }, + "fullName": "google.cloud.dataplex_v1.DataScanServiceAsyncClient.generate_data_quality_rules", + "method": { + "fullName": "google.cloud.dataplex.v1.DataScanService.GenerateDataQualityRules", + "service": { + "fullName": "google.cloud.dataplex.v1.DataScanService", + "shortName": "DataScanService" + }, + "shortName": "GenerateDataQualityRules" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dataplex_v1.types.GenerateDataQualityRulesRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.dataplex_v1.types.GenerateDataQualityRulesResponse", + "shortName": "generate_data_quality_rules" + }, + "description": "Sample for GenerateDataQualityRules", + "file": "dataplex_v1_generated_data_scan_service_generate_data_quality_rules_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dataplex_v1_generated_DataScanService_GenerateDataQualityRules_async", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dataplex_v1_generated_data_scan_service_generate_data_quality_rules_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.dataplex_v1.DataScanServiceClient", + "shortName": "DataScanServiceClient" + }, + "fullName": "google.cloud.dataplex_v1.DataScanServiceClient.generate_data_quality_rules", + "method": { + "fullName": "google.cloud.dataplex.v1.DataScanService.GenerateDataQualityRules", + "service": { + "fullName": "google.cloud.dataplex.v1.DataScanService", + "shortName": "DataScanService" + }, + "shortName": "GenerateDataQualityRules" + }, + "parameters": [ + { + "name": "request", + "type": 
"google.cloud.dataplex_v1.types.GenerateDataQualityRulesRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.dataplex_v1.types.GenerateDataQualityRulesResponse", + "shortName": "generate_data_quality_rules" + }, + "description": "Sample for GenerateDataQualityRules", + "file": "dataplex_v1_generated_data_scan_service_generate_data_quality_rules_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dataplex_v1_generated_DataScanService_GenerateDataQualityRules_sync", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dataplex_v1_generated_data_scan_service_generate_data_quality_rules_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.dataplex_v1.DataScanServiceAsyncClient", + "shortName": "DataScanServiceAsyncClient" + }, + "fullName": "google.cloud.dataplex_v1.DataScanServiceAsyncClient.get_data_scan_job", + "method": { + "fullName": "google.cloud.dataplex.v1.DataScanService.GetDataScanJob", + "service": { + "fullName": "google.cloud.dataplex.v1.DataScanService", + "shortName": "DataScanService" + }, + "shortName": "GetDataScanJob" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dataplex_v1.types.GetDataScanJobRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.dataplex_v1.types.DataScanJob", + "shortName": "get_data_scan_job" + }, + "description": "Sample for GetDataScanJob", + "file": "dataplex_v1_generated_data_scan_service_get_data_scan_job_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dataplex_v1_generated_DataScanService_GetDataScanJob_async", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dataplex_v1_generated_data_scan_service_get_data_scan_job_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.dataplex_v1.DataScanServiceClient", + "shortName": "DataScanServiceClient" + }, + "fullName": "google.cloud.dataplex_v1.DataScanServiceClient.get_data_scan_job", + "method": { + "fullName": "google.cloud.dataplex.v1.DataScanService.GetDataScanJob", + "service": { + "fullName": "google.cloud.dataplex.v1.DataScanService", + "shortName": "DataScanService" + }, + "shortName": "GetDataScanJob" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dataplex_v1.types.GetDataScanJobRequest" + }, + { + "name": "name", + 
"type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.dataplex_v1.types.DataScanJob", + "shortName": "get_data_scan_job" + }, + "description": "Sample for GetDataScanJob", + "file": "dataplex_v1_generated_data_scan_service_get_data_scan_job_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dataplex_v1_generated_DataScanService_GetDataScanJob_sync", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dataplex_v1_generated_data_scan_service_get_data_scan_job_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.dataplex_v1.DataScanServiceAsyncClient", + "shortName": "DataScanServiceAsyncClient" + }, + "fullName": "google.cloud.dataplex_v1.DataScanServiceAsyncClient.get_data_scan", + "method": { + "fullName": "google.cloud.dataplex.v1.DataScanService.GetDataScan", + "service": { + "fullName": "google.cloud.dataplex.v1.DataScanService", + "shortName": "DataScanService" + }, + "shortName": "GetDataScan" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dataplex_v1.types.GetDataScanRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.dataplex_v1.types.DataScan", + "shortName": "get_data_scan" + }, + "description": "Sample for GetDataScan", + "file": "dataplex_v1_generated_data_scan_service_get_data_scan_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dataplex_v1_generated_DataScanService_GetDataScan_async", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dataplex_v1_generated_data_scan_service_get_data_scan_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.dataplex_v1.DataScanServiceClient", + "shortName": "DataScanServiceClient" + }, + "fullName": "google.cloud.dataplex_v1.DataScanServiceClient.get_data_scan", + "method": { + "fullName": "google.cloud.dataplex.v1.DataScanService.GetDataScan", + "service": { + "fullName": "google.cloud.dataplex.v1.DataScanService", + "shortName": "DataScanService" + }, + "shortName": "GetDataScan" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dataplex_v1.types.GetDataScanRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + 
"resultType": "google.cloud.dataplex_v1.types.DataScan", + "shortName": "get_data_scan" + }, + "description": "Sample for GetDataScan", + "file": "dataplex_v1_generated_data_scan_service_get_data_scan_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dataplex_v1_generated_DataScanService_GetDataScan_sync", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dataplex_v1_generated_data_scan_service_get_data_scan_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.dataplex_v1.DataScanServiceAsyncClient", + "shortName": "DataScanServiceAsyncClient" + }, + "fullName": "google.cloud.dataplex_v1.DataScanServiceAsyncClient.list_data_scan_jobs", + "method": { + "fullName": "google.cloud.dataplex.v1.DataScanService.ListDataScanJobs", + "service": { + "fullName": "google.cloud.dataplex.v1.DataScanService", + "shortName": "DataScanService" + }, + "shortName": "ListDataScanJobs" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dataplex_v1.types.ListDataScanJobsRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.dataplex_v1.services.data_scan_service.pagers.ListDataScanJobsAsyncPager", + "shortName": "list_data_scan_jobs" + }, + "description": "Sample for ListDataScanJobs", + "file": "dataplex_v1_generated_data_scan_service_list_data_scan_jobs_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dataplex_v1_generated_DataScanService_ListDataScanJobs_async", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dataplex_v1_generated_data_scan_service_list_data_scan_jobs_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.dataplex_v1.DataScanServiceClient", + "shortName": "DataScanServiceClient" + }, + "fullName": "google.cloud.dataplex_v1.DataScanServiceClient.list_data_scan_jobs", + "method": { + "fullName": "google.cloud.dataplex.v1.DataScanService.ListDataScanJobs", + "service": { + "fullName": "google.cloud.dataplex.v1.DataScanService", + "shortName": "DataScanService" + }, + "shortName": "ListDataScanJobs" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dataplex_v1.types.ListDataScanJobsRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.dataplex_v1.services.data_scan_service.pagers.ListDataScanJobsPager", + "shortName": 
"list_data_scan_jobs" + }, + "description": "Sample for ListDataScanJobs", + "file": "dataplex_v1_generated_data_scan_service_list_data_scan_jobs_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dataplex_v1_generated_DataScanService_ListDataScanJobs_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dataplex_v1_generated_data_scan_service_list_data_scan_jobs_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.dataplex_v1.DataScanServiceAsyncClient", + "shortName": "DataScanServiceAsyncClient" + }, + "fullName": "google.cloud.dataplex_v1.DataScanServiceAsyncClient.list_data_scans", + "method": { + "fullName": "google.cloud.dataplex.v1.DataScanService.ListDataScans", + "service": { + "fullName": "google.cloud.dataplex.v1.DataScanService", + "shortName": "DataScanService" + }, + "shortName": "ListDataScans" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dataplex_v1.types.ListDataScansRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.dataplex_v1.services.data_scan_service.pagers.ListDataScansAsyncPager", + "shortName": "list_data_scans" + }, + "description": "Sample for ListDataScans", + "file": "dataplex_v1_generated_data_scan_service_list_data_scans_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dataplex_v1_generated_DataScanService_ListDataScans_async", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dataplex_v1_generated_data_scan_service_list_data_scans_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.dataplex_v1.DataScanServiceClient", + "shortName": "DataScanServiceClient" + }, + "fullName": "google.cloud.dataplex_v1.DataScanServiceClient.list_data_scans", + "method": { + "fullName": "google.cloud.dataplex.v1.DataScanService.ListDataScans", + "service": { + "fullName": "google.cloud.dataplex.v1.DataScanService", + "shortName": "DataScanService" + }, + "shortName": "ListDataScans" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dataplex_v1.types.ListDataScansRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.dataplex_v1.services.data_scan_service.pagers.ListDataScansPager", + "shortName": "list_data_scans" + }, + "description": "Sample for ListDataScans", + "file": 
"dataplex_v1_generated_data_scan_service_list_data_scans_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dataplex_v1_generated_DataScanService_ListDataScans_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dataplex_v1_generated_data_scan_service_list_data_scans_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.dataplex_v1.DataScanServiceAsyncClient", + "shortName": "DataScanServiceAsyncClient" + }, + "fullName": "google.cloud.dataplex_v1.DataScanServiceAsyncClient.run_data_scan", + "method": { + "fullName": "google.cloud.dataplex.v1.DataScanService.RunDataScan", + "service": { + "fullName": "google.cloud.dataplex.v1.DataScanService", + "shortName": "DataScanService" + }, + "shortName": "RunDataScan" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dataplex_v1.types.RunDataScanRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.dataplex_v1.types.RunDataScanResponse", + "shortName": "run_data_scan" + }, + "description": "Sample for RunDataScan", + "file": "dataplex_v1_generated_data_scan_service_run_data_scan_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dataplex_v1_generated_DataScanService_RunDataScan_async", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dataplex_v1_generated_data_scan_service_run_data_scan_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.dataplex_v1.DataScanServiceClient", + "shortName": "DataScanServiceClient" + }, + "fullName": "google.cloud.dataplex_v1.DataScanServiceClient.run_data_scan", + "method": { + "fullName": "google.cloud.dataplex.v1.DataScanService.RunDataScan", + "service": { + "fullName": "google.cloud.dataplex.v1.DataScanService", + "shortName": "DataScanService" + }, + "shortName": "RunDataScan" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dataplex_v1.types.RunDataScanRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.dataplex_v1.types.RunDataScanResponse", + "shortName": "run_data_scan" + }, + "description": "Sample for RunDataScan", + "file": "dataplex_v1_generated_data_scan_service_run_data_scan_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dataplex_v1_generated_DataScanService_RunDataScan_sync", + "segments": [ + 
{ + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dataplex_v1_generated_data_scan_service_run_data_scan_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.dataplex_v1.DataScanServiceAsyncClient", + "shortName": "DataScanServiceAsyncClient" + }, + "fullName": "google.cloud.dataplex_v1.DataScanServiceAsyncClient.update_data_scan", + "method": { + "fullName": "google.cloud.dataplex.v1.DataScanService.UpdateDataScan", + "service": { + "fullName": "google.cloud.dataplex.v1.DataScanService", + "shortName": "DataScanService" + }, + "shortName": "UpdateDataScan" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dataplex_v1.types.UpdateDataScanRequest" + }, + { + "name": "data_scan", + "type": "google.cloud.dataplex_v1.types.DataScan" + }, + { + "name": "update_mask", + "type": "google.protobuf.field_mask_pb2.FieldMask" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation_async.AsyncOperation", + "shortName": "update_data_scan" + }, + "description": "Sample for UpdateDataScan", + "file": "dataplex_v1_generated_data_scan_service_update_data_scan_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dataplex_v1_generated_DataScanService_UpdateDataScan_async", + "segments": [ + { + "end": 59, + "start": 27, + "type": "FULL" + }, + { + "end": 59, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 49, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 56, + "start": 50, + "type": "REQUEST_EXECUTION" + }, + { + "end": 60, + "start": 57, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dataplex_v1_generated_data_scan_service_update_data_scan_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.dataplex_v1.DataScanServiceClient", + "shortName": "DataScanServiceClient" + }, + "fullName": "google.cloud.dataplex_v1.DataScanServiceClient.update_data_scan", + "method": { + "fullName": "google.cloud.dataplex.v1.DataScanService.UpdateDataScan", + "service": { + "fullName": "google.cloud.dataplex.v1.DataScanService", + "shortName": "DataScanService" + }, + "shortName": "UpdateDataScan" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dataplex_v1.types.UpdateDataScanRequest" + }, + { + "name": "data_scan", + "type": "google.cloud.dataplex_v1.types.DataScan" + }, + { + "name": "update_mask", + "type": "google.protobuf.field_mask_pb2.FieldMask" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation.Operation", + "shortName": "update_data_scan" + }, + "description": "Sample for UpdateDataScan", + "file": "dataplex_v1_generated_data_scan_service_update_data_scan_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": 
"dataplex_v1_generated_DataScanService_UpdateDataScan_sync", + "segments": [ + { + "end": 59, + "start": 27, + "type": "FULL" + }, + { + "end": 59, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 49, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 56, + "start": 50, + "type": "REQUEST_EXECUTION" + }, + { + "end": 60, + "start": 57, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dataplex_v1_generated_data_scan_service_update_data_scan_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.dataplex_v1.DataTaxonomyServiceAsyncClient", + "shortName": "DataTaxonomyServiceAsyncClient" + }, + "fullName": "google.cloud.dataplex_v1.DataTaxonomyServiceAsyncClient.create_data_attribute_binding", + "method": { + "fullName": "google.cloud.dataplex.v1.DataTaxonomyService.CreateDataAttributeBinding", + "service": { + "fullName": "google.cloud.dataplex.v1.DataTaxonomyService", + "shortName": "DataTaxonomyService" + }, + "shortName": "CreateDataAttributeBinding" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dataplex_v1.types.CreateDataAttributeBindingRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "data_attribute_binding", + "type": "google.cloud.dataplex_v1.types.DataAttributeBinding" + }, + { + "name": "data_attribute_binding_id", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation_async.AsyncOperation", + "shortName": "create_data_attribute_binding" + }, + "description": "Sample for CreateDataAttributeBinding", + "file": "dataplex_v1_generated_data_taxonomy_service_create_data_attribute_binding_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dataplex_v1_generated_DataTaxonomyService_CreateDataAttributeBinding_async", + "segments": [ + { + "end": 60, + "start": 27, + "type": "FULL" + }, + { + "end": 60, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 50, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 57, + "start": 51, + "type": "REQUEST_EXECUTION" + }, + { + "end": 61, + "start": 58, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dataplex_v1_generated_data_taxonomy_service_create_data_attribute_binding_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.dataplex_v1.DataTaxonomyServiceClient", + "shortName": "DataTaxonomyServiceClient" + }, + "fullName": "google.cloud.dataplex_v1.DataTaxonomyServiceClient.create_data_attribute_binding", + "method": { + "fullName": "google.cloud.dataplex.v1.DataTaxonomyService.CreateDataAttributeBinding", + "service": { + "fullName": "google.cloud.dataplex.v1.DataTaxonomyService", + "shortName": "DataTaxonomyService" + }, + "shortName": "CreateDataAttributeBinding" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dataplex_v1.types.CreateDataAttributeBindingRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "data_attribute_binding", + "type": "google.cloud.dataplex_v1.types.DataAttributeBinding" + }, + { + "name": "data_attribute_binding_id", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": 
"timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation.Operation", + "shortName": "create_data_attribute_binding" + }, + "description": "Sample for CreateDataAttributeBinding", + "file": "dataplex_v1_generated_data_taxonomy_service_create_data_attribute_binding_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dataplex_v1_generated_DataTaxonomyService_CreateDataAttributeBinding_sync", + "segments": [ + { + "end": 60, + "start": 27, + "type": "FULL" + }, + { + "end": 60, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 50, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 57, + "start": 51, + "type": "REQUEST_EXECUTION" + }, + { + "end": 61, + "start": 58, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dataplex_v1_generated_data_taxonomy_service_create_data_attribute_binding_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.dataplex_v1.DataTaxonomyServiceAsyncClient", + "shortName": "DataTaxonomyServiceAsyncClient" + }, + "fullName": "google.cloud.dataplex_v1.DataTaxonomyServiceAsyncClient.create_data_attribute", + "method": { + "fullName": "google.cloud.dataplex.v1.DataTaxonomyService.CreateDataAttribute", + "service": { + "fullName": "google.cloud.dataplex.v1.DataTaxonomyService", + "shortName": "DataTaxonomyService" + }, + "shortName": "CreateDataAttribute" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dataplex_v1.types.CreateDataAttributeRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "data_attribute", + "type": "google.cloud.dataplex_v1.types.DataAttribute" + }, + { + "name": "data_attribute_id", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation_async.AsyncOperation", + "shortName": "create_data_attribute" + }, + "description": "Sample for CreateDataAttribute", + "file": "dataplex_v1_generated_data_taxonomy_service_create_data_attribute_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dataplex_v1_generated_DataTaxonomyService_CreateDataAttribute_async", + "segments": [ + { + "end": 56, + "start": 27, + "type": "FULL" + }, + { + "end": 56, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 46, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 53, + "start": 47, + "type": "REQUEST_EXECUTION" + }, + { + "end": 57, + "start": 54, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dataplex_v1_generated_data_taxonomy_service_create_data_attribute_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.dataplex_v1.DataTaxonomyServiceClient", + "shortName": "DataTaxonomyServiceClient" + }, + "fullName": "google.cloud.dataplex_v1.DataTaxonomyServiceClient.create_data_attribute", + "method": { + "fullName": "google.cloud.dataplex.v1.DataTaxonomyService.CreateDataAttribute", + "service": { + "fullName": "google.cloud.dataplex.v1.DataTaxonomyService", + "shortName": "DataTaxonomyService" + }, + "shortName": "CreateDataAttribute" + }, + "parameters": [ + { + "name": "request", + "type": 
"google.cloud.dataplex_v1.types.CreateDataAttributeRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "data_attribute", + "type": "google.cloud.dataplex_v1.types.DataAttribute" + }, + { + "name": "data_attribute_id", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation.Operation", + "shortName": "create_data_attribute" + }, + "description": "Sample for CreateDataAttribute", + "file": "dataplex_v1_generated_data_taxonomy_service_create_data_attribute_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dataplex_v1_generated_DataTaxonomyService_CreateDataAttribute_sync", + "segments": [ + { + "end": 56, + "start": 27, + "type": "FULL" + }, + { + "end": 56, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 46, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 53, + "start": 47, + "type": "REQUEST_EXECUTION" + }, + { + "end": 57, + "start": 54, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dataplex_v1_generated_data_taxonomy_service_create_data_attribute_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.dataplex_v1.DataTaxonomyServiceAsyncClient", + "shortName": "DataTaxonomyServiceAsyncClient" + }, + "fullName": "google.cloud.dataplex_v1.DataTaxonomyServiceAsyncClient.create_data_taxonomy", + "method": { + "fullName": "google.cloud.dataplex.v1.DataTaxonomyService.CreateDataTaxonomy", + "service": { + "fullName": "google.cloud.dataplex.v1.DataTaxonomyService", + "shortName": "DataTaxonomyService" + }, + "shortName": "CreateDataTaxonomy" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dataplex_v1.types.CreateDataTaxonomyRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "data_taxonomy", + "type": "google.cloud.dataplex_v1.types.DataTaxonomy" + }, + { + "name": "data_taxonomy_id", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation_async.AsyncOperation", + "shortName": "create_data_taxonomy" + }, + "description": "Sample for CreateDataTaxonomy", + "file": "dataplex_v1_generated_data_taxonomy_service_create_data_taxonomy_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dataplex_v1_generated_DataTaxonomyService_CreateDataTaxonomy_async", + "segments": [ + { + "end": 56, + "start": 27, + "type": "FULL" + }, + { + "end": 56, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 46, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 53, + "start": 47, + "type": "REQUEST_EXECUTION" + }, + { + "end": 57, + "start": 54, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dataplex_v1_generated_data_taxonomy_service_create_data_taxonomy_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.dataplex_v1.DataTaxonomyServiceClient", + "shortName": "DataTaxonomyServiceClient" + }, + "fullName": "google.cloud.dataplex_v1.DataTaxonomyServiceClient.create_data_taxonomy", + "method": { + "fullName": 
"google.cloud.dataplex.v1.DataTaxonomyService.CreateDataTaxonomy", + "service": { + "fullName": "google.cloud.dataplex.v1.DataTaxonomyService", + "shortName": "DataTaxonomyService" + }, + "shortName": "CreateDataTaxonomy" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dataplex_v1.types.CreateDataTaxonomyRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "data_taxonomy", + "type": "google.cloud.dataplex_v1.types.DataTaxonomy" + }, + { + "name": "data_taxonomy_id", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation.Operation", + "shortName": "create_data_taxonomy" + }, + "description": "Sample for CreateDataTaxonomy", + "file": "dataplex_v1_generated_data_taxonomy_service_create_data_taxonomy_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dataplex_v1_generated_DataTaxonomyService_CreateDataTaxonomy_sync", + "segments": [ + { + "end": 56, + "start": 27, + "type": "FULL" + }, + { + "end": 56, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 46, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 53, + "start": 47, + "type": "REQUEST_EXECUTION" + }, + { + "end": 57, + "start": 54, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dataplex_v1_generated_data_taxonomy_service_create_data_taxonomy_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.dataplex_v1.DataTaxonomyServiceAsyncClient", + "shortName": "DataTaxonomyServiceAsyncClient" + }, + "fullName": "google.cloud.dataplex_v1.DataTaxonomyServiceAsyncClient.delete_data_attribute_binding", + "method": { + "fullName": "google.cloud.dataplex.v1.DataTaxonomyService.DeleteDataAttributeBinding", + "service": { + "fullName": "google.cloud.dataplex.v1.DataTaxonomyService", + "shortName": "DataTaxonomyService" + }, + "shortName": "DeleteDataAttributeBinding" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dataplex_v1.types.DeleteDataAttributeBindingRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation_async.AsyncOperation", + "shortName": "delete_data_attribute_binding" + }, + "description": "Sample for DeleteDataAttributeBinding", + "file": "dataplex_v1_generated_data_taxonomy_service_delete_data_attribute_binding_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dataplex_v1_generated_DataTaxonomyService_DeleteDataAttributeBinding_async", + "segments": [ + { + "end": 56, + "start": 27, + "type": "FULL" + }, + { + "end": 56, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 46, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 53, + "start": 47, + "type": "REQUEST_EXECUTION" + }, + { + "end": 57, + "start": 54, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dataplex_v1_generated_data_taxonomy_service_delete_data_attribute_binding_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": 
"google.cloud.dataplex_v1.DataTaxonomyServiceClient", + "shortName": "DataTaxonomyServiceClient" + }, + "fullName": "google.cloud.dataplex_v1.DataTaxonomyServiceClient.delete_data_attribute_binding", + "method": { + "fullName": "google.cloud.dataplex.v1.DataTaxonomyService.DeleteDataAttributeBinding", + "service": { + "fullName": "google.cloud.dataplex.v1.DataTaxonomyService", + "shortName": "DataTaxonomyService" + }, + "shortName": "DeleteDataAttributeBinding" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dataplex_v1.types.DeleteDataAttributeBindingRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation.Operation", + "shortName": "delete_data_attribute_binding" + }, + "description": "Sample for DeleteDataAttributeBinding", + "file": "dataplex_v1_generated_data_taxonomy_service_delete_data_attribute_binding_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dataplex_v1_generated_DataTaxonomyService_DeleteDataAttributeBinding_sync", + "segments": [ + { + "end": 56, + "start": 27, + "type": "FULL" + }, + { + "end": 56, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 46, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 53, + "start": 47, + "type": "REQUEST_EXECUTION" + }, + { + "end": 57, + "start": 54, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dataplex_v1_generated_data_taxonomy_service_delete_data_attribute_binding_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.dataplex_v1.DataTaxonomyServiceAsyncClient", + "shortName": "DataTaxonomyServiceAsyncClient" + }, + "fullName": "google.cloud.dataplex_v1.DataTaxonomyServiceAsyncClient.delete_data_attribute", + "method": { + "fullName": "google.cloud.dataplex.v1.DataTaxonomyService.DeleteDataAttribute", + "service": { + "fullName": "google.cloud.dataplex.v1.DataTaxonomyService", + "shortName": "DataTaxonomyService" + }, + "shortName": "DeleteDataAttribute" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dataplex_v1.types.DeleteDataAttributeRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation_async.AsyncOperation", + "shortName": "delete_data_attribute" + }, + "description": "Sample for DeleteDataAttribute", + "file": "dataplex_v1_generated_data_taxonomy_service_delete_data_attribute_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dataplex_v1_generated_DataTaxonomyService_DeleteDataAttribute_async", + "segments": [ + { + "end": 55, + "start": 27, + "type": "FULL" + }, + { + "end": 55, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 52, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 56, + "start": 53, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dataplex_v1_generated_data_taxonomy_service_delete_data_attribute_async.py" + }, + { + "canonical": true, 
+ "clientMethod": { + "client": { + "fullName": "google.cloud.dataplex_v1.DataTaxonomyServiceClient", + "shortName": "DataTaxonomyServiceClient" + }, + "fullName": "google.cloud.dataplex_v1.DataTaxonomyServiceClient.delete_data_attribute", + "method": { + "fullName": "google.cloud.dataplex.v1.DataTaxonomyService.DeleteDataAttribute", + "service": { + "fullName": "google.cloud.dataplex.v1.DataTaxonomyService", + "shortName": "DataTaxonomyService" + }, + "shortName": "DeleteDataAttribute" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dataplex_v1.types.DeleteDataAttributeRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation.Operation", + "shortName": "delete_data_attribute" + }, + "description": "Sample for DeleteDataAttribute", + "file": "dataplex_v1_generated_data_taxonomy_service_delete_data_attribute_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dataplex_v1_generated_DataTaxonomyService_DeleteDataAttribute_sync", + "segments": [ + { + "end": 55, + "start": 27, + "type": "FULL" + }, + { + "end": 55, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 52, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 56, + "start": 53, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dataplex_v1_generated_data_taxonomy_service_delete_data_attribute_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.dataplex_v1.DataTaxonomyServiceAsyncClient", + "shortName": "DataTaxonomyServiceAsyncClient" + }, + "fullName": "google.cloud.dataplex_v1.DataTaxonomyServiceAsyncClient.delete_data_taxonomy", + "method": { + "fullName": "google.cloud.dataplex.v1.DataTaxonomyService.DeleteDataTaxonomy", + "service": { + "fullName": "google.cloud.dataplex.v1.DataTaxonomyService", + "shortName": "DataTaxonomyService" + }, + "shortName": "DeleteDataTaxonomy" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dataplex_v1.types.DeleteDataTaxonomyRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation_async.AsyncOperation", + "shortName": "delete_data_taxonomy" + }, + "description": "Sample for DeleteDataTaxonomy", + "file": "dataplex_v1_generated_data_taxonomy_service_delete_data_taxonomy_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dataplex_v1_generated_DataTaxonomyService_DeleteDataTaxonomy_async", + "segments": [ + { + "end": 55, + "start": 27, + "type": "FULL" + }, + { + "end": 55, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 52, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 56, + "start": 53, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dataplex_v1_generated_data_taxonomy_service_delete_data_taxonomy_async.py" + }, + { + "canonical": true, + "clientMethod": { + 
"client": { + "fullName": "google.cloud.dataplex_v1.DataTaxonomyServiceClient", + "shortName": "DataTaxonomyServiceClient" + }, + "fullName": "google.cloud.dataplex_v1.DataTaxonomyServiceClient.delete_data_taxonomy", + "method": { + "fullName": "google.cloud.dataplex.v1.DataTaxonomyService.DeleteDataTaxonomy", + "service": { + "fullName": "google.cloud.dataplex.v1.DataTaxonomyService", + "shortName": "DataTaxonomyService" + }, + "shortName": "DeleteDataTaxonomy" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dataplex_v1.types.DeleteDataTaxonomyRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation.Operation", + "shortName": "delete_data_taxonomy" + }, + "description": "Sample for DeleteDataTaxonomy", + "file": "dataplex_v1_generated_data_taxonomy_service_delete_data_taxonomy_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dataplex_v1_generated_DataTaxonomyService_DeleteDataTaxonomy_sync", + "segments": [ + { + "end": 55, + "start": 27, + "type": "FULL" + }, + { + "end": 55, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 52, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 56, + "start": 53, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dataplex_v1_generated_data_taxonomy_service_delete_data_taxonomy_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.dataplex_v1.DataTaxonomyServiceAsyncClient", + "shortName": "DataTaxonomyServiceAsyncClient" + }, + "fullName": "google.cloud.dataplex_v1.DataTaxonomyServiceAsyncClient.get_data_attribute_binding", + "method": { + "fullName": "google.cloud.dataplex.v1.DataTaxonomyService.GetDataAttributeBinding", + "service": { + "fullName": "google.cloud.dataplex.v1.DataTaxonomyService", + "shortName": "DataTaxonomyService" + }, + "shortName": "GetDataAttributeBinding" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dataplex_v1.types.GetDataAttributeBindingRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.dataplex_v1.types.DataAttributeBinding", + "shortName": "get_data_attribute_binding" + }, + "description": "Sample for GetDataAttributeBinding", + "file": "dataplex_v1_generated_data_taxonomy_service_get_data_attribute_binding_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dataplex_v1_generated_DataTaxonomyService_GetDataAttributeBinding_async", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dataplex_v1_generated_data_taxonomy_service_get_data_attribute_binding_async.py" + }, + { + "canonical": true, + 
"clientMethod": { + "client": { + "fullName": "google.cloud.dataplex_v1.DataTaxonomyServiceClient", + "shortName": "DataTaxonomyServiceClient" + }, + "fullName": "google.cloud.dataplex_v1.DataTaxonomyServiceClient.get_data_attribute_binding", + "method": { + "fullName": "google.cloud.dataplex.v1.DataTaxonomyService.GetDataAttributeBinding", + "service": { + "fullName": "google.cloud.dataplex.v1.DataTaxonomyService", + "shortName": "DataTaxonomyService" + }, + "shortName": "GetDataAttributeBinding" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dataplex_v1.types.GetDataAttributeBindingRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.dataplex_v1.types.DataAttributeBinding", + "shortName": "get_data_attribute_binding" + }, + "description": "Sample for GetDataAttributeBinding", + "file": "dataplex_v1_generated_data_taxonomy_service_get_data_attribute_binding_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dataplex_v1_generated_DataTaxonomyService_GetDataAttributeBinding_sync", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dataplex_v1_generated_data_taxonomy_service_get_data_attribute_binding_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.dataplex_v1.DataTaxonomyServiceAsyncClient", + "shortName": "DataTaxonomyServiceAsyncClient" + }, + "fullName": "google.cloud.dataplex_v1.DataTaxonomyServiceAsyncClient.get_data_attribute", + "method": { + "fullName": "google.cloud.dataplex.v1.DataTaxonomyService.GetDataAttribute", + "service": { + "fullName": "google.cloud.dataplex.v1.DataTaxonomyService", + "shortName": "DataTaxonomyService" + }, + "shortName": "GetDataAttribute" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dataplex_v1.types.GetDataAttributeRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.dataplex_v1.types.DataAttribute", + "shortName": "get_data_attribute" + }, + "description": "Sample for GetDataAttribute", + "file": "dataplex_v1_generated_data_taxonomy_service_get_data_attribute_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dataplex_v1_generated_DataTaxonomyService_GetDataAttribute_async", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dataplex_v1_generated_data_taxonomy_service_get_data_attribute_async.py" + }, + { + "canonical": 
true, + "clientMethod": { + "client": { + "fullName": "google.cloud.dataplex_v1.DataTaxonomyServiceClient", + "shortName": "DataTaxonomyServiceClient" + }, + "fullName": "google.cloud.dataplex_v1.DataTaxonomyServiceClient.get_data_attribute", + "method": { + "fullName": "google.cloud.dataplex.v1.DataTaxonomyService.GetDataAttribute", + "service": { + "fullName": "google.cloud.dataplex.v1.DataTaxonomyService", + "shortName": "DataTaxonomyService" + }, + "shortName": "GetDataAttribute" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dataplex_v1.types.GetDataAttributeRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.dataplex_v1.types.DataAttribute", + "shortName": "get_data_attribute" + }, + "description": "Sample for GetDataAttribute", + "file": "dataplex_v1_generated_data_taxonomy_service_get_data_attribute_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dataplex_v1_generated_DataTaxonomyService_GetDataAttribute_sync", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dataplex_v1_generated_data_taxonomy_service_get_data_attribute_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.dataplex_v1.DataTaxonomyServiceAsyncClient", + "shortName": "DataTaxonomyServiceAsyncClient" + }, + "fullName": "google.cloud.dataplex_v1.DataTaxonomyServiceAsyncClient.get_data_taxonomy", + "method": { + "fullName": "google.cloud.dataplex.v1.DataTaxonomyService.GetDataTaxonomy", + "service": { + "fullName": "google.cloud.dataplex.v1.DataTaxonomyService", + "shortName": "DataTaxonomyService" + }, + "shortName": "GetDataTaxonomy" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dataplex_v1.types.GetDataTaxonomyRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.dataplex_v1.types.DataTaxonomy", + "shortName": "get_data_taxonomy" + }, + "description": "Sample for GetDataTaxonomy", + "file": "dataplex_v1_generated_data_taxonomy_service_get_data_taxonomy_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dataplex_v1_generated_DataTaxonomyService_GetDataTaxonomy_async", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dataplex_v1_generated_data_taxonomy_service_get_data_taxonomy_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": 
"google.cloud.dataplex_v1.DataTaxonomyServiceClient", + "shortName": "DataTaxonomyServiceClient" + }, + "fullName": "google.cloud.dataplex_v1.DataTaxonomyServiceClient.get_data_taxonomy", + "method": { + "fullName": "google.cloud.dataplex.v1.DataTaxonomyService.GetDataTaxonomy", + "service": { + "fullName": "google.cloud.dataplex.v1.DataTaxonomyService", + "shortName": "DataTaxonomyService" + }, + "shortName": "GetDataTaxonomy" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dataplex_v1.types.GetDataTaxonomyRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.dataplex_v1.types.DataTaxonomy", + "shortName": "get_data_taxonomy" + }, + "description": "Sample for GetDataTaxonomy", + "file": "dataplex_v1_generated_data_taxonomy_service_get_data_taxonomy_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dataplex_v1_generated_DataTaxonomyService_GetDataTaxonomy_sync", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dataplex_v1_generated_data_taxonomy_service_get_data_taxonomy_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.dataplex_v1.DataTaxonomyServiceAsyncClient", + "shortName": "DataTaxonomyServiceAsyncClient" + }, + "fullName": "google.cloud.dataplex_v1.DataTaxonomyServiceAsyncClient.list_data_attribute_bindings", + "method": { + "fullName": "google.cloud.dataplex.v1.DataTaxonomyService.ListDataAttributeBindings", + "service": { + "fullName": "google.cloud.dataplex.v1.DataTaxonomyService", + "shortName": "DataTaxonomyService" + }, + "shortName": "ListDataAttributeBindings" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dataplex_v1.types.ListDataAttributeBindingsRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.dataplex_v1.services.data_taxonomy_service.pagers.ListDataAttributeBindingsAsyncPager", + "shortName": "list_data_attribute_bindings" + }, + "description": "Sample for ListDataAttributeBindings", + "file": "dataplex_v1_generated_data_taxonomy_service_list_data_attribute_bindings_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dataplex_v1_generated_DataTaxonomyService_ListDataAttributeBindings_async", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dataplex_v1_generated_data_taxonomy_service_list_data_attribute_bindings_async.py" + }, + { + 
"canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.dataplex_v1.DataTaxonomyServiceClient", + "shortName": "DataTaxonomyServiceClient" + }, + "fullName": "google.cloud.dataplex_v1.DataTaxonomyServiceClient.list_data_attribute_bindings", + "method": { + "fullName": "google.cloud.dataplex.v1.DataTaxonomyService.ListDataAttributeBindings", + "service": { + "fullName": "google.cloud.dataplex.v1.DataTaxonomyService", + "shortName": "DataTaxonomyService" + }, + "shortName": "ListDataAttributeBindings" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dataplex_v1.types.ListDataAttributeBindingsRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.dataplex_v1.services.data_taxonomy_service.pagers.ListDataAttributeBindingsPager", + "shortName": "list_data_attribute_bindings" + }, + "description": "Sample for ListDataAttributeBindings", + "file": "dataplex_v1_generated_data_taxonomy_service_list_data_attribute_bindings_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dataplex_v1_generated_DataTaxonomyService_ListDataAttributeBindings_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dataplex_v1_generated_data_taxonomy_service_list_data_attribute_bindings_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.dataplex_v1.DataTaxonomyServiceAsyncClient", + "shortName": "DataTaxonomyServiceAsyncClient" + }, + "fullName": "google.cloud.dataplex_v1.DataTaxonomyServiceAsyncClient.list_data_attributes", + "method": { + "fullName": "google.cloud.dataplex.v1.DataTaxonomyService.ListDataAttributes", + "service": { + "fullName": "google.cloud.dataplex.v1.DataTaxonomyService", + "shortName": "DataTaxonomyService" + }, + "shortName": "ListDataAttributes" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dataplex_v1.types.ListDataAttributesRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.dataplex_v1.services.data_taxonomy_service.pagers.ListDataAttributesAsyncPager", + "shortName": "list_data_attributes" + }, + "description": "Sample for ListDataAttributes", + "file": "dataplex_v1_generated_data_taxonomy_service_list_data_attributes_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dataplex_v1_generated_DataTaxonomyService_ListDataAttributes_async", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, 
+ "type": "RESPONSE_HANDLING" + } + ], + "title": "dataplex_v1_generated_data_taxonomy_service_list_data_attributes_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.dataplex_v1.DataTaxonomyServiceClient", + "shortName": "DataTaxonomyServiceClient" + }, + "fullName": "google.cloud.dataplex_v1.DataTaxonomyServiceClient.list_data_attributes", + "method": { + "fullName": "google.cloud.dataplex.v1.DataTaxonomyService.ListDataAttributes", + "service": { + "fullName": "google.cloud.dataplex.v1.DataTaxonomyService", + "shortName": "DataTaxonomyService" + }, + "shortName": "ListDataAttributes" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dataplex_v1.types.ListDataAttributesRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.dataplex_v1.services.data_taxonomy_service.pagers.ListDataAttributesPager", + "shortName": "list_data_attributes" + }, + "description": "Sample for ListDataAttributes", + "file": "dataplex_v1_generated_data_taxonomy_service_list_data_attributes_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dataplex_v1_generated_DataTaxonomyService_ListDataAttributes_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dataplex_v1_generated_data_taxonomy_service_list_data_attributes_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.dataplex_v1.DataTaxonomyServiceAsyncClient", + "shortName": "DataTaxonomyServiceAsyncClient" + }, + "fullName": "google.cloud.dataplex_v1.DataTaxonomyServiceAsyncClient.list_data_taxonomies", + "method": { + "fullName": "google.cloud.dataplex.v1.DataTaxonomyService.ListDataTaxonomies", + "service": { + "fullName": "google.cloud.dataplex.v1.DataTaxonomyService", + "shortName": "DataTaxonomyService" + }, + "shortName": "ListDataTaxonomies" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dataplex_v1.types.ListDataTaxonomiesRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.dataplex_v1.services.data_taxonomy_service.pagers.ListDataTaxonomiesAsyncPager", + "shortName": "list_data_taxonomies" + }, + "description": "Sample for ListDataTaxonomies", + "file": "dataplex_v1_generated_data_taxonomy_service_list_data_taxonomies_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dataplex_v1_generated_DataTaxonomyService_ListDataTaxonomies_async", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + 
"type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dataplex_v1_generated_data_taxonomy_service_list_data_taxonomies_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.dataplex_v1.DataTaxonomyServiceClient", + "shortName": "DataTaxonomyServiceClient" + }, + "fullName": "google.cloud.dataplex_v1.DataTaxonomyServiceClient.list_data_taxonomies", + "method": { + "fullName": "google.cloud.dataplex.v1.DataTaxonomyService.ListDataTaxonomies", + "service": { + "fullName": "google.cloud.dataplex.v1.DataTaxonomyService", + "shortName": "DataTaxonomyService" + }, + "shortName": "ListDataTaxonomies" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dataplex_v1.types.ListDataTaxonomiesRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.dataplex_v1.services.data_taxonomy_service.pagers.ListDataTaxonomiesPager", + "shortName": "list_data_taxonomies" + }, + "description": "Sample for ListDataTaxonomies", + "file": "dataplex_v1_generated_data_taxonomy_service_list_data_taxonomies_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dataplex_v1_generated_DataTaxonomyService_ListDataTaxonomies_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dataplex_v1_generated_data_taxonomy_service_list_data_taxonomies_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.dataplex_v1.DataTaxonomyServiceAsyncClient", + "shortName": "DataTaxonomyServiceAsyncClient" + }, + "fullName": "google.cloud.dataplex_v1.DataTaxonomyServiceAsyncClient.update_data_attribute_binding", + "method": { + "fullName": "google.cloud.dataplex.v1.DataTaxonomyService.UpdateDataAttributeBinding", + "service": { + "fullName": "google.cloud.dataplex.v1.DataTaxonomyService", + "shortName": "DataTaxonomyService" + }, + "shortName": "UpdateDataAttributeBinding" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dataplex_v1.types.UpdateDataAttributeBindingRequest" + }, + { + "name": "data_attribute_binding", + "type": "google.cloud.dataplex_v1.types.DataAttributeBinding" + }, + { + "name": "update_mask", + "type": "google.protobuf.field_mask_pb2.FieldMask" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation_async.AsyncOperation", + "shortName": "update_data_attribute_binding" + }, + "description": "Sample for UpdateDataAttributeBinding", + "file": "dataplex_v1_generated_data_taxonomy_service_update_data_attribute_binding_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dataplex_v1_generated_DataTaxonomyService_UpdateDataAttributeBinding_async", + "segments": [ + { + "end": 58, + "start": 27, + "type": "FULL" 
+ }, + { + "end": 58, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 48, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 55, + "start": 49, + "type": "REQUEST_EXECUTION" + }, + { + "end": 59, + "start": 56, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dataplex_v1_generated_data_taxonomy_service_update_data_attribute_binding_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.dataplex_v1.DataTaxonomyServiceClient", + "shortName": "DataTaxonomyServiceClient" + }, + "fullName": "google.cloud.dataplex_v1.DataTaxonomyServiceClient.update_data_attribute_binding", + "method": { + "fullName": "google.cloud.dataplex.v1.DataTaxonomyService.UpdateDataAttributeBinding", + "service": { + "fullName": "google.cloud.dataplex.v1.DataTaxonomyService", + "shortName": "DataTaxonomyService" + }, + "shortName": "UpdateDataAttributeBinding" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dataplex_v1.types.UpdateDataAttributeBindingRequest" + }, + { + "name": "data_attribute_binding", + "type": "google.cloud.dataplex_v1.types.DataAttributeBinding" + }, + { + "name": "update_mask", + "type": "google.protobuf.field_mask_pb2.FieldMask" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation.Operation", + "shortName": "update_data_attribute_binding" + }, + "description": "Sample for UpdateDataAttributeBinding", + "file": "dataplex_v1_generated_data_taxonomy_service_update_data_attribute_binding_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dataplex_v1_generated_DataTaxonomyService_UpdateDataAttributeBinding_sync", + "segments": [ + { + "end": 58, + "start": 27, + "type": "FULL" + }, + { + "end": 58, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 48, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 55, + "start": 49, + "type": "REQUEST_EXECUTION" + }, + { + "end": 59, + "start": 56, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dataplex_v1_generated_data_taxonomy_service_update_data_attribute_binding_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.dataplex_v1.DataTaxonomyServiceAsyncClient", + "shortName": "DataTaxonomyServiceAsyncClient" + }, + "fullName": "google.cloud.dataplex_v1.DataTaxonomyServiceAsyncClient.update_data_attribute", + "method": { + "fullName": "google.cloud.dataplex.v1.DataTaxonomyService.UpdateDataAttribute", + "service": { + "fullName": "google.cloud.dataplex.v1.DataTaxonomyService", + "shortName": "DataTaxonomyService" + }, + "shortName": "UpdateDataAttribute" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dataplex_v1.types.UpdateDataAttributeRequest" + }, + { + "name": "data_attribute", + "type": "google.cloud.dataplex_v1.types.DataAttribute" + }, + { + "name": "update_mask", + "type": "google.protobuf.field_mask_pb2.FieldMask" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation_async.AsyncOperation", + "shortName": "update_data_attribute" + 
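
For the async variant described here, the method is awaited on DataTaxonomyServiceAsyncClient and yields a google.api_core.operation_async.AsyncOperation, whose result() coroutine is awaited in turn. A sketch under those assumptions; the resource name, field value, and update mask are illustrative:

    import asyncio

    from google.cloud import dataplex_v1
    from google.protobuf import field_mask_pb2

    async def sample_update_data_attribute():
        client = dataplex_v1.DataTaxonomyServiceAsyncClient()

        # Illustrative update: only "description" is sent, per the field mask.
        data_attribute = dataplex_v1.DataAttribute(
            name="projects/my-project/locations/us-central1/dataTaxonomies/my-taxonomy/attributes/my-attribute",
            description="updated description",
        )
        request = dataplex_v1.UpdateDataAttributeRequest(
            data_attribute=data_attribute,
            update_mask=field_mask_pb2.FieldMask(paths=["description"]),
        )

        # Await the call to get the AsyncOperation, then await its result.
        operation = await client.update_data_attribute(request=request)
        response = await operation.result()
        print(response)

    asyncio.run(sample_update_data_attribute())
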
}, + "description": "Sample for UpdateDataAttribute", + "file": "dataplex_v1_generated_data_taxonomy_service_update_data_attribute_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dataplex_v1_generated_DataTaxonomyService_UpdateDataAttribute_async", + "segments": [ + { + "end": 54, + "start": 27, + "type": "FULL" + }, + { + "end": 54, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 44, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 51, + "start": 45, + "type": "REQUEST_EXECUTION" + }, + { + "end": 55, + "start": 52, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dataplex_v1_generated_data_taxonomy_service_update_data_attribute_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.dataplex_v1.DataTaxonomyServiceClient", + "shortName": "DataTaxonomyServiceClient" + }, + "fullName": "google.cloud.dataplex_v1.DataTaxonomyServiceClient.update_data_attribute", + "method": { + "fullName": "google.cloud.dataplex.v1.DataTaxonomyService.UpdateDataAttribute", + "service": { + "fullName": "google.cloud.dataplex.v1.DataTaxonomyService", + "shortName": "DataTaxonomyService" + }, + "shortName": "UpdateDataAttribute" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dataplex_v1.types.UpdateDataAttributeRequest" + }, + { + "name": "data_attribute", + "type": "google.cloud.dataplex_v1.types.DataAttribute" + }, + { + "name": "update_mask", + "type": "google.protobuf.field_mask_pb2.FieldMask" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation.Operation", + "shortName": "update_data_attribute" + }, + "description": "Sample for UpdateDataAttribute", + "file": "dataplex_v1_generated_data_taxonomy_service_update_data_attribute_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dataplex_v1_generated_DataTaxonomyService_UpdateDataAttribute_sync", + "segments": [ + { + "end": 54, + "start": 27, + "type": "FULL" + }, + { + "end": 54, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 44, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 51, + "start": 45, + "type": "REQUEST_EXECUTION" + }, + { + "end": 55, + "start": 52, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dataplex_v1_generated_data_taxonomy_service_update_data_attribute_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.dataplex_v1.DataTaxonomyServiceAsyncClient", + "shortName": "DataTaxonomyServiceAsyncClient" + }, + "fullName": "google.cloud.dataplex_v1.DataTaxonomyServiceAsyncClient.update_data_taxonomy", + "method": { + "fullName": "google.cloud.dataplex.v1.DataTaxonomyService.UpdateDataTaxonomy", + "service": { + "fullName": "google.cloud.dataplex.v1.DataTaxonomyService", + "shortName": "DataTaxonomyService" + }, + "shortName": "UpdateDataTaxonomy" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dataplex_v1.types.UpdateDataTaxonomyRequest" + }, + { + "name": "data_taxonomy", + "type": "google.cloud.dataplex_v1.types.DataTaxonomy" + }, + { + "name": "update_mask", + "type": "google.protobuf.field_mask_pb2.FieldMask" + }, + { + "name": "retry", + "type": 
"google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation_async.AsyncOperation", + "shortName": "update_data_taxonomy" + }, + "description": "Sample for UpdateDataTaxonomy", + "file": "dataplex_v1_generated_data_taxonomy_service_update_data_taxonomy_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dataplex_v1_generated_DataTaxonomyService_UpdateDataTaxonomy_async", + "segments": [ + { + "end": 54, + "start": 27, + "type": "FULL" + }, + { + "end": 54, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 44, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 51, + "start": 45, + "type": "REQUEST_EXECUTION" + }, + { + "end": 55, + "start": 52, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dataplex_v1_generated_data_taxonomy_service_update_data_taxonomy_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.dataplex_v1.DataTaxonomyServiceClient", + "shortName": "DataTaxonomyServiceClient" + }, + "fullName": "google.cloud.dataplex_v1.DataTaxonomyServiceClient.update_data_taxonomy", + "method": { + "fullName": "google.cloud.dataplex.v1.DataTaxonomyService.UpdateDataTaxonomy", + "service": { + "fullName": "google.cloud.dataplex.v1.DataTaxonomyService", + "shortName": "DataTaxonomyService" + }, + "shortName": "UpdateDataTaxonomy" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dataplex_v1.types.UpdateDataTaxonomyRequest" + }, + { + "name": "data_taxonomy", + "type": "google.cloud.dataplex_v1.types.DataTaxonomy" + }, + { + "name": "update_mask", + "type": "google.protobuf.field_mask_pb2.FieldMask" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation.Operation", + "shortName": "update_data_taxonomy" + }, + "description": "Sample for UpdateDataTaxonomy", + "file": "dataplex_v1_generated_data_taxonomy_service_update_data_taxonomy_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dataplex_v1_generated_DataTaxonomyService_UpdateDataTaxonomy_sync", + "segments": [ + { + "end": 54, + "start": 27, + "type": "FULL" + }, + { + "end": 54, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 44, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 51, + "start": 45, + "type": "REQUEST_EXECUTION" + }, + { + "end": 55, + "start": 52, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dataplex_v1_generated_data_taxonomy_service_update_data_taxonomy_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.dataplex_v1.DataplexServiceAsyncClient", + "shortName": "DataplexServiceAsyncClient" + }, + "fullName": "google.cloud.dataplex_v1.DataplexServiceAsyncClient.cancel_job", + "method": { + "fullName": "google.cloud.dataplex.v1.DataplexService.CancelJob", + "service": { + "fullName": "google.cloud.dataplex.v1.DataplexService", + "shortName": "DataplexService" + }, + "shortName": "CancelJob" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dataplex_v1.types.CancelJobRequest" + }, + { + "name": "name", + "type": "str" 
+ }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]]" + } + ], + "shortName": "cancel_job" + }, + "description": "Sample for CancelJob", + "file": "dataplex_v1_generated_dataplex_service_cancel_job_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dataplex_v1_generated_DataplexService_CancelJob_async", + "segments": [ + { + "end": 49, + "start": 27, + "type": "FULL" + }, + { + "end": 49, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dataplex_v1_generated_dataplex_service_cancel_job_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.dataplex_v1.DataplexServiceClient", + "shortName": "DataplexServiceClient" + }, + "fullName": "google.cloud.dataplex_v1.DataplexServiceClient.cancel_job", + "method": { + "fullName": "google.cloud.dataplex.v1.DataplexService.CancelJob", + "service": { + "fullName": "google.cloud.dataplex.v1.DataplexService", + "shortName": "DataplexService" + }, + "shortName": "CancelJob" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dataplex_v1.types.CancelJobRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]]" + } + ], + "shortName": "cancel_job" + }, + "description": "Sample for CancelJob", + "file": "dataplex_v1_generated_dataplex_service_cancel_job_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dataplex_v1_generated_DataplexService_CancelJob_sync", + "segments": [ + { + "end": 49, + "start": 27, + "type": "FULL" + }, + { + "end": 49, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dataplex_v1_generated_dataplex_service_cancel_job_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.dataplex_v1.DataplexServiceAsyncClient", + "shortName": "DataplexServiceAsyncClient" + }, + "fullName": "google.cloud.dataplex_v1.DataplexServiceAsyncClient.create_asset", + "method": { + "fullName": "google.cloud.dataplex.v1.DataplexService.CreateAsset", + "service": { + "fullName": "google.cloud.dataplex.v1.DataplexService", + "shortName": "DataplexService" + }, + "shortName": "CreateAsset" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dataplex_v1.types.CreateAssetRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "asset", + "type": "google.cloud.dataplex_v1.types.Asset" + }, + { + "name": "asset_id", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]]" + } + ], + "resultType": "google.api_core.operation_async.AsyncOperation", + "shortName": "create_asset" + }, + "description": "Sample for
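The CancelJob records above are the only ones in this stretch without a resultType, and their REQUEST_EXECUTION segment carries only a start while RESPONSE_HANDLING carries only an end: the RPC returns google.protobuf.Empty, so there is no response object for the sample to handle. A minimal sketch of the sync call (the job name is a placeholder, not a value from this patch):

    from google.cloud import dataplex_v1

    def cancel_running_job():
        client = dataplex_v1.DataplexServiceClient()

        request = dataplex_v1.CancelJobRequest(
            name="projects/p/locations/l/lakes/lake/tasks/task/jobs/job",  # placeholder
        )

        # Returns None; CancelJob resolves to google.protobuf.Empty.
        client.cancel_job(request=request)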
CreateAsset", + "file": "dataplex_v1_generated_dataplex_service_create_asset_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dataplex_v1_generated_DataplexService_CreateAsset_async", + "segments": [ + { + "end": 60, + "start": 27, + "type": "FULL" + }, + { + "end": 60, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 50, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 57, + "start": 51, + "type": "REQUEST_EXECUTION" + }, + { + "end": 61, + "start": 58, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dataplex_v1_generated_dataplex_service_create_asset_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.dataplex_v1.DataplexServiceClient", + "shortName": "DataplexServiceClient" + }, + "fullName": "google.cloud.dataplex_v1.DataplexServiceClient.create_asset", + "method": { + "fullName": "google.cloud.dataplex.v1.DataplexService.CreateAsset", + "service": { + "fullName": "google.cloud.dataplex.v1.DataplexService", + "shortName": "DataplexService" + }, + "shortName": "CreateAsset" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dataplex_v1.types.CreateAssetRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "asset", + "type": "google.cloud.dataplex_v1.types.Asset" + }, + { + "name": "asset_id", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation.Operation", + "shortName": "create_asset" + }, + "description": "Sample for CreateAsset", + "file": "dataplex_v1_generated_dataplex_service_create_asset_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dataplex_v1_generated_DataplexService_CreateAsset_sync", + "segments": [ + { + "end": 60, + "start": 27, + "type": "FULL" + }, + { + "end": 60, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 50, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 57, + "start": 51, + "type": "REQUEST_EXECUTION" + }, + { + "end": 61, + "start": 58, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dataplex_v1_generated_dataplex_service_create_asset_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.dataplex_v1.DataplexServiceAsyncClient", + "shortName": "DataplexServiceAsyncClient" + }, + "fullName": "google.cloud.dataplex_v1.DataplexServiceAsyncClient.create_environment", + "method": { + "fullName": "google.cloud.dataplex.v1.DataplexService.CreateEnvironment", + "service": { + "fullName": "google.cloud.dataplex.v1.DataplexService", + "shortName": "DataplexService" + }, + "shortName": "CreateEnvironment" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dataplex_v1.types.CreateEnvironmentRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "environment", + "type": "google.cloud.dataplex_v1.types.Environment" + }, + { + "name": "environment_id", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation_async.AsyncOperation", + "shortName": 
"create_environment" + }, + "description": "Sample for CreateEnvironment", + "file": "dataplex_v1_generated_dataplex_service_create_environment_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dataplex_v1_generated_DataplexService_CreateEnvironment_async", + "segments": [ + { + "end": 60, + "start": 27, + "type": "FULL" + }, + { + "end": 60, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 50, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 57, + "start": 51, + "type": "REQUEST_EXECUTION" + }, + { + "end": 61, + "start": 58, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dataplex_v1_generated_dataplex_service_create_environment_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.dataplex_v1.DataplexServiceClient", + "shortName": "DataplexServiceClient" + }, + "fullName": "google.cloud.dataplex_v1.DataplexServiceClient.create_environment", + "method": { + "fullName": "google.cloud.dataplex.v1.DataplexService.CreateEnvironment", + "service": { + "fullName": "google.cloud.dataplex.v1.DataplexService", + "shortName": "DataplexService" + }, + "shortName": "CreateEnvironment" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dataplex_v1.types.CreateEnvironmentRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "environment", + "type": "google.cloud.dataplex_v1.types.Environment" + }, + { + "name": "environment_id", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation.Operation", + "shortName": "create_environment" + }, + "description": "Sample for CreateEnvironment", + "file": "dataplex_v1_generated_dataplex_service_create_environment_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dataplex_v1_generated_DataplexService_CreateEnvironment_sync", + "segments": [ + { + "end": 60, + "start": 27, + "type": "FULL" + }, + { + "end": 60, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 50, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 57, + "start": 51, + "type": "REQUEST_EXECUTION" + }, + { + "end": 61, + "start": 58, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dataplex_v1_generated_dataplex_service_create_environment_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.dataplex_v1.DataplexServiceAsyncClient", + "shortName": "DataplexServiceAsyncClient" + }, + "fullName": "google.cloud.dataplex_v1.DataplexServiceAsyncClient.create_lake", + "method": { + "fullName": "google.cloud.dataplex.v1.DataplexService.CreateLake", + "service": { + "fullName": "google.cloud.dataplex.v1.DataplexService", + "shortName": "DataplexService" + }, + "shortName": "CreateLake" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dataplex_v1.types.CreateLakeRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "lake", + "type": "google.cloud.dataplex_v1.types.Lake" + }, + { + "name": "lake_id", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": 
"Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation_async.AsyncOperation", + "shortName": "create_lake" + }, + "description": "Sample for CreateLake", + "file": "dataplex_v1_generated_dataplex_service_create_lake_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dataplex_v1_generated_DataplexService_CreateLake_async", + "segments": [ + { + "end": 56, + "start": 27, + "type": "FULL" + }, + { + "end": 56, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 46, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 53, + "start": 47, + "type": "REQUEST_EXECUTION" + }, + { + "end": 57, + "start": 54, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dataplex_v1_generated_dataplex_service_create_lake_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.dataplex_v1.DataplexServiceClient", + "shortName": "DataplexServiceClient" + }, + "fullName": "google.cloud.dataplex_v1.DataplexServiceClient.create_lake", + "method": { + "fullName": "google.cloud.dataplex.v1.DataplexService.CreateLake", + "service": { + "fullName": "google.cloud.dataplex.v1.DataplexService", + "shortName": "DataplexService" + }, + "shortName": "CreateLake" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dataplex_v1.types.CreateLakeRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "lake", + "type": "google.cloud.dataplex_v1.types.Lake" + }, + { + "name": "lake_id", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation.Operation", + "shortName": "create_lake" + }, + "description": "Sample for CreateLake", + "file": "dataplex_v1_generated_dataplex_service_create_lake_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dataplex_v1_generated_DataplexService_CreateLake_sync", + "segments": [ + { + "end": 56, + "start": 27, + "type": "FULL" + }, + { + "end": 56, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 46, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 53, + "start": 47, + "type": "REQUEST_EXECUTION" + }, + { + "end": 57, + "start": 54, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dataplex_v1_generated_dataplex_service_create_lake_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.dataplex_v1.DataplexServiceAsyncClient", + "shortName": "DataplexServiceAsyncClient" + }, + "fullName": "google.cloud.dataplex_v1.DataplexServiceAsyncClient.create_task", + "method": { + "fullName": "google.cloud.dataplex.v1.DataplexService.CreateTask", + "service": { + "fullName": "google.cloud.dataplex.v1.DataplexService", + "shortName": "DataplexService" + }, + "shortName": "CreateTask" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dataplex_v1.types.CreateTaskRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "task", + "type": "google.cloud.dataplex_v1.types.Task" + }, + { + "name": "task_id", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": 
"Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation_async.AsyncOperation", + "shortName": "create_task" + }, + "description": "Sample for CreateTask", + "file": "dataplex_v1_generated_dataplex_service_create_task_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dataplex_v1_generated_DataplexService_CreateTask_async", + "segments": [ + { + "end": 63, + "start": 27, + "type": "FULL" + }, + { + "end": 63, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 53, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 60, + "start": 54, + "type": "REQUEST_EXECUTION" + }, + { + "end": 64, + "start": 61, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dataplex_v1_generated_dataplex_service_create_task_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.dataplex_v1.DataplexServiceClient", + "shortName": "DataplexServiceClient" + }, + "fullName": "google.cloud.dataplex_v1.DataplexServiceClient.create_task", + "method": { + "fullName": "google.cloud.dataplex.v1.DataplexService.CreateTask", + "service": { + "fullName": "google.cloud.dataplex.v1.DataplexService", + "shortName": "DataplexService" + }, + "shortName": "CreateTask" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dataplex_v1.types.CreateTaskRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "task", + "type": "google.cloud.dataplex_v1.types.Task" + }, + { + "name": "task_id", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation.Operation", + "shortName": "create_task" + }, + "description": "Sample for CreateTask", + "file": "dataplex_v1_generated_dataplex_service_create_task_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dataplex_v1_generated_DataplexService_CreateTask_sync", + "segments": [ + { + "end": 63, + "start": 27, + "type": "FULL" + }, + { + "end": 63, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 53, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 60, + "start": 54, + "type": "REQUEST_EXECUTION" + }, + { + "end": 64, + "start": 61, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dataplex_v1_generated_dataplex_service_create_task_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.dataplex_v1.DataplexServiceAsyncClient", + "shortName": "DataplexServiceAsyncClient" + }, + "fullName": "google.cloud.dataplex_v1.DataplexServiceAsyncClient.create_zone", + "method": { + "fullName": "google.cloud.dataplex.v1.DataplexService.CreateZone", + "service": { + "fullName": "google.cloud.dataplex.v1.DataplexService", + "shortName": "DataplexService" + }, + "shortName": "CreateZone" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dataplex_v1.types.CreateZoneRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "zone", + "type": "google.cloud.dataplex_v1.types.Zone" + }, + { + "name": "zone_id", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": 
"Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation_async.AsyncOperation", + "shortName": "create_zone" + }, + "description": "Sample for CreateZone", + "file": "dataplex_v1_generated_dataplex_service_create_zone_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dataplex_v1_generated_DataplexService_CreateZone_async", + "segments": [ + { + "end": 61, + "start": 27, + "type": "FULL" + }, + { + "end": 61, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 51, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 58, + "start": 52, + "type": "REQUEST_EXECUTION" + }, + { + "end": 62, + "start": 59, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dataplex_v1_generated_dataplex_service_create_zone_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.dataplex_v1.DataplexServiceClient", + "shortName": "DataplexServiceClient" + }, + "fullName": "google.cloud.dataplex_v1.DataplexServiceClient.create_zone", + "method": { + "fullName": "google.cloud.dataplex.v1.DataplexService.CreateZone", + "service": { + "fullName": "google.cloud.dataplex.v1.DataplexService", + "shortName": "DataplexService" + }, + "shortName": "CreateZone" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dataplex_v1.types.CreateZoneRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "zone", + "type": "google.cloud.dataplex_v1.types.Zone" + }, + { + "name": "zone_id", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation.Operation", + "shortName": "create_zone" + }, + "description": "Sample for CreateZone", + "file": "dataplex_v1_generated_dataplex_service_create_zone_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dataplex_v1_generated_DataplexService_CreateZone_sync", + "segments": [ + { + "end": 61, + "start": 27, + "type": "FULL" + }, + { + "end": 61, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 51, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 58, + "start": 52, + "type": "REQUEST_EXECUTION" + }, + { + "end": 62, + "start": 59, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dataplex_v1_generated_dataplex_service_create_zone_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.dataplex_v1.DataplexServiceAsyncClient", + "shortName": "DataplexServiceAsyncClient" + }, + "fullName": "google.cloud.dataplex_v1.DataplexServiceAsyncClient.delete_asset", + "method": { + "fullName": "google.cloud.dataplex.v1.DataplexService.DeleteAsset", + "service": { + "fullName": "google.cloud.dataplex.v1.DataplexService", + "shortName": "DataplexService" + }, + "shortName": "DeleteAsset" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dataplex_v1.types.DeleteAssetRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation_async.AsyncOperation", + "shortName": "delete_asset" + }, 
+ "description": "Sample for DeleteAsset", + "file": "dataplex_v1_generated_dataplex_service_delete_asset_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dataplex_v1_generated_DataplexService_DeleteAsset_async", + "segments": [ + { + "end": 55, + "start": 27, + "type": "FULL" + }, + { + "end": 55, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 52, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 56, + "start": 53, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dataplex_v1_generated_dataplex_service_delete_asset_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.dataplex_v1.DataplexServiceClient", + "shortName": "DataplexServiceClient" + }, + "fullName": "google.cloud.dataplex_v1.DataplexServiceClient.delete_asset", + "method": { + "fullName": "google.cloud.dataplex.v1.DataplexService.DeleteAsset", + "service": { + "fullName": "google.cloud.dataplex.v1.DataplexService", + "shortName": "DataplexService" + }, + "shortName": "DeleteAsset" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dataplex_v1.types.DeleteAssetRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation.Operation", + "shortName": "delete_asset" + }, + "description": "Sample for DeleteAsset", + "file": "dataplex_v1_generated_dataplex_service_delete_asset_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dataplex_v1_generated_DataplexService_DeleteAsset_sync", + "segments": [ + { + "end": 55, + "start": 27, + "type": "FULL" + }, + { + "end": 55, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 52, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 56, + "start": 53, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dataplex_v1_generated_dataplex_service_delete_asset_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.dataplex_v1.DataplexServiceAsyncClient", + "shortName": "DataplexServiceAsyncClient" + }, + "fullName": "google.cloud.dataplex_v1.DataplexServiceAsyncClient.delete_environment", + "method": { + "fullName": "google.cloud.dataplex.v1.DataplexService.DeleteEnvironment", + "service": { + "fullName": "google.cloud.dataplex.v1.DataplexService", + "shortName": "DataplexService" + }, + "shortName": "DeleteEnvironment" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dataplex_v1.types.DeleteEnvironmentRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation_async.AsyncOperation", + "shortName": "delete_environment" + }, + "description": "Sample for DeleteEnvironment", + "file": "dataplex_v1_generated_dataplex_service_delete_environment_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": 
"dataplex_v1_generated_DataplexService_DeleteEnvironment_async", + "segments": [ + { + "end": 55, + "start": 27, + "type": "FULL" + }, + { + "end": 55, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 52, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 56, + "start": 53, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dataplex_v1_generated_dataplex_service_delete_environment_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.dataplex_v1.DataplexServiceClient", + "shortName": "DataplexServiceClient" + }, + "fullName": "google.cloud.dataplex_v1.DataplexServiceClient.delete_environment", + "method": { + "fullName": "google.cloud.dataplex.v1.DataplexService.DeleteEnvironment", + "service": { + "fullName": "google.cloud.dataplex.v1.DataplexService", + "shortName": "DataplexService" + }, + "shortName": "DeleteEnvironment" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dataplex_v1.types.DeleteEnvironmentRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation.Operation", + "shortName": "delete_environment" + }, + "description": "Sample for DeleteEnvironment", + "file": "dataplex_v1_generated_dataplex_service_delete_environment_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dataplex_v1_generated_DataplexService_DeleteEnvironment_sync", + "segments": [ + { + "end": 55, + "start": 27, + "type": "FULL" + }, + { + "end": 55, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 52, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 56, + "start": 53, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dataplex_v1_generated_dataplex_service_delete_environment_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.dataplex_v1.DataplexServiceAsyncClient", + "shortName": "DataplexServiceAsyncClient" + }, + "fullName": "google.cloud.dataplex_v1.DataplexServiceAsyncClient.delete_lake", + "method": { + "fullName": "google.cloud.dataplex.v1.DataplexService.DeleteLake", + "service": { + "fullName": "google.cloud.dataplex.v1.DataplexService", + "shortName": "DataplexService" + }, + "shortName": "DeleteLake" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dataplex_v1.types.DeleteLakeRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation_async.AsyncOperation", + "shortName": "delete_lake" + }, + "description": "Sample for DeleteLake", + "file": "dataplex_v1_generated_dataplex_service_delete_lake_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dataplex_v1_generated_DataplexService_DeleteLake_async", + "segments": [ + { + "end": 55, + "start": 27, + "type": "FULL" + }, + { + "end": 55, + "start": 27, + "type": "SHORT" + }, + { 
+ "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 52, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 56, + "start": 53, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dataplex_v1_generated_dataplex_service_delete_lake_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.dataplex_v1.DataplexServiceClient", + "shortName": "DataplexServiceClient" + }, + "fullName": "google.cloud.dataplex_v1.DataplexServiceClient.delete_lake", + "method": { + "fullName": "google.cloud.dataplex.v1.DataplexService.DeleteLake", + "service": { + "fullName": "google.cloud.dataplex.v1.DataplexService", + "shortName": "DataplexService" + }, + "shortName": "DeleteLake" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dataplex_v1.types.DeleteLakeRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation.Operation", + "shortName": "delete_lake" + }, + "description": "Sample for DeleteLake", + "file": "dataplex_v1_generated_dataplex_service_delete_lake_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dataplex_v1_generated_DataplexService_DeleteLake_sync", + "segments": [ + { + "end": 55, + "start": 27, + "type": "FULL" + }, + { + "end": 55, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 52, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 56, + "start": 53, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dataplex_v1_generated_dataplex_service_delete_lake_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.dataplex_v1.DataplexServiceAsyncClient", + "shortName": "DataplexServiceAsyncClient" + }, + "fullName": "google.cloud.dataplex_v1.DataplexServiceAsyncClient.delete_task", + "method": { + "fullName": "google.cloud.dataplex.v1.DataplexService.DeleteTask", + "service": { + "fullName": "google.cloud.dataplex.v1.DataplexService", + "shortName": "DataplexService" + }, + "shortName": "DeleteTask" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dataplex_v1.types.DeleteTaskRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation_async.AsyncOperation", + "shortName": "delete_task" + }, + "description": "Sample for DeleteTask", + "file": "dataplex_v1_generated_dataplex_service_delete_task_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dataplex_v1_generated_DataplexService_DeleteTask_async", + "segments": [ + { + "end": 55, + "start": 27, + "type": "FULL" + }, + { + "end": 55, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 52, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 56, + "start": 53, + "type": 
"RESPONSE_HANDLING" + } + ], + "title": "dataplex_v1_generated_dataplex_service_delete_task_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.dataplex_v1.DataplexServiceClient", + "shortName": "DataplexServiceClient" + }, + "fullName": "google.cloud.dataplex_v1.DataplexServiceClient.delete_task", + "method": { + "fullName": "google.cloud.dataplex.v1.DataplexService.DeleteTask", + "service": { + "fullName": "google.cloud.dataplex.v1.DataplexService", + "shortName": "DataplexService" + }, + "shortName": "DeleteTask" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dataplex_v1.types.DeleteTaskRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation.Operation", + "shortName": "delete_task" + }, + "description": "Sample for DeleteTask", + "file": "dataplex_v1_generated_dataplex_service_delete_task_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dataplex_v1_generated_DataplexService_DeleteTask_sync", + "segments": [ + { + "end": 55, + "start": 27, + "type": "FULL" + }, + { + "end": 55, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 52, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 56, + "start": 53, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dataplex_v1_generated_dataplex_service_delete_task_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.dataplex_v1.DataplexServiceAsyncClient", + "shortName": "DataplexServiceAsyncClient" + }, + "fullName": "google.cloud.dataplex_v1.DataplexServiceAsyncClient.delete_zone", + "method": { + "fullName": "google.cloud.dataplex.v1.DataplexService.DeleteZone", + "service": { + "fullName": "google.cloud.dataplex.v1.DataplexService", + "shortName": "DataplexService" + }, + "shortName": "DeleteZone" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dataplex_v1.types.DeleteZoneRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation_async.AsyncOperation", + "shortName": "delete_zone" + }, + "description": "Sample for DeleteZone", + "file": "dataplex_v1_generated_dataplex_service_delete_zone_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dataplex_v1_generated_DataplexService_DeleteZone_async", + "segments": [ + { + "end": 55, + "start": 27, + "type": "FULL" + }, + { + "end": 55, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 52, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 56, + "start": 53, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dataplex_v1_generated_dataplex_service_delete_zone_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.dataplex_v1.DataplexServiceClient", + "shortName": 
"DataplexServiceClient" + }, + "fullName": "google.cloud.dataplex_v1.DataplexServiceClient.delete_zone", + "method": { + "fullName": "google.cloud.dataplex.v1.DataplexService.DeleteZone", + "service": { + "fullName": "google.cloud.dataplex.v1.DataplexService", + "shortName": "DataplexService" + }, + "shortName": "DeleteZone" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dataplex_v1.types.DeleteZoneRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation.Operation", + "shortName": "delete_zone" + }, + "description": "Sample for DeleteZone", + "file": "dataplex_v1_generated_dataplex_service_delete_zone_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dataplex_v1_generated_DataplexService_DeleteZone_sync", + "segments": [ + { + "end": 55, + "start": 27, + "type": "FULL" + }, + { + "end": 55, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 52, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 56, + "start": 53, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dataplex_v1_generated_dataplex_service_delete_zone_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.dataplex_v1.DataplexServiceAsyncClient", + "shortName": "DataplexServiceAsyncClient" + }, + "fullName": "google.cloud.dataplex_v1.DataplexServiceAsyncClient.get_asset", + "method": { + "fullName": "google.cloud.dataplex.v1.DataplexService.GetAsset", + "service": { + "fullName": "google.cloud.dataplex.v1.DataplexService", + "shortName": "DataplexService" + }, + "shortName": "GetAsset" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dataplex_v1.types.GetAssetRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.dataplex_v1.types.Asset", + "shortName": "get_asset" + }, + "description": "Sample for GetAsset", + "file": "dataplex_v1_generated_dataplex_service_get_asset_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dataplex_v1_generated_DataplexService_GetAsset_async", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dataplex_v1_generated_dataplex_service_get_asset_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.dataplex_v1.DataplexServiceClient", + "shortName": "DataplexServiceClient" + }, + "fullName": "google.cloud.dataplex_v1.DataplexServiceClient.get_asset", + "method": { + "fullName": "google.cloud.dataplex.v1.DataplexService.GetAsset", + "service": { + "fullName": "google.cloud.dataplex.v1.DataplexService", + "shortName": "DataplexService" + 
}, + "shortName": "GetAsset" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dataplex_v1.types.GetAssetRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.dataplex_v1.types.Asset", + "shortName": "get_asset" + }, + "description": "Sample for GetAsset", + "file": "dataplex_v1_generated_dataplex_service_get_asset_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dataplex_v1_generated_DataplexService_GetAsset_sync", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dataplex_v1_generated_dataplex_service_get_asset_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.dataplex_v1.DataplexServiceAsyncClient", + "shortName": "DataplexServiceAsyncClient" + }, + "fullName": "google.cloud.dataplex_v1.DataplexServiceAsyncClient.get_environment", + "method": { + "fullName": "google.cloud.dataplex.v1.DataplexService.GetEnvironment", + "service": { + "fullName": "google.cloud.dataplex.v1.DataplexService", + "shortName": "DataplexService" + }, + "shortName": "GetEnvironment" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dataplex_v1.types.GetEnvironmentRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.dataplex_v1.types.Environment", + "shortName": "get_environment" + }, + "description": "Sample for GetEnvironment", + "file": "dataplex_v1_generated_dataplex_service_get_environment_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dataplex_v1_generated_DataplexService_GetEnvironment_async", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dataplex_v1_generated_dataplex_service_get_environment_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.dataplex_v1.DataplexServiceClient", + "shortName": "DataplexServiceClient" + }, + "fullName": "google.cloud.dataplex_v1.DataplexServiceClient.get_environment", + "method": { + "fullName": "google.cloud.dataplex.v1.DataplexService.GetEnvironment", + "service": { + "fullName": "google.cloud.dataplex.v1.DataplexService", + "shortName": "DataplexService" + }, + "shortName": "GetEnvironment" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dataplex_v1.types.GetEnvironmentRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": 
"google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.dataplex_v1.types.Environment", + "shortName": "get_environment" + }, + "description": "Sample for GetEnvironment", + "file": "dataplex_v1_generated_dataplex_service_get_environment_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dataplex_v1_generated_DataplexService_GetEnvironment_sync", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dataplex_v1_generated_dataplex_service_get_environment_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.dataplex_v1.DataplexServiceAsyncClient", + "shortName": "DataplexServiceAsyncClient" + }, + "fullName": "google.cloud.dataplex_v1.DataplexServiceAsyncClient.get_job", + "method": { + "fullName": "google.cloud.dataplex.v1.DataplexService.GetJob", + "service": { + "fullName": "google.cloud.dataplex.v1.DataplexService", + "shortName": "DataplexService" + }, + "shortName": "GetJob" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dataplex_v1.types.GetJobRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.dataplex_v1.types.Job", + "shortName": "get_job" + }, + "description": "Sample for GetJob", + "file": "dataplex_v1_generated_dataplex_service_get_job_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dataplex_v1_generated_DataplexService_GetJob_async", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dataplex_v1_generated_dataplex_service_get_job_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.dataplex_v1.DataplexServiceClient", + "shortName": "DataplexServiceClient" + }, + "fullName": "google.cloud.dataplex_v1.DataplexServiceClient.get_job", + "method": { + "fullName": "google.cloud.dataplex.v1.DataplexService.GetJob", + "service": { + "fullName": "google.cloud.dataplex.v1.DataplexService", + "shortName": "DataplexService" + }, + "shortName": "GetJob" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dataplex_v1.types.GetJobRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.dataplex_v1.types.Job", + "shortName": "get_job" + }, + "description": "Sample for GetJob", + "file": 
"dataplex_v1_generated_dataplex_service_get_job_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dataplex_v1_generated_DataplexService_GetJob_sync", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dataplex_v1_generated_dataplex_service_get_job_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.dataplex_v1.DataplexServiceAsyncClient", + "shortName": "DataplexServiceAsyncClient" + }, + "fullName": "google.cloud.dataplex_v1.DataplexServiceAsyncClient.get_lake", + "method": { + "fullName": "google.cloud.dataplex.v1.DataplexService.GetLake", + "service": { + "fullName": "google.cloud.dataplex.v1.DataplexService", + "shortName": "DataplexService" + }, + "shortName": "GetLake" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dataplex_v1.types.GetLakeRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.dataplex_v1.types.Lake", + "shortName": "get_lake" + }, + "description": "Sample for GetLake", + "file": "dataplex_v1_generated_dataplex_service_get_lake_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dataplex_v1_generated_DataplexService_GetLake_async", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dataplex_v1_generated_dataplex_service_get_lake_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.dataplex_v1.DataplexServiceClient", + "shortName": "DataplexServiceClient" + }, + "fullName": "google.cloud.dataplex_v1.DataplexServiceClient.get_lake", + "method": { + "fullName": "google.cloud.dataplex.v1.DataplexService.GetLake", + "service": { + "fullName": "google.cloud.dataplex.v1.DataplexService", + "shortName": "DataplexService" + }, + "shortName": "GetLake" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dataplex_v1.types.GetLakeRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.dataplex_v1.types.Lake", + "shortName": "get_lake" + }, + "description": "Sample for GetLake", + "file": "dataplex_v1_generated_dataplex_service_get_lake_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dataplex_v1_generated_DataplexService_GetLake_sync", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + 
"start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dataplex_v1_generated_dataplex_service_get_lake_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.dataplex_v1.DataplexServiceAsyncClient", + "shortName": "DataplexServiceAsyncClient" + }, + "fullName": "google.cloud.dataplex_v1.DataplexServiceAsyncClient.get_task", + "method": { + "fullName": "google.cloud.dataplex.v1.DataplexService.GetTask", + "service": { + "fullName": "google.cloud.dataplex.v1.DataplexService", + "shortName": "DataplexService" + }, + "shortName": "GetTask" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dataplex_v1.types.GetTaskRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.dataplex_v1.types.Task", + "shortName": "get_task" + }, + "description": "Sample for GetTask", + "file": "dataplex_v1_generated_dataplex_service_get_task_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dataplex_v1_generated_DataplexService_GetTask_async", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dataplex_v1_generated_dataplex_service_get_task_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.dataplex_v1.DataplexServiceClient", + "shortName": "DataplexServiceClient" + }, + "fullName": "google.cloud.dataplex_v1.DataplexServiceClient.get_task", + "method": { + "fullName": "google.cloud.dataplex.v1.DataplexService.GetTask", + "service": { + "fullName": "google.cloud.dataplex.v1.DataplexService", + "shortName": "DataplexService" + }, + "shortName": "GetTask" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dataplex_v1.types.GetTaskRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.dataplex_v1.types.Task", + "shortName": "get_task" + }, + "description": "Sample for GetTask", + "file": "dataplex_v1_generated_dataplex_service_get_task_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dataplex_v1_generated_DataplexService_GetTask_sync", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": 
"dataplex_v1_generated_dataplex_service_get_task_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.dataplex_v1.DataplexServiceAsyncClient", + "shortName": "DataplexServiceAsyncClient" + }, + "fullName": "google.cloud.dataplex_v1.DataplexServiceAsyncClient.get_zone", + "method": { + "fullName": "google.cloud.dataplex.v1.DataplexService.GetZone", + "service": { + "fullName": "google.cloud.dataplex.v1.DataplexService", + "shortName": "DataplexService" + }, + "shortName": "GetZone" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dataplex_v1.types.GetZoneRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.dataplex_v1.types.Zone", + "shortName": "get_zone" + }, + "description": "Sample for GetZone", + "file": "dataplex_v1_generated_dataplex_service_get_zone_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dataplex_v1_generated_DataplexService_GetZone_async", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dataplex_v1_generated_dataplex_service_get_zone_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.dataplex_v1.DataplexServiceClient", + "shortName": "DataplexServiceClient" + }, + "fullName": "google.cloud.dataplex_v1.DataplexServiceClient.get_zone", + "method": { + "fullName": "google.cloud.dataplex.v1.DataplexService.GetZone", + "service": { + "fullName": "google.cloud.dataplex.v1.DataplexService", + "shortName": "DataplexService" + }, + "shortName": "GetZone" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dataplex_v1.types.GetZoneRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.dataplex_v1.types.Zone", + "shortName": "get_zone" + }, + "description": "Sample for GetZone", + "file": "dataplex_v1_generated_dataplex_service_get_zone_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dataplex_v1_generated_DataplexService_GetZone_sync", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dataplex_v1_generated_dataplex_service_get_zone_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.dataplex_v1.DataplexServiceAsyncClient", + "shortName": "DataplexServiceAsyncClient" + }, + "fullName": 
"google.cloud.dataplex_v1.DataplexServiceAsyncClient.list_asset_actions", + "method": { + "fullName": "google.cloud.dataplex.v1.DataplexService.ListAssetActions", + "service": { + "fullName": "google.cloud.dataplex.v1.DataplexService", + "shortName": "DataplexService" + }, + "shortName": "ListAssetActions" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dataplex_v1.types.ListAssetActionsRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.dataplex_v1.services.dataplex_service.pagers.ListAssetActionsAsyncPager", + "shortName": "list_asset_actions" + }, + "description": "Sample for ListAssetActions", + "file": "dataplex_v1_generated_dataplex_service_list_asset_actions_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dataplex_v1_generated_DataplexService_ListAssetActions_async", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dataplex_v1_generated_dataplex_service_list_asset_actions_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.dataplex_v1.DataplexServiceClient", + "shortName": "DataplexServiceClient" + }, + "fullName": "google.cloud.dataplex_v1.DataplexServiceClient.list_asset_actions", + "method": { + "fullName": "google.cloud.dataplex.v1.DataplexService.ListAssetActions", + "service": { + "fullName": "google.cloud.dataplex.v1.DataplexService", + "shortName": "DataplexService" + }, + "shortName": "ListAssetActions" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dataplex_v1.types.ListAssetActionsRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.dataplex_v1.services.dataplex_service.pagers.ListAssetActionsPager", + "shortName": "list_asset_actions" + }, + "description": "Sample for ListAssetActions", + "file": "dataplex_v1_generated_dataplex_service_list_asset_actions_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dataplex_v1_generated_DataplexService_ListAssetActions_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dataplex_v1_generated_dataplex_service_list_asset_actions_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.dataplex_v1.DataplexServiceAsyncClient", + "shortName": "DataplexServiceAsyncClient" + }, + "fullName": 
"google.cloud.dataplex_v1.DataplexServiceAsyncClient.list_assets", + "method": { + "fullName": "google.cloud.dataplex.v1.DataplexService.ListAssets", + "service": { + "fullName": "google.cloud.dataplex.v1.DataplexService", + "shortName": "DataplexService" + }, + "shortName": "ListAssets" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dataplex_v1.types.ListAssetsRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.dataplex_v1.services.dataplex_service.pagers.ListAssetsAsyncPager", + "shortName": "list_assets" + }, + "description": "Sample for ListAssets", + "file": "dataplex_v1_generated_dataplex_service_list_assets_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dataplex_v1_generated_DataplexService_ListAssets_async", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dataplex_v1_generated_dataplex_service_list_assets_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.dataplex_v1.DataplexServiceClient", + "shortName": "DataplexServiceClient" + }, + "fullName": "google.cloud.dataplex_v1.DataplexServiceClient.list_assets", + "method": { + "fullName": "google.cloud.dataplex.v1.DataplexService.ListAssets", + "service": { + "fullName": "google.cloud.dataplex.v1.DataplexService", + "shortName": "DataplexService" + }, + "shortName": "ListAssets" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dataplex_v1.types.ListAssetsRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.dataplex_v1.services.dataplex_service.pagers.ListAssetsPager", + "shortName": "list_assets" + }, + "description": "Sample for ListAssets", + "file": "dataplex_v1_generated_dataplex_service_list_assets_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dataplex_v1_generated_DataplexService_ListAssets_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dataplex_v1_generated_dataplex_service_list_assets_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.dataplex_v1.DataplexServiceAsyncClient", + "shortName": "DataplexServiceAsyncClient" + }, + "fullName": "google.cloud.dataplex_v1.DataplexServiceAsyncClient.list_environments", + "method": { + "fullName": "google.cloud.dataplex.v1.DataplexService.ListEnvironments", + "service": { + "fullName": 
"google.cloud.dataplex.v1.DataplexService", + "shortName": "DataplexService" + }, + "shortName": "ListEnvironments" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dataplex_v1.types.ListEnvironmentsRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.dataplex_v1.services.dataplex_service.pagers.ListEnvironmentsAsyncPager", + "shortName": "list_environments" + }, + "description": "Sample for ListEnvironments", + "file": "dataplex_v1_generated_dataplex_service_list_environments_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dataplex_v1_generated_DataplexService_ListEnvironments_async", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dataplex_v1_generated_dataplex_service_list_environments_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.dataplex_v1.DataplexServiceClient", + "shortName": "DataplexServiceClient" + }, + "fullName": "google.cloud.dataplex_v1.DataplexServiceClient.list_environments", + "method": { + "fullName": "google.cloud.dataplex.v1.DataplexService.ListEnvironments", + "service": { + "fullName": "google.cloud.dataplex.v1.DataplexService", + "shortName": "DataplexService" + }, + "shortName": "ListEnvironments" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dataplex_v1.types.ListEnvironmentsRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.dataplex_v1.services.dataplex_service.pagers.ListEnvironmentsPager", + "shortName": "list_environments" + }, + "description": "Sample for ListEnvironments", + "file": "dataplex_v1_generated_dataplex_service_list_environments_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dataplex_v1_generated_DataplexService_ListEnvironments_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dataplex_v1_generated_dataplex_service_list_environments_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.dataplex_v1.DataplexServiceAsyncClient", + "shortName": "DataplexServiceAsyncClient" + }, + "fullName": "google.cloud.dataplex_v1.DataplexServiceAsyncClient.list_jobs", + "method": { + "fullName": "google.cloud.dataplex.v1.DataplexService.ListJobs", + "service": { + "fullName": "google.cloud.dataplex.v1.DataplexService", + "shortName": "DataplexService" + }, + 
"shortName": "ListJobs" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dataplex_v1.types.ListJobsRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.dataplex_v1.services.dataplex_service.pagers.ListJobsAsyncPager", + "shortName": "list_jobs" + }, + "description": "Sample for ListJobs", + "file": "dataplex_v1_generated_dataplex_service_list_jobs_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dataplex_v1_generated_DataplexService_ListJobs_async", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dataplex_v1_generated_dataplex_service_list_jobs_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.dataplex_v1.DataplexServiceClient", + "shortName": "DataplexServiceClient" + }, + "fullName": "google.cloud.dataplex_v1.DataplexServiceClient.list_jobs", + "method": { + "fullName": "google.cloud.dataplex.v1.DataplexService.ListJobs", + "service": { + "fullName": "google.cloud.dataplex.v1.DataplexService", + "shortName": "DataplexService" + }, + "shortName": "ListJobs" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dataplex_v1.types.ListJobsRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.dataplex_v1.services.dataplex_service.pagers.ListJobsPager", + "shortName": "list_jobs" + }, + "description": "Sample for ListJobs", + "file": "dataplex_v1_generated_dataplex_service_list_jobs_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dataplex_v1_generated_DataplexService_ListJobs_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dataplex_v1_generated_dataplex_service_list_jobs_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.dataplex_v1.DataplexServiceAsyncClient", + "shortName": "DataplexServiceAsyncClient" + }, + "fullName": "google.cloud.dataplex_v1.DataplexServiceAsyncClient.list_lake_actions", + "method": { + "fullName": "google.cloud.dataplex.v1.DataplexService.ListLakeActions", + "service": { + "fullName": "google.cloud.dataplex.v1.DataplexService", + "shortName": "DataplexService" + }, + "shortName": "ListLakeActions" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dataplex_v1.types.ListLakeActionsRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": 
"retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.dataplex_v1.services.dataplex_service.pagers.ListLakeActionsAsyncPager", + "shortName": "list_lake_actions" + }, + "description": "Sample for ListLakeActions", + "file": "dataplex_v1_generated_dataplex_service_list_lake_actions_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dataplex_v1_generated_DataplexService_ListLakeActions_async", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dataplex_v1_generated_dataplex_service_list_lake_actions_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.dataplex_v1.DataplexServiceClient", + "shortName": "DataplexServiceClient" + }, + "fullName": "google.cloud.dataplex_v1.DataplexServiceClient.list_lake_actions", + "method": { + "fullName": "google.cloud.dataplex.v1.DataplexService.ListLakeActions", + "service": { + "fullName": "google.cloud.dataplex.v1.DataplexService", + "shortName": "DataplexService" + }, + "shortName": "ListLakeActions" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dataplex_v1.types.ListLakeActionsRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.dataplex_v1.services.dataplex_service.pagers.ListLakeActionsPager", + "shortName": "list_lake_actions" + }, + "description": "Sample for ListLakeActions", + "file": "dataplex_v1_generated_dataplex_service_list_lake_actions_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dataplex_v1_generated_DataplexService_ListLakeActions_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dataplex_v1_generated_dataplex_service_list_lake_actions_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.dataplex_v1.DataplexServiceAsyncClient", + "shortName": "DataplexServiceAsyncClient" + }, + "fullName": "google.cloud.dataplex_v1.DataplexServiceAsyncClient.list_lakes", + "method": { + "fullName": "google.cloud.dataplex.v1.DataplexService.ListLakes", + "service": { + "fullName": "google.cloud.dataplex.v1.DataplexService", + "shortName": "DataplexService" + }, + "shortName": "ListLakes" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dataplex_v1.types.ListLakesRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + 
"name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.dataplex_v1.services.dataplex_service.pagers.ListLakesAsyncPager", + "shortName": "list_lakes" + }, + "description": "Sample for ListLakes", + "file": "dataplex_v1_generated_dataplex_service_list_lakes_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dataplex_v1_generated_DataplexService_ListLakes_async", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dataplex_v1_generated_dataplex_service_list_lakes_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.dataplex_v1.DataplexServiceClient", + "shortName": "DataplexServiceClient" + }, + "fullName": "google.cloud.dataplex_v1.DataplexServiceClient.list_lakes", + "method": { + "fullName": "google.cloud.dataplex.v1.DataplexService.ListLakes", + "service": { + "fullName": "google.cloud.dataplex.v1.DataplexService", + "shortName": "DataplexService" + }, + "shortName": "ListLakes" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dataplex_v1.types.ListLakesRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.dataplex_v1.services.dataplex_service.pagers.ListLakesPager", + "shortName": "list_lakes" + }, + "description": "Sample for ListLakes", + "file": "dataplex_v1_generated_dataplex_service_list_lakes_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dataplex_v1_generated_DataplexService_ListLakes_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dataplex_v1_generated_dataplex_service_list_lakes_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.dataplex_v1.DataplexServiceAsyncClient", + "shortName": "DataplexServiceAsyncClient" + }, + "fullName": "google.cloud.dataplex_v1.DataplexServiceAsyncClient.list_sessions", + "method": { + "fullName": "google.cloud.dataplex.v1.DataplexService.ListSessions", + "service": { + "fullName": "google.cloud.dataplex.v1.DataplexService", + "shortName": "DataplexService" + }, + "shortName": "ListSessions" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dataplex_v1.types.ListSessionsRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.dataplex_v1.services.dataplex_service.pagers.ListSessionsAsyncPager", + "shortName": "list_sessions" + 
}, + "description": "Sample for ListSessions", + "file": "dataplex_v1_generated_dataplex_service_list_sessions_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dataplex_v1_generated_DataplexService_ListSessions_async", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dataplex_v1_generated_dataplex_service_list_sessions_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.dataplex_v1.DataplexServiceClient", + "shortName": "DataplexServiceClient" + }, + "fullName": "google.cloud.dataplex_v1.DataplexServiceClient.list_sessions", + "method": { + "fullName": "google.cloud.dataplex.v1.DataplexService.ListSessions", + "service": { + "fullName": "google.cloud.dataplex.v1.DataplexService", + "shortName": "DataplexService" + }, + "shortName": "ListSessions" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dataplex_v1.types.ListSessionsRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.dataplex_v1.services.dataplex_service.pagers.ListSessionsPager", + "shortName": "list_sessions" + }, + "description": "Sample for ListSessions", + "file": "dataplex_v1_generated_dataplex_service_list_sessions_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dataplex_v1_generated_DataplexService_ListSessions_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dataplex_v1_generated_dataplex_service_list_sessions_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.dataplex_v1.DataplexServiceAsyncClient", + "shortName": "DataplexServiceAsyncClient" + }, + "fullName": "google.cloud.dataplex_v1.DataplexServiceAsyncClient.list_tasks", + "method": { + "fullName": "google.cloud.dataplex.v1.DataplexService.ListTasks", + "service": { + "fullName": "google.cloud.dataplex.v1.DataplexService", + "shortName": "DataplexService" + }, + "shortName": "ListTasks" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dataplex_v1.types.ListTasksRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.dataplex_v1.services.dataplex_service.pagers.ListTasksAsyncPager", + "shortName": "list_tasks" + }, + "description": "Sample for ListTasks", + "file": "dataplex_v1_generated_dataplex_service_list_tasks_async.py", + "language": "PYTHON", + "origin": 
"API_DEFINITION", + "regionTag": "dataplex_v1_generated_DataplexService_ListTasks_async", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dataplex_v1_generated_dataplex_service_list_tasks_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.dataplex_v1.DataplexServiceClient", + "shortName": "DataplexServiceClient" + }, + "fullName": "google.cloud.dataplex_v1.DataplexServiceClient.list_tasks", + "method": { + "fullName": "google.cloud.dataplex.v1.DataplexService.ListTasks", + "service": { + "fullName": "google.cloud.dataplex.v1.DataplexService", + "shortName": "DataplexService" + }, + "shortName": "ListTasks" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dataplex_v1.types.ListTasksRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.dataplex_v1.services.dataplex_service.pagers.ListTasksPager", + "shortName": "list_tasks" + }, + "description": "Sample for ListTasks", + "file": "dataplex_v1_generated_dataplex_service_list_tasks_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dataplex_v1_generated_DataplexService_ListTasks_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dataplex_v1_generated_dataplex_service_list_tasks_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.dataplex_v1.DataplexServiceAsyncClient", + "shortName": "DataplexServiceAsyncClient" + }, + "fullName": "google.cloud.dataplex_v1.DataplexServiceAsyncClient.list_zone_actions", + "method": { + "fullName": "google.cloud.dataplex.v1.DataplexService.ListZoneActions", + "service": { + "fullName": "google.cloud.dataplex.v1.DataplexService", + "shortName": "DataplexService" + }, + "shortName": "ListZoneActions" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dataplex_v1.types.ListZoneActionsRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.dataplex_v1.services.dataplex_service.pagers.ListZoneActionsAsyncPager", + "shortName": "list_zone_actions" + }, + "description": "Sample for ListZoneActions", + "file": "dataplex_v1_generated_dataplex_service_list_zone_actions_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dataplex_v1_generated_DataplexService_ListZoneActions_async", + "segments": [ + { + "end": 52, + "start": 27, + "type": 
"FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dataplex_v1_generated_dataplex_service_list_zone_actions_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.dataplex_v1.DataplexServiceClient", + "shortName": "DataplexServiceClient" + }, + "fullName": "google.cloud.dataplex_v1.DataplexServiceClient.list_zone_actions", + "method": { + "fullName": "google.cloud.dataplex.v1.DataplexService.ListZoneActions", + "service": { + "fullName": "google.cloud.dataplex.v1.DataplexService", + "shortName": "DataplexService" + }, + "shortName": "ListZoneActions" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dataplex_v1.types.ListZoneActionsRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.dataplex_v1.services.dataplex_service.pagers.ListZoneActionsPager", + "shortName": "list_zone_actions" + }, + "description": "Sample for ListZoneActions", + "file": "dataplex_v1_generated_dataplex_service_list_zone_actions_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dataplex_v1_generated_DataplexService_ListZoneActions_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dataplex_v1_generated_dataplex_service_list_zone_actions_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.dataplex_v1.DataplexServiceAsyncClient", + "shortName": "DataplexServiceAsyncClient" + }, + "fullName": "google.cloud.dataplex_v1.DataplexServiceAsyncClient.list_zones", + "method": { + "fullName": "google.cloud.dataplex.v1.DataplexService.ListZones", + "service": { + "fullName": "google.cloud.dataplex.v1.DataplexService", + "shortName": "DataplexService" + }, + "shortName": "ListZones" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dataplex_v1.types.ListZonesRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.dataplex_v1.services.dataplex_service.pagers.ListZonesAsyncPager", + "shortName": "list_zones" + }, + "description": "Sample for ListZones", + "file": "dataplex_v1_generated_dataplex_service_list_zones_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dataplex_v1_generated_DataplexService_ListZones_async", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + 
{ + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dataplex_v1_generated_dataplex_service_list_zones_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.dataplex_v1.DataplexServiceClient", + "shortName": "DataplexServiceClient" + }, + "fullName": "google.cloud.dataplex_v1.DataplexServiceClient.list_zones", + "method": { + "fullName": "google.cloud.dataplex.v1.DataplexService.ListZones", + "service": { + "fullName": "google.cloud.dataplex.v1.DataplexService", + "shortName": "DataplexService" + }, + "shortName": "ListZones" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dataplex_v1.types.ListZonesRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.dataplex_v1.services.dataplex_service.pagers.ListZonesPager", + "shortName": "list_zones" + }, + "description": "Sample for ListZones", + "file": "dataplex_v1_generated_dataplex_service_list_zones_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dataplex_v1_generated_DataplexService_ListZones_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dataplex_v1_generated_dataplex_service_list_zones_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.dataplex_v1.DataplexServiceAsyncClient", + "shortName": "DataplexServiceAsyncClient" + }, + "fullName": "google.cloud.dataplex_v1.DataplexServiceAsyncClient.run_task", + "method": { + "fullName": "google.cloud.dataplex.v1.DataplexService.RunTask", + "service": { + "fullName": "google.cloud.dataplex.v1.DataplexService", + "shortName": "DataplexService" + }, + "shortName": "RunTask" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dataplex_v1.types.RunTaskRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.dataplex_v1.types.RunTaskResponse", + "shortName": "run_task" + }, + "description": "Sample for RunTask", + "file": "dataplex_v1_generated_dataplex_service_run_task_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dataplex_v1_generated_DataplexService_RunTask_async", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": 
"dataplex_v1_generated_dataplex_service_run_task_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.dataplex_v1.DataplexServiceClient", + "shortName": "DataplexServiceClient" + }, + "fullName": "google.cloud.dataplex_v1.DataplexServiceClient.run_task", + "method": { + "fullName": "google.cloud.dataplex.v1.DataplexService.RunTask", + "service": { + "fullName": "google.cloud.dataplex.v1.DataplexService", + "shortName": "DataplexService" + }, + "shortName": "RunTask" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dataplex_v1.types.RunTaskRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.dataplex_v1.types.RunTaskResponse", + "shortName": "run_task" + }, + "description": "Sample for RunTask", + "file": "dataplex_v1_generated_dataplex_service_run_task_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dataplex_v1_generated_DataplexService_RunTask_sync", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dataplex_v1_generated_dataplex_service_run_task_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.dataplex_v1.DataplexServiceAsyncClient", + "shortName": "DataplexServiceAsyncClient" + }, + "fullName": "google.cloud.dataplex_v1.DataplexServiceAsyncClient.update_asset", + "method": { + "fullName": "google.cloud.dataplex.v1.DataplexService.UpdateAsset", + "service": { + "fullName": "google.cloud.dataplex.v1.DataplexService", + "shortName": "DataplexService" + }, + "shortName": "UpdateAsset" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dataplex_v1.types.UpdateAssetRequest" + }, + { + "name": "asset", + "type": "google.cloud.dataplex_v1.types.Asset" + }, + { + "name": "update_mask", + "type": "google.protobuf.field_mask_pb2.FieldMask" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation_async.AsyncOperation", + "shortName": "update_asset" + }, + "description": "Sample for UpdateAsset", + "file": "dataplex_v1_generated_dataplex_service_update_asset_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dataplex_v1_generated_DataplexService_UpdateAsset_async", + "segments": [ + { + "end": 58, + "start": 27, + "type": "FULL" + }, + { + "end": 58, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 48, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 55, + "start": 49, + "type": "REQUEST_EXECUTION" + }, + { + "end": 59, + "start": 56, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dataplex_v1_generated_dataplex_service_update_asset_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": 
"google.cloud.dataplex_v1.DataplexServiceClient", + "shortName": "DataplexServiceClient" + }, + "fullName": "google.cloud.dataplex_v1.DataplexServiceClient.update_asset", + "method": { + "fullName": "google.cloud.dataplex.v1.DataplexService.UpdateAsset", + "service": { + "fullName": "google.cloud.dataplex.v1.DataplexService", + "shortName": "DataplexService" + }, + "shortName": "UpdateAsset" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dataplex_v1.types.UpdateAssetRequest" + }, + { + "name": "asset", + "type": "google.cloud.dataplex_v1.types.Asset" + }, + { + "name": "update_mask", + "type": "google.protobuf.field_mask_pb2.FieldMask" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation.Operation", + "shortName": "update_asset" + }, + "description": "Sample for UpdateAsset", + "file": "dataplex_v1_generated_dataplex_service_update_asset_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dataplex_v1_generated_DataplexService_UpdateAsset_sync", + "segments": [ + { + "end": 58, + "start": 27, + "type": "FULL" + }, + { + "end": 58, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 48, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 55, + "start": 49, + "type": "REQUEST_EXECUTION" + }, + { + "end": 59, + "start": 56, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dataplex_v1_generated_dataplex_service_update_asset_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.dataplex_v1.DataplexServiceAsyncClient", + "shortName": "DataplexServiceAsyncClient" + }, + "fullName": "google.cloud.dataplex_v1.DataplexServiceAsyncClient.update_environment", + "method": { + "fullName": "google.cloud.dataplex.v1.DataplexService.UpdateEnvironment", + "service": { + "fullName": "google.cloud.dataplex.v1.DataplexService", + "shortName": "DataplexService" + }, + "shortName": "UpdateEnvironment" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dataplex_v1.types.UpdateEnvironmentRequest" + }, + { + "name": "environment", + "type": "google.cloud.dataplex_v1.types.Environment" + }, + { + "name": "update_mask", + "type": "google.protobuf.field_mask_pb2.FieldMask" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation_async.AsyncOperation", + "shortName": "update_environment" + }, + "description": "Sample for UpdateEnvironment", + "file": "dataplex_v1_generated_dataplex_service_update_environment_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dataplex_v1_generated_DataplexService_UpdateEnvironment_async", + "segments": [ + { + "end": 58, + "start": 27, + "type": "FULL" + }, + { + "end": 58, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 48, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 55, + "start": 49, + "type": "REQUEST_EXECUTION" + }, + { + "end": 59, + "start": 56, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dataplex_v1_generated_dataplex_service_update_environment_async.py" + }, + { + 
"canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.dataplex_v1.DataplexServiceClient", + "shortName": "DataplexServiceClient" + }, + "fullName": "google.cloud.dataplex_v1.DataplexServiceClient.update_environment", + "method": { + "fullName": "google.cloud.dataplex.v1.DataplexService.UpdateEnvironment", + "service": { + "fullName": "google.cloud.dataplex.v1.DataplexService", + "shortName": "DataplexService" + }, + "shortName": "UpdateEnvironment" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dataplex_v1.types.UpdateEnvironmentRequest" + }, + { + "name": "environment", + "type": "google.cloud.dataplex_v1.types.Environment" + }, + { + "name": "update_mask", + "type": "google.protobuf.field_mask_pb2.FieldMask" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation.Operation", + "shortName": "update_environment" + }, + "description": "Sample for UpdateEnvironment", + "file": "dataplex_v1_generated_dataplex_service_update_environment_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dataplex_v1_generated_DataplexService_UpdateEnvironment_sync", + "segments": [ + { + "end": 58, + "start": 27, + "type": "FULL" + }, + { + "end": 58, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 48, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 55, + "start": 49, + "type": "REQUEST_EXECUTION" + }, + { + "end": 59, + "start": 56, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dataplex_v1_generated_dataplex_service_update_environment_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.dataplex_v1.DataplexServiceAsyncClient", + "shortName": "DataplexServiceAsyncClient" + }, + "fullName": "google.cloud.dataplex_v1.DataplexServiceAsyncClient.update_lake", + "method": { + "fullName": "google.cloud.dataplex.v1.DataplexService.UpdateLake", + "service": { + "fullName": "google.cloud.dataplex.v1.DataplexService", + "shortName": "DataplexService" + }, + "shortName": "UpdateLake" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dataplex_v1.types.UpdateLakeRequest" + }, + { + "name": "lake", + "type": "google.cloud.dataplex_v1.types.Lake" + }, + { + "name": "update_mask", + "type": "google.protobuf.field_mask_pb2.FieldMask" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation_async.AsyncOperation", + "shortName": "update_lake" + }, + "description": "Sample for UpdateLake", + "file": "dataplex_v1_generated_dataplex_service_update_lake_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dataplex_v1_generated_DataplexService_UpdateLake_async", + "segments": [ + { + "end": 54, + "start": 27, + "type": "FULL" + }, + { + "end": 54, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 44, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 51, + "start": 45, + "type": "REQUEST_EXECUTION" + }, + { + "end": 55, + "start": 52, + "type": "RESPONSE_HANDLING" + } + ], + "title": 
"dataplex_v1_generated_dataplex_service_update_lake_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.dataplex_v1.DataplexServiceClient", + "shortName": "DataplexServiceClient" + }, + "fullName": "google.cloud.dataplex_v1.DataplexServiceClient.update_lake", + "method": { + "fullName": "google.cloud.dataplex.v1.DataplexService.UpdateLake", + "service": { + "fullName": "google.cloud.dataplex.v1.DataplexService", + "shortName": "DataplexService" + }, + "shortName": "UpdateLake" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dataplex_v1.types.UpdateLakeRequest" + }, + { + "name": "lake", + "type": "google.cloud.dataplex_v1.types.Lake" + }, + { + "name": "update_mask", + "type": "google.protobuf.field_mask_pb2.FieldMask" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation.Operation", + "shortName": "update_lake" + }, + "description": "Sample for UpdateLake", + "file": "dataplex_v1_generated_dataplex_service_update_lake_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dataplex_v1_generated_DataplexService_UpdateLake_sync", + "segments": [ + { + "end": 54, + "start": 27, + "type": "FULL" + }, + { + "end": 54, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 44, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 51, + "start": 45, + "type": "REQUEST_EXECUTION" + }, + { + "end": 55, + "start": 52, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dataplex_v1_generated_dataplex_service_update_lake_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.dataplex_v1.DataplexServiceAsyncClient", + "shortName": "DataplexServiceAsyncClient" + }, + "fullName": "google.cloud.dataplex_v1.DataplexServiceAsyncClient.update_task", + "method": { + "fullName": "google.cloud.dataplex.v1.DataplexService.UpdateTask", + "service": { + "fullName": "google.cloud.dataplex.v1.DataplexService", + "shortName": "DataplexService" + }, + "shortName": "UpdateTask" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dataplex_v1.types.UpdateTaskRequest" + }, + { + "name": "task", + "type": "google.cloud.dataplex_v1.types.Task" + }, + { + "name": "update_mask", + "type": "google.protobuf.field_mask_pb2.FieldMask" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation_async.AsyncOperation", + "shortName": "update_task" + }, + "description": "Sample for UpdateTask", + "file": "dataplex_v1_generated_dataplex_service_update_task_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dataplex_v1_generated_DataplexService_UpdateTask_async", + "segments": [ + { + "end": 61, + "start": 27, + "type": "FULL" + }, + { + "end": 61, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 51, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 58, + "start": 52, + "type": "REQUEST_EXECUTION" + }, + { + "end": 62, + "start": 59, + "type": "RESPONSE_HANDLING" + } + ], + "title": 
"dataplex_v1_generated_dataplex_service_update_task_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.dataplex_v1.DataplexServiceClient", + "shortName": "DataplexServiceClient" + }, + "fullName": "google.cloud.dataplex_v1.DataplexServiceClient.update_task", + "method": { + "fullName": "google.cloud.dataplex.v1.DataplexService.UpdateTask", + "service": { + "fullName": "google.cloud.dataplex.v1.DataplexService", + "shortName": "DataplexService" + }, + "shortName": "UpdateTask" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dataplex_v1.types.UpdateTaskRequest" + }, + { + "name": "task", + "type": "google.cloud.dataplex_v1.types.Task" + }, + { + "name": "update_mask", + "type": "google.protobuf.field_mask_pb2.FieldMask" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation.Operation", + "shortName": "update_task" + }, + "description": "Sample for UpdateTask", + "file": "dataplex_v1_generated_dataplex_service_update_task_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dataplex_v1_generated_DataplexService_UpdateTask_sync", + "segments": [ + { + "end": 61, + "start": 27, + "type": "FULL" + }, + { + "end": 61, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 51, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 58, + "start": 52, + "type": "REQUEST_EXECUTION" + }, + { + "end": 62, + "start": 59, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dataplex_v1_generated_dataplex_service_update_task_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.dataplex_v1.DataplexServiceAsyncClient", + "shortName": "DataplexServiceAsyncClient" + }, + "fullName": "google.cloud.dataplex_v1.DataplexServiceAsyncClient.update_zone", + "method": { + "fullName": "google.cloud.dataplex.v1.DataplexService.UpdateZone", + "service": { + "fullName": "google.cloud.dataplex.v1.DataplexService", + "shortName": "DataplexService" + }, + "shortName": "UpdateZone" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dataplex_v1.types.UpdateZoneRequest" + }, + { + "name": "zone", + "type": "google.cloud.dataplex_v1.types.Zone" + }, + { + "name": "update_mask", + "type": "google.protobuf.field_mask_pb2.FieldMask" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation_async.AsyncOperation", + "shortName": "update_zone" + }, + "description": "Sample for UpdateZone", + "file": "dataplex_v1_generated_dataplex_service_update_zone_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dataplex_v1_generated_DataplexService_UpdateZone_async", + "segments": [ + { + "end": 59, + "start": 27, + "type": "FULL" + }, + { + "end": 59, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 49, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 56, + "start": 50, + "type": "REQUEST_EXECUTION" + }, + { + "end": 60, + "start": 57, + "type": "RESPONSE_HANDLING" + } + ], + "title": 
"dataplex_v1_generated_dataplex_service_update_zone_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.dataplex_v1.DataplexServiceClient", + "shortName": "DataplexServiceClient" + }, + "fullName": "google.cloud.dataplex_v1.DataplexServiceClient.update_zone", + "method": { + "fullName": "google.cloud.dataplex.v1.DataplexService.UpdateZone", + "service": { + "fullName": "google.cloud.dataplex.v1.DataplexService", + "shortName": "DataplexService" + }, + "shortName": "UpdateZone" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dataplex_v1.types.UpdateZoneRequest" + }, + { + "name": "zone", + "type": "google.cloud.dataplex_v1.types.Zone" + }, + { + "name": "update_mask", + "type": "google.protobuf.field_mask_pb2.FieldMask" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation.Operation", + "shortName": "update_zone" + }, + "description": "Sample for UpdateZone", + "file": "dataplex_v1_generated_dataplex_service_update_zone_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dataplex_v1_generated_DataplexService_UpdateZone_sync", + "segments": [ + { + "end": 59, + "start": 27, + "type": "FULL" + }, + { + "end": 59, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 49, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 56, + "start": 50, + "type": "REQUEST_EXECUTION" + }, + { + "end": 60, + "start": 57, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dataplex_v1_generated_dataplex_service_update_zone_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.dataplex_v1.MetadataServiceAsyncClient", + "shortName": "MetadataServiceAsyncClient" + }, + "fullName": "google.cloud.dataplex_v1.MetadataServiceAsyncClient.create_entity", + "method": { + "fullName": "google.cloud.dataplex.v1.MetadataService.CreateEntity", + "service": { + "fullName": "google.cloud.dataplex.v1.MetadataService", + "shortName": "MetadataService" + }, + "shortName": "CreateEntity" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dataplex_v1.types.CreateEntityRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "entity", + "type": "google.cloud.dataplex_v1.types.Entity" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.dataplex_v1.types.Entity", + "shortName": "create_entity" + }, + "description": "Sample for CreateEntity", + "file": "dataplex_v1_generated_metadata_service_create_entity_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dataplex_v1_generated_MetadataService_CreateEntity_async", + "segments": [ + { + "end": 61, + "start": 27, + "type": "FULL" + }, + { + "end": 61, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 55, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 58, + "start": 56, + "type": "REQUEST_EXECUTION" + }, + { + "end": 62, + "start": 59, + "type": "RESPONSE_HANDLING" + } + ], + "title": 
"dataplex_v1_generated_metadata_service_create_entity_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.dataplex_v1.MetadataServiceClient", + "shortName": "MetadataServiceClient" + }, + "fullName": "google.cloud.dataplex_v1.MetadataServiceClient.create_entity", + "method": { + "fullName": "google.cloud.dataplex.v1.MetadataService.CreateEntity", + "service": { + "fullName": "google.cloud.dataplex.v1.MetadataService", + "shortName": "MetadataService" + }, + "shortName": "CreateEntity" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dataplex_v1.types.CreateEntityRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "entity", + "type": "google.cloud.dataplex_v1.types.Entity" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.dataplex_v1.types.Entity", + "shortName": "create_entity" + }, + "description": "Sample for CreateEntity", + "file": "dataplex_v1_generated_metadata_service_create_entity_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dataplex_v1_generated_MetadataService_CreateEntity_sync", + "segments": [ + { + "end": 61, + "start": 27, + "type": "FULL" + }, + { + "end": 61, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 55, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 58, + "start": 56, + "type": "REQUEST_EXECUTION" + }, + { + "end": 62, + "start": 59, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dataplex_v1_generated_metadata_service_create_entity_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.dataplex_v1.MetadataServiceAsyncClient", + "shortName": "MetadataServiceAsyncClient" + }, + "fullName": "google.cloud.dataplex_v1.MetadataServiceAsyncClient.create_partition", + "method": { + "fullName": "google.cloud.dataplex.v1.MetadataService.CreatePartition", + "service": { + "fullName": "google.cloud.dataplex.v1.MetadataService", + "shortName": "MetadataService" + }, + "shortName": "CreatePartition" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dataplex_v1.types.CreatePartitionRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "partition", + "type": "google.cloud.dataplex_v1.types.Partition" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.dataplex_v1.types.Partition", + "shortName": "create_partition" + }, + "description": "Sample for CreatePartition", + "file": "dataplex_v1_generated_metadata_service_create_partition_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dataplex_v1_generated_MetadataService_CreatePartition_async", + "segments": [ + { + "end": 56, + "start": 27, + "type": "FULL" + }, + { + "end": 56, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 50, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 53, + "start": 51, + "type": "REQUEST_EXECUTION" + }, + { + "end": 57, + "start": 54, + "type": "RESPONSE_HANDLING" + } + ], + "title": 
"dataplex_v1_generated_metadata_service_create_partition_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.dataplex_v1.MetadataServiceClient", + "shortName": "MetadataServiceClient" + }, + "fullName": "google.cloud.dataplex_v1.MetadataServiceClient.create_partition", + "method": { + "fullName": "google.cloud.dataplex.v1.MetadataService.CreatePartition", + "service": { + "fullName": "google.cloud.dataplex.v1.MetadataService", + "shortName": "MetadataService" + }, + "shortName": "CreatePartition" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dataplex_v1.types.CreatePartitionRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "partition", + "type": "google.cloud.dataplex_v1.types.Partition" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.dataplex_v1.types.Partition", + "shortName": "create_partition" + }, + "description": "Sample for CreatePartition", + "file": "dataplex_v1_generated_metadata_service_create_partition_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dataplex_v1_generated_MetadataService_CreatePartition_sync", + "segments": [ + { + "end": 56, + "start": 27, + "type": "FULL" + }, + { + "end": 56, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 50, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 53, + "start": 51, + "type": "REQUEST_EXECUTION" + }, + { + "end": 57, + "start": 54, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dataplex_v1_generated_metadata_service_create_partition_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.dataplex_v1.MetadataServiceAsyncClient", + "shortName": "MetadataServiceAsyncClient" + }, + "fullName": "google.cloud.dataplex_v1.MetadataServiceAsyncClient.delete_entity", + "method": { + "fullName": "google.cloud.dataplex.v1.MetadataService.DeleteEntity", + "service": { + "fullName": "google.cloud.dataplex.v1.MetadataService", + "shortName": "MetadataService" + }, + "shortName": "DeleteEntity" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dataplex_v1.types.DeleteEntityRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "shortName": "delete_entity" + }, + "description": "Sample for DeleteEntity", + "file": "dataplex_v1_generated_metadata_service_delete_entity_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dataplex_v1_generated_MetadataService_DeleteEntity_async", + "segments": [ + { + "end": 50, + "start": 27, + "type": "FULL" + }, + { + "end": 50, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 46, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "start": 47, + "type": "REQUEST_EXECUTION" + }, + { + "end": 51, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dataplex_v1_generated_metadata_service_delete_entity_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.dataplex_v1.MetadataServiceClient", + 
"shortName": "MetadataServiceClient" + }, + "fullName": "google.cloud.dataplex_v1.MetadataServiceClient.delete_entity", + "method": { + "fullName": "google.cloud.dataplex.v1.MetadataService.DeleteEntity", + "service": { + "fullName": "google.cloud.dataplex.v1.MetadataService", + "shortName": "MetadataService" + }, + "shortName": "DeleteEntity" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dataplex_v1.types.DeleteEntityRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "shortName": "delete_entity" + }, + "description": "Sample for DeleteEntity", + "file": "dataplex_v1_generated_metadata_service_delete_entity_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dataplex_v1_generated_MetadataService_DeleteEntity_sync", + "segments": [ + { + "end": 50, + "start": 27, + "type": "FULL" + }, + { + "end": 50, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 46, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "start": 47, + "type": "REQUEST_EXECUTION" + }, + { + "end": 51, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dataplex_v1_generated_metadata_service_delete_entity_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.dataplex_v1.MetadataServiceAsyncClient", + "shortName": "MetadataServiceAsyncClient" + }, + "fullName": "google.cloud.dataplex_v1.MetadataServiceAsyncClient.delete_partition", + "method": { + "fullName": "google.cloud.dataplex.v1.MetadataService.DeletePartition", + "service": { + "fullName": "google.cloud.dataplex.v1.MetadataService", + "shortName": "MetadataService" + }, + "shortName": "DeletePartition" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dataplex_v1.types.DeletePartitionRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "shortName": "delete_partition" + }, + "description": "Sample for DeletePartition", + "file": "dataplex_v1_generated_metadata_service_delete_partition_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dataplex_v1_generated_MetadataService_DeletePartition_async", + "segments": [ + { + "end": 49, + "start": 27, + "type": "FULL" + }, + { + "end": 49, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dataplex_v1_generated_metadata_service_delete_partition_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.dataplex_v1.MetadataServiceClient", + "shortName": "MetadataServiceClient" + }, + "fullName": "google.cloud.dataplex_v1.MetadataServiceClient.delete_partition", + "method": { + "fullName": "google.cloud.dataplex.v1.MetadataService.DeletePartition", + "service": { + "fullName": "google.cloud.dataplex.v1.MetadataService", + "shortName": "MetadataService" + }, + "shortName": "DeletePartition" + }, + "parameters": [ 
+ { + "name": "request", + "type": "google.cloud.dataplex_v1.types.DeletePartitionRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "shortName": "delete_partition" + }, + "description": "Sample for DeletePartition", + "file": "dataplex_v1_generated_metadata_service_delete_partition_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dataplex_v1_generated_MetadataService_DeletePartition_sync", + "segments": [ + { + "end": 49, + "start": 27, + "type": "FULL" + }, + { + "end": 49, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dataplex_v1_generated_metadata_service_delete_partition_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.dataplex_v1.MetadataServiceAsyncClient", + "shortName": "MetadataServiceAsyncClient" + }, + "fullName": "google.cloud.dataplex_v1.MetadataServiceAsyncClient.get_entity", + "method": { + "fullName": "google.cloud.dataplex.v1.MetadataService.GetEntity", + "service": { + "fullName": "google.cloud.dataplex.v1.MetadataService", + "shortName": "MetadataService" + }, + "shortName": "GetEntity" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dataplex_v1.types.GetEntityRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.dataplex_v1.types.Entity", + "shortName": "get_entity" + }, + "description": "Sample for GetEntity", + "file": "dataplex_v1_generated_metadata_service_get_entity_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dataplex_v1_generated_MetadataService_GetEntity_async", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dataplex_v1_generated_metadata_service_get_entity_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.dataplex_v1.MetadataServiceClient", + "shortName": "MetadataServiceClient" + }, + "fullName": "google.cloud.dataplex_v1.MetadataServiceClient.get_entity", + "method": { + "fullName": "google.cloud.dataplex.v1.MetadataService.GetEntity", + "service": { + "fullName": "google.cloud.dataplex.v1.MetadataService", + "shortName": "MetadataService" + }, + "shortName": "GetEntity" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dataplex_v1.types.GetEntityRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": 
"google.cloud.dataplex_v1.types.Entity", + "shortName": "get_entity" + }, + "description": "Sample for GetEntity", + "file": "dataplex_v1_generated_metadata_service_get_entity_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dataplex_v1_generated_MetadataService_GetEntity_sync", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dataplex_v1_generated_metadata_service_get_entity_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.dataplex_v1.MetadataServiceAsyncClient", + "shortName": "MetadataServiceAsyncClient" + }, + "fullName": "google.cloud.dataplex_v1.MetadataServiceAsyncClient.get_partition", + "method": { + "fullName": "google.cloud.dataplex.v1.MetadataService.GetPartition", + "service": { + "fullName": "google.cloud.dataplex.v1.MetadataService", + "shortName": "MetadataService" + }, + "shortName": "GetPartition" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dataplex_v1.types.GetPartitionRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.dataplex_v1.types.Partition", + "shortName": "get_partition" + }, + "description": "Sample for GetPartition", + "file": "dataplex_v1_generated_metadata_service_get_partition_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dataplex_v1_generated_MetadataService_GetPartition_async", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dataplex_v1_generated_metadata_service_get_partition_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.dataplex_v1.MetadataServiceClient", + "shortName": "MetadataServiceClient" + }, + "fullName": "google.cloud.dataplex_v1.MetadataServiceClient.get_partition", + "method": { + "fullName": "google.cloud.dataplex.v1.MetadataService.GetPartition", + "service": { + "fullName": "google.cloud.dataplex.v1.MetadataService", + "shortName": "MetadataService" + }, + "shortName": "GetPartition" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dataplex_v1.types.GetPartitionRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.dataplex_v1.types.Partition", + "shortName": "get_partition" + }, + "description": "Sample for GetPartition", + "file": "dataplex_v1_generated_metadata_service_get_partition_sync.py", + "language": "PYTHON", + "origin": 
"API_DEFINITION", + "regionTag": "dataplex_v1_generated_MetadataService_GetPartition_sync", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dataplex_v1_generated_metadata_service_get_partition_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.dataplex_v1.MetadataServiceAsyncClient", + "shortName": "MetadataServiceAsyncClient" + }, + "fullName": "google.cloud.dataplex_v1.MetadataServiceAsyncClient.list_entities", + "method": { + "fullName": "google.cloud.dataplex.v1.MetadataService.ListEntities", + "service": { + "fullName": "google.cloud.dataplex.v1.MetadataService", + "shortName": "MetadataService" + }, + "shortName": "ListEntities" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dataplex_v1.types.ListEntitiesRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.dataplex_v1.services.metadata_service.pagers.ListEntitiesAsyncPager", + "shortName": "list_entities" + }, + "description": "Sample for ListEntities", + "file": "dataplex_v1_generated_metadata_service_list_entities_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dataplex_v1_generated_MetadataService_ListEntities_async", + "segments": [ + { + "end": 53, + "start": 27, + "type": "FULL" + }, + { + "end": 53, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 46, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 49, + "start": 47, + "type": "REQUEST_EXECUTION" + }, + { + "end": 54, + "start": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dataplex_v1_generated_metadata_service_list_entities_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.dataplex_v1.MetadataServiceClient", + "shortName": "MetadataServiceClient" + }, + "fullName": "google.cloud.dataplex_v1.MetadataServiceClient.list_entities", + "method": { + "fullName": "google.cloud.dataplex.v1.MetadataService.ListEntities", + "service": { + "fullName": "google.cloud.dataplex.v1.MetadataService", + "shortName": "MetadataService" + }, + "shortName": "ListEntities" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dataplex_v1.types.ListEntitiesRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.dataplex_v1.services.metadata_service.pagers.ListEntitiesPager", + "shortName": "list_entities" + }, + "description": "Sample for ListEntities", + "file": "dataplex_v1_generated_metadata_service_list_entities_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dataplex_v1_generated_MetadataService_ListEntities_sync", + "segments": [ + { + "end": 53, + "start": 27, + "type": 
"FULL" + }, + { + "end": 53, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 46, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 49, + "start": 47, + "type": "REQUEST_EXECUTION" + }, + { + "end": 54, + "start": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dataplex_v1_generated_metadata_service_list_entities_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.dataplex_v1.MetadataServiceAsyncClient", + "shortName": "MetadataServiceAsyncClient" + }, + "fullName": "google.cloud.dataplex_v1.MetadataServiceAsyncClient.list_partitions", + "method": { + "fullName": "google.cloud.dataplex.v1.MetadataService.ListPartitions", + "service": { + "fullName": "google.cloud.dataplex.v1.MetadataService", + "shortName": "MetadataService" + }, + "shortName": "ListPartitions" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dataplex_v1.types.ListPartitionsRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.dataplex_v1.services.metadata_service.pagers.ListPartitionsAsyncPager", + "shortName": "list_partitions" + }, + "description": "Sample for ListPartitions", + "file": "dataplex_v1_generated_metadata_service_list_partitions_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dataplex_v1_generated_MetadataService_ListPartitions_async", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dataplex_v1_generated_metadata_service_list_partitions_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.dataplex_v1.MetadataServiceClient", + "shortName": "MetadataServiceClient" + }, + "fullName": "google.cloud.dataplex_v1.MetadataServiceClient.list_partitions", + "method": { + "fullName": "google.cloud.dataplex.v1.MetadataService.ListPartitions", + "service": { + "fullName": "google.cloud.dataplex.v1.MetadataService", + "shortName": "MetadataService" + }, + "shortName": "ListPartitions" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dataplex_v1.types.ListPartitionsRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.dataplex_v1.services.metadata_service.pagers.ListPartitionsPager", + "shortName": "list_partitions" + }, + "description": "Sample for ListPartitions", + "file": "dataplex_v1_generated_metadata_service_list_partitions_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dataplex_v1_generated_MetadataService_ListPartitions_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": 
"CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dataplex_v1_generated_metadata_service_list_partitions_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.dataplex_v1.MetadataServiceAsyncClient", + "shortName": "MetadataServiceAsyncClient" + }, + "fullName": "google.cloud.dataplex_v1.MetadataServiceAsyncClient.update_entity", + "method": { + "fullName": "google.cloud.dataplex.v1.MetadataService.UpdateEntity", + "service": { + "fullName": "google.cloud.dataplex.v1.MetadataService", + "shortName": "MetadataService" + }, + "shortName": "UpdateEntity" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dataplex_v1.types.UpdateEntityRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.dataplex_v1.types.Entity", + "shortName": "update_entity" + }, + "description": "Sample for UpdateEntity", + "file": "dataplex_v1_generated_metadata_service_update_entity_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dataplex_v1_generated_MetadataService_UpdateEntity_async", + "segments": [ + { + "end": 60, + "start": 27, + "type": "FULL" + }, + { + "end": 60, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 54, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 57, + "start": 55, + "type": "REQUEST_EXECUTION" + }, + { + "end": 61, + "start": 58, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dataplex_v1_generated_metadata_service_update_entity_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.dataplex_v1.MetadataServiceClient", + "shortName": "MetadataServiceClient" + }, + "fullName": "google.cloud.dataplex_v1.MetadataServiceClient.update_entity", + "method": { + "fullName": "google.cloud.dataplex.v1.MetadataService.UpdateEntity", + "service": { + "fullName": "google.cloud.dataplex.v1.MetadataService", + "shortName": "MetadataService" + }, + "shortName": "UpdateEntity" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dataplex_v1.types.UpdateEntityRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.dataplex_v1.types.Entity", + "shortName": "update_entity" + }, + "description": "Sample for UpdateEntity", + "file": "dataplex_v1_generated_metadata_service_update_entity_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dataplex_v1_generated_MetadataService_UpdateEntity_sync", + "segments": [ + { + "end": 60, + "start": 27, + "type": "FULL" + }, + { + "end": 60, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 54, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 57, + "start": 55, + "type": "REQUEST_EXECUTION" + }, + { + "end": 61, + "start": 58, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dataplex_v1_generated_metadata_service_update_entity_sync.py" + } + 
] +} diff --git a/owl-bot-staging/google-cloud-dataplex/v1/scripts/fixup_dataplex_v1_keywords.py b/owl-bot-staging/google-cloud-dataplex/v1/scripts/fixup_dataplex_v1_keywords.py new file mode 100644 index 000000000000..6681941351bf --- /dev/null +++ b/owl-bot-staging/google-cloud-dataplex/v1/scripts/fixup_dataplex_v1_keywords.py @@ -0,0 +1,275 @@ +#! /usr/bin/env python3 +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +import argparse +import os +import libcst as cst +import pathlib +import sys +from typing import (Any, Callable, Dict, List, Sequence, Tuple) + + +def partition( + predicate: Callable[[Any], bool], + iterator: Sequence[Any] +) -> Tuple[List[Any], List[Any]]: + """A stable, out-of-place partition.""" + results = ([], []) + + for i in iterator: + results[int(predicate(i))].append(i) + + # Returns trueList, falseList + return results[1], results[0] + + +class dataplexCallTransformer(cst.CSTTransformer): + CTRL_PARAMS: Tuple[str] = ('retry', 'timeout', 'metadata') + METHOD_TO_PARAMS: Dict[str, Tuple[str]] = { + 'cancel_job': ('name', ), + 'cancel_metadata_job': ('name', ), + 'create_aspect_type': ('parent', 'aspect_type_id', 'aspect_type', 'validate_only', ), + 'create_asset': ('parent', 'asset_id', 'asset', 'validate_only', ), + 'create_content': ('parent', 'content', 'validate_only', ), + 'create_data_attribute': ('parent', 'data_attribute_id', 'data_attribute', 'validate_only', ), + 'create_data_attribute_binding': ('parent', 'data_attribute_binding_id', 'data_attribute_binding', 'validate_only', ), + 'create_data_scan': ('parent', 'data_scan', 'data_scan_id', 'validate_only', ), + 'create_data_taxonomy': ('parent', 'data_taxonomy_id', 'data_taxonomy', 'validate_only', ), + 'create_entity': ('parent', 'entity', 'validate_only', ), + 'create_entry': ('parent', 'entry_id', 'entry', ), + 'create_entry_group': ('parent', 'entry_group_id', 'entry_group', 'validate_only', ), + 'create_entry_type': ('parent', 'entry_type_id', 'entry_type', 'validate_only', ), + 'create_environment': ('parent', 'environment_id', 'environment', 'validate_only', ), + 'create_lake': ('parent', 'lake_id', 'lake', 'validate_only', ), + 'create_metadata_job': ('parent', 'metadata_job', 'metadata_job_id', 'validate_only', ), + 'create_partition': ('parent', 'partition', 'validate_only', ), + 'create_task': ('parent', 'task_id', 'task', 'validate_only', ), + 'create_zone': ('parent', 'zone_id', 'zone', 'validate_only', ), + 'delete_aspect_type': ('name', 'etag', ), + 'delete_asset': ('name', ), + 'delete_content': ('name', ), + 'delete_data_attribute': ('name', 'etag', ), + 'delete_data_attribute_binding': ('name', 'etag', ), + 'delete_data_scan': ('name', ), + 'delete_data_taxonomy': ('name', 'etag', ), + 'delete_entity': ('name', 'etag', ), + 'delete_entry': ('name', ), + 'delete_entry_group': ('name', 'etag', ), + 'delete_entry_type': ('name', 'etag', ), + 'delete_environment': ('name', ), + 'delete_lake': ('name', ), + 'delete_partition': ('name', 
'etag', ), + 'delete_task': ('name', ), + 'delete_zone': ('name', ), + 'generate_data_quality_rules': ('name', ), + 'get_aspect_type': ('name', ), + 'get_asset': ('name', ), + 'get_content': ('name', 'view', ), + 'get_data_attribute': ('name', ), + 'get_data_attribute_binding': ('name', ), + 'get_data_scan': ('name', 'view', ), + 'get_data_scan_job': ('name', 'view', ), + 'get_data_taxonomy': ('name', ), + 'get_entity': ('name', 'view', ), + 'get_entry': ('name', 'view', 'aspect_types', 'paths', ), + 'get_entry_group': ('name', ), + 'get_entry_type': ('name', ), + 'get_environment': ('name', ), + 'get_iam_policy': ('resource', 'options', ), + 'get_job': ('name', ), + 'get_lake': ('name', ), + 'get_metadata_job': ('name', ), + 'get_partition': ('name', ), + 'get_task': ('name', ), + 'get_zone': ('name', ), + 'list_aspect_types': ('parent', 'page_size', 'page_token', 'filter', 'order_by', ), + 'list_asset_actions': ('parent', 'page_size', 'page_token', ), + 'list_assets': ('parent', 'page_size', 'page_token', 'filter', 'order_by', ), + 'list_content': ('parent', 'page_size', 'page_token', 'filter', ), + 'list_data_attribute_bindings': ('parent', 'page_size', 'page_token', 'filter', 'order_by', ), + 'list_data_attributes': ('parent', 'page_size', 'page_token', 'filter', 'order_by', ), + 'list_data_scan_jobs': ('parent', 'page_size', 'page_token', 'filter', ), + 'list_data_scans': ('parent', 'page_size', 'page_token', 'filter', 'order_by', ), + 'list_data_taxonomies': ('parent', 'page_size', 'page_token', 'filter', 'order_by', ), + 'list_entities': ('parent', 'view', 'page_size', 'page_token', 'filter', ), + 'list_entries': ('parent', 'page_size', 'page_token', 'filter', ), + 'list_entry_groups': ('parent', 'page_size', 'page_token', 'filter', 'order_by', ), + 'list_entry_types': ('parent', 'page_size', 'page_token', 'filter', 'order_by', ), + 'list_environments': ('parent', 'page_size', 'page_token', 'filter', 'order_by', ), + 'list_jobs': ('parent', 'page_size', 'page_token', ), + 'list_lake_actions': ('parent', 'page_size', 'page_token', ), + 'list_lakes': ('parent', 'page_size', 'page_token', 'filter', 'order_by', ), + 'list_metadata_jobs': ('parent', 'page_size', 'page_token', 'filter', 'order_by', ), + 'list_partitions': ('parent', 'page_size', 'page_token', 'filter', ), + 'list_sessions': ('parent', 'page_size', 'page_token', 'filter', ), + 'list_tasks': ('parent', 'page_size', 'page_token', 'filter', 'order_by', ), + 'list_zone_actions': ('parent', 'page_size', 'page_token', ), + 'list_zones': ('parent', 'page_size', 'page_token', 'filter', 'order_by', ), + 'lookup_entry': ('name', 'entry', 'view', 'aspect_types', 'paths', ), + 'run_data_scan': ('name', ), + 'run_task': ('name', 'labels', 'args', ), + 'search_entries': ('name', 'query', 'page_size', 'page_token', 'order_by', 'scope', ), + 'set_iam_policy': ('resource', 'policy', 'update_mask', ), + 'test_iam_permissions': ('resource', 'permissions', ), + 'update_aspect_type': ('aspect_type', 'update_mask', 'validate_only', ), + 'update_asset': ('update_mask', 'asset', 'validate_only', ), + 'update_content': ('update_mask', 'content', 'validate_only', ), + 'update_data_attribute': ('update_mask', 'data_attribute', 'validate_only', ), + 'update_data_attribute_binding': ('update_mask', 'data_attribute_binding', 'validate_only', ), + 'update_data_scan': ('data_scan', 'update_mask', 'validate_only', ), + 'update_data_taxonomy': ('update_mask', 'data_taxonomy', 'validate_only', ), + 'update_entity': ('entity', 'validate_only', ), + 
'update_entry': ('entry', 'update_mask', 'allow_missing', 'delete_missing_aspects', 'aspect_keys', ), + 'update_entry_group': ('entry_group', 'update_mask', 'validate_only', ), + 'update_entry_type': ('entry_type', 'update_mask', 'validate_only', ), + 'update_environment': ('update_mask', 'environment', 'validate_only', ), + 'update_lake': ('update_mask', 'lake', 'validate_only', ), + 'update_task': ('update_mask', 'task', 'validate_only', ), + 'update_zone': ('update_mask', 'zone', 'validate_only', ), + } + + def leave_Call(self, original: cst.Call, updated: cst.Call) -> cst.CSTNode: + try: + key = original.func.attr.value + kword_params = self.METHOD_TO_PARAMS[key] + except (AttributeError, KeyError): + # Either not a method from the API or too convoluted to be sure. + return updated + + # If the existing code is valid, keyword args come after positional args. + # Therefore, all positional args must map to the first parameters. + args, kwargs = partition(lambda a: not bool(a.keyword), updated.args) + if any(k.keyword.value == "request" for k in kwargs): + # We've already fixed this file, don't fix it again. + return updated + + kwargs, ctrl_kwargs = partition( + lambda a: a.keyword.value not in self.CTRL_PARAMS, + kwargs + ) + + args, ctrl_args = args[:len(kword_params)], args[len(kword_params):] + ctrl_kwargs.extend(cst.Arg(value=a.value, keyword=cst.Name(value=ctrl)) + for a, ctrl in zip(ctrl_args, self.CTRL_PARAMS)) + + request_arg = cst.Arg( + value=cst.Dict([ + cst.DictElement( + cst.SimpleString("'{}'".format(name)), +cst.Element(value=arg.value) + ) + # Note: the args + kwargs looks silly, but keep in mind that + # the control parameters had to be stripped out, and that + # those could have been passed positionally or by keyword. + for name, arg in zip(kword_params, args + kwargs)]), + keyword=cst.Name("request") + ) + + return updated.with_changes( + args=[request_arg] + ctrl_kwargs + ) + + +def fix_files( + in_dir: pathlib.Path, + out_dir: pathlib.Path, + *, + transformer=dataplexCallTransformer(), +): + """Duplicate the input dir to the output dir, fixing file method calls. + + Preconditions: + * in_dir is a real directory + * out_dir is a real, empty directory + """ + pyfile_gen = ( + pathlib.Path(os.path.join(root, f)) + for root, _, files in os.walk(in_dir) + for f in files if os.path.splitext(f)[1] == ".py" + ) + + for fpath in pyfile_gen: + with open(fpath, 'r') as f: + src = f.read() + + # Parse the code and insert method call fixes. + tree = cst.parse_module(src) + updated = tree.visit(transformer) + + # Create the path and directory structure for the new file. + updated_path = out_dir.joinpath(fpath.relative_to(in_dir)) + updated_path.parent.mkdir(parents=True, exist_ok=True) + + # Generate the updated source file at the corresponding path. + with open(updated_path, 'w') as f: + f.write(updated.code) + + +if __name__ == '__main__': + parser = argparse.ArgumentParser( + description="""Fix up source that uses the dataplex client library. + +The existing sources are NOT overwritten but are copied to output_dir with changes made. + +Note: This tool operates at a best-effort level at converting positional + parameters in client method calls to keyword based parameters. + Cases where it WILL FAIL include + A) * or ** expansion in a method call. + B) Calls via function or method alias (includes free function calls) + C) Indirect or dispatched calls (e.g. the method is looked up dynamically) + + These all constitute false negatives. 
The tool will also detect false + positives when an API method shares a name with another method. +""") + parser.add_argument( + '-d', + '--input-directory', + required=True, + dest='input_dir', + help='the input directory to walk for python files to fix up', + ) + parser.add_argument( + '-o', + '--output-directory', + required=True, + dest='output_dir', + help='the directory to output files fixed via un-flattening', + ) + args = parser.parse_args() + input_dir = pathlib.Path(args.input_dir) + output_dir = pathlib.Path(args.output_dir) + if not input_dir.is_dir(): + print( + f"input directory '{input_dir}' does not exist or is not a directory", + file=sys.stderr, + ) + sys.exit(-1) + + if not output_dir.is_dir(): + print( + f"output directory '{output_dir}' does not exist or is not a directory", + file=sys.stderr, + ) + sys.exit(-1) + + if os.listdir(output_dir): + print( + f"output directory '{output_dir}' is not empty", + file=sys.stderr, + ) + sys.exit(-1) + + fix_files(input_dir, output_dir) diff --git a/owl-bot-staging/google-cloud-dataplex/v1/setup.py b/owl-bot-staging/google-cloud-dataplex/v1/setup.py new file mode 100644 index 000000000000..d9494d49423d --- /dev/null +++ b/owl-bot-staging/google-cloud-dataplex/v1/setup.py @@ -0,0 +1,99 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
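# A minimal before/after illustration of the transform that the
# fixup_dataplex_v1_keywords.py script above applies. The resource name is
# hypothetical; 'get_lake' and its keyword tuple ('name', ) come from
# METHOD_TO_PARAMS, and 'retry' is one of the CTRL_PARAMS that stay top-level.
#
# Before fixup -- positional argument, control parameter by keyword:
#     client.get_lake("projects/p/locations/l/lakes/my-lake", retry=my_retry)
#
# After fixup -- positional args folded into a single 'request' dict, while
# 'retry', 'timeout', and 'metadata' remain ordinary keyword arguments:
#     client.get_lake(
#         request={'name': "projects/p/locations/l/lakes/my-lake"},
#         retry=my_retry,
#     )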
+# +import io +import os +import re + +import setuptools # type: ignore + +package_root = os.path.abspath(os.path.dirname(__file__)) + +name = 'google-cloud-dataplex' + + +description = "Google Cloud Dataplex API client library" + +version = None + +with open(os.path.join(package_root, 'google/cloud/dataplex/gapic_version.py')) as fp: + version_candidates = re.findall(r"(?<=\")\d+.\d+.\d+(?=\")", fp.read()) + assert (len(version_candidates) == 1) + version = version_candidates[0] + +if version[0] == "0": + release_status = "Development Status :: 4 - Beta" +else: + release_status = "Development Status :: 5 - Production/Stable" + +dependencies = [ + "google-api-core[grpc] >= 1.34.1, <3.0.0dev,!=2.0.*,!=2.1.*,!=2.2.*,!=2.3.*,!=2.4.*,!=2.5.*,!=2.6.*,!=2.7.*,!=2.8.*,!=2.9.*,!=2.10.*", + # Exclude incompatible versions of `google-auth` + # See https://github.com/googleapis/google-cloud-python/issues/12364 + "google-auth >= 2.14.1, <3.0.0dev,!=2.24.0,!=2.25.0", + "proto-plus >= 1.22.3, <2.0.0dev", + "proto-plus >= 1.25.0, <2.0.0dev; python_version >= '3.13'", + "protobuf>=3.20.2,<6.0.0dev,!=4.21.0,!=4.21.1,!=4.21.2,!=4.21.3,!=4.21.4,!=4.21.5", + "grpc-google-iam-v1 >= 0.12.4, <1.0.0dev", +] +extras = { +} +url = "https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-dataplex" + +package_root = os.path.abspath(os.path.dirname(__file__)) + +readme_filename = os.path.join(package_root, "README.rst") +with io.open(readme_filename, encoding="utf-8") as readme_file: + readme = readme_file.read() + +packages = [ + package + for package in setuptools.find_namespace_packages() + if package.startswith("google") +] + +setuptools.setup( + name=name, + version=version, + description=description, + long_description=readme, + author="Google LLC", + author_email="googleapis-packages@google.com", + license="Apache 2.0", + url=url, + classifiers=[ + release_status, + "Intended Audience :: Developers", + "License :: OSI Approved :: Apache Software License", + "Programming Language :: Python", + "Programming Language :: Python :: 3", + "Programming Language :: Python :: 3.7", + "Programming Language :: Python :: 3.8", + "Programming Language :: Python :: 3.9", + "Programming Language :: Python :: 3.10", + "Programming Language :: Python :: 3.11", + "Programming Language :: Python :: 3.12", + "Programming Language :: Python :: 3.13", + "Operating System :: OS Independent", + "Topic :: Internet", + ], + platforms="Posix; MacOS X; Windows", + packages=packages, + python_requires=">=3.7", + install_requires=dependencies, + extras_require=extras, + include_package_data=True, + zip_safe=False, +) diff --git a/owl-bot-staging/google-cloud-dataplex/v1/testing/constraints-3.10.txt b/owl-bot-staging/google-cloud-dataplex/v1/testing/constraints-3.10.txt new file mode 100644 index 000000000000..ad3f0fa58e2d --- /dev/null +++ b/owl-bot-staging/google-cloud-dataplex/v1/testing/constraints-3.10.txt @@ -0,0 +1,7 @@ +# -*- coding: utf-8 -*- +# This constraints file is required for unit tests. +# List all library dependencies and extras in this file. +google-api-core +proto-plus +protobuf +grpc-google-iam-v1 diff --git a/owl-bot-staging/google-cloud-dataplex/v1/testing/constraints-3.11.txt b/owl-bot-staging/google-cloud-dataplex/v1/testing/constraints-3.11.txt new file mode 100644 index 000000000000..ad3f0fa58e2d --- /dev/null +++ b/owl-bot-staging/google-cloud-dataplex/v1/testing/constraints-3.11.txt @@ -0,0 +1,7 @@ +# -*- coding: utf-8 -*- +# This constraints file is required for unit tests. 
+# List all library dependencies and extras in this file. +google-api-core +proto-plus +protobuf +grpc-google-iam-v1 diff --git a/owl-bot-staging/google-cloud-dataplex/v1/testing/constraints-3.12.txt b/owl-bot-staging/google-cloud-dataplex/v1/testing/constraints-3.12.txt new file mode 100644 index 000000000000..ad3f0fa58e2d --- /dev/null +++ b/owl-bot-staging/google-cloud-dataplex/v1/testing/constraints-3.12.txt @@ -0,0 +1,7 @@ +# -*- coding: utf-8 -*- +# This constraints file is required for unit tests. +# List all library dependencies and extras in this file. +google-api-core +proto-plus +protobuf +grpc-google-iam-v1 diff --git a/owl-bot-staging/google-cloud-dataplex/v1/testing/constraints-3.13.txt b/owl-bot-staging/google-cloud-dataplex/v1/testing/constraints-3.13.txt new file mode 100644 index 000000000000..ad3f0fa58e2d --- /dev/null +++ b/owl-bot-staging/google-cloud-dataplex/v1/testing/constraints-3.13.txt @@ -0,0 +1,7 @@ +# -*- coding: utf-8 -*- +# This constraints file is required for unit tests. +# List all library dependencies and extras in this file. +google-api-core +proto-plus +protobuf +grpc-google-iam-v1 diff --git a/owl-bot-staging/google-cloud-dataplex/v1/testing/constraints-3.7.txt b/owl-bot-staging/google-cloud-dataplex/v1/testing/constraints-3.7.txt new file mode 100644 index 000000000000..a81fb6bcd05c --- /dev/null +++ b/owl-bot-staging/google-cloud-dataplex/v1/testing/constraints-3.7.txt @@ -0,0 +1,11 @@ +# This constraints file is used to check that lower bounds +# are correct in setup.py +# List all library dependencies and extras in this file. +# Pin the version to the lower bound. +# e.g., if setup.py has "google-cloud-foo >= 1.14.0, < 2.0.0dev", +# Then this file should have google-cloud-foo==1.14.0 +google-api-core==1.34.1 +google-auth==2.14.1 +proto-plus==1.22.3 +protobuf==3.20.2 +grpc-google-iam-v1==0.12.4 diff --git a/owl-bot-staging/google-cloud-dataplex/v1/testing/constraints-3.8.txt b/owl-bot-staging/google-cloud-dataplex/v1/testing/constraints-3.8.txt new file mode 100644 index 000000000000..ad3f0fa58e2d --- /dev/null +++ b/owl-bot-staging/google-cloud-dataplex/v1/testing/constraints-3.8.txt @@ -0,0 +1,7 @@ +# -*- coding: utf-8 -*- +# This constraints file is required for unit tests. +# List all library dependencies and extras in this file. +google-api-core +proto-plus +protobuf +grpc-google-iam-v1 diff --git a/owl-bot-staging/google-cloud-dataplex/v1/testing/constraints-3.9.txt b/owl-bot-staging/google-cloud-dataplex/v1/testing/constraints-3.9.txt new file mode 100644 index 000000000000..ad3f0fa58e2d --- /dev/null +++ b/owl-bot-staging/google-cloud-dataplex/v1/testing/constraints-3.9.txt @@ -0,0 +1,7 @@ +# -*- coding: utf-8 -*- +# This constraints file is required for unit tests. +# List all library dependencies and extras in this file. +google-api-core +proto-plus +protobuf +grpc-google-iam-v1 diff --git a/owl-bot-staging/google-cloud-dataplex/v1/tests/__init__.py b/owl-bot-staging/google-cloud-dataplex/v1/tests/__init__.py new file mode 100644 index 000000000000..7b3de3117f38 --- /dev/null +++ b/owl-bot-staging/google-cloud-dataplex/v1/tests/__init__.py @@ -0,0 +1,16 @@ + +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# diff --git a/owl-bot-staging/google-cloud-dataplex/v1/tests/unit/__init__.py b/owl-bot-staging/google-cloud-dataplex/v1/tests/unit/__init__.py new file mode 100644 index 000000000000..7b3de3117f38 --- /dev/null +++ b/owl-bot-staging/google-cloud-dataplex/v1/tests/unit/__init__.py @@ -0,0 +1,16 @@ + +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# diff --git a/owl-bot-staging/google-cloud-dataplex/v1/tests/unit/gapic/__init__.py b/owl-bot-staging/google-cloud-dataplex/v1/tests/unit/gapic/__init__.py new file mode 100644 index 000000000000..7b3de3117f38 --- /dev/null +++ b/owl-bot-staging/google-cloud-dataplex/v1/tests/unit/gapic/__init__.py @@ -0,0 +1,16 @@ + +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# diff --git a/owl-bot-staging/google-cloud-dataplex/v1/tests/unit/gapic/dataplex_v1/__init__.py b/owl-bot-staging/google-cloud-dataplex/v1/tests/unit/gapic/dataplex_v1/__init__.py new file mode 100644 index 000000000000..7b3de3117f38 --- /dev/null +++ b/owl-bot-staging/google-cloud-dataplex/v1/tests/unit/gapic/dataplex_v1/__init__.py @@ -0,0 +1,16 @@ + +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# diff --git a/owl-bot-staging/google-cloud-dataplex/v1/tests/unit/gapic/dataplex_v1/test_catalog_service.py b/owl-bot-staging/google-cloud-dataplex/v1/tests/unit/gapic/dataplex_v1/test_catalog_service.py new file mode 100644 index 000000000000..801c1fbe291a --- /dev/null +++ b/owl-bot-staging/google-cloud-dataplex/v1/tests/unit/gapic/dataplex_v1/test_catalog_service.py @@ -0,0 +1,13193 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +import os +# try/except added for compatibility with python < 3.8 +try: + from unittest import mock + from unittest.mock import AsyncMock # pragma: NO COVER +except ImportError: # pragma: NO COVER + import mock + +import grpc +from grpc.experimental import aio +import math +import pytest +from google.api_core import api_core_version +from proto.marshal.rules.dates import DurationRule, TimestampRule +from proto.marshal.rules import wrappers + +try: + from google.auth.aio import credentials as ga_credentials_async + HAS_GOOGLE_AUTH_AIO = True +except ImportError: # pragma: NO COVER + HAS_GOOGLE_AUTH_AIO = False + +from google.api_core import client_options +from google.api_core import exceptions as core_exceptions +from google.api_core import future +from google.api_core import gapic_v1 +from google.api_core import grpc_helpers +from google.api_core import grpc_helpers_async +from google.api_core import operation +from google.api_core import operation_async # type: ignore +from google.api_core import operations_v1 +from google.api_core import path_template +from google.api_core import retry as retries +from google.auth import credentials as ga_credentials +from google.auth.exceptions import MutualTLSChannelError +from google.cloud.dataplex_v1.services.catalog_service import CatalogServiceAsyncClient +from google.cloud.dataplex_v1.services.catalog_service import CatalogServiceClient +from google.cloud.dataplex_v1.services.catalog_service import pagers +from google.cloud.dataplex_v1.services.catalog_service import transports +from google.cloud.dataplex_v1.types import catalog +from google.cloud.dataplex_v1.types import service +from google.cloud.location import locations_pb2 +from google.iam.v1 import iam_policy_pb2 # type: ignore +from google.iam.v1 import options_pb2 # type: ignore +from google.iam.v1 import policy_pb2 # type: ignore +from google.longrunning import operations_pb2 # type: ignore +from google.oauth2 import service_account +from google.protobuf import empty_pb2 # type: ignore +from google.protobuf import field_mask_pb2 # type: ignore +from google.protobuf import struct_pb2 # type: ignore +from google.protobuf import timestamp_pb2 # type: ignore +import google.auth + + +async def mock_async_gen(data, chunk_size=1): + for i in range(0, len(data)): # pragma: NO COVER + chunk = data[i : i + chunk_size] + yield chunk.encode("utf-8") + +def client_cert_source_callback(): + return b"cert bytes", b"key bytes" + +# TODO: use async auth anon credentials by default once the minimum version of 
google-auth is upgraded. +# See related issue: https://github.com/googleapis/gapic-generator-python/issues/2107. +def async_anonymous_credentials(): + if HAS_GOOGLE_AUTH_AIO: + return ga_credentials_async.AnonymousCredentials() + return ga_credentials.AnonymousCredentials() + +# If default endpoint is localhost, then default mtls endpoint will be the same. +# This method modifies the default endpoint so the client can produce a different +# mtls endpoint for endpoint testing purposes. +def modify_default_endpoint(client): + return "foo.googleapis.com" if ("localhost" in client.DEFAULT_ENDPOINT) else client.DEFAULT_ENDPOINT + +# If default endpoint template is localhost, then default mtls endpoint will be the same. +# This method modifies the default endpoint template so the client can produce a different +# mtls endpoint for endpoint testing purposes. +def modify_default_endpoint_template(client): + return "test.{UNIVERSE_DOMAIN}" if ("localhost" in client._DEFAULT_ENDPOINT_TEMPLATE) else client._DEFAULT_ENDPOINT_TEMPLATE + + +def test__get_default_mtls_endpoint(): + api_endpoint = "example.googleapis.com" + api_mtls_endpoint = "example.mtls.googleapis.com" + sandbox_endpoint = "example.sandbox.googleapis.com" + sandbox_mtls_endpoint = "example.mtls.sandbox.googleapis.com" + non_googleapi = "api.example.com" + + assert CatalogServiceClient._get_default_mtls_endpoint(None) is None + assert CatalogServiceClient._get_default_mtls_endpoint(api_endpoint) == api_mtls_endpoint + assert CatalogServiceClient._get_default_mtls_endpoint(api_mtls_endpoint) == api_mtls_endpoint + assert CatalogServiceClient._get_default_mtls_endpoint(sandbox_endpoint) == sandbox_mtls_endpoint + assert CatalogServiceClient._get_default_mtls_endpoint(sandbox_mtls_endpoint) == sandbox_mtls_endpoint + assert CatalogServiceClient._get_default_mtls_endpoint(non_googleapi) == non_googleapi + +def test__read_environment_variables(): + assert CatalogServiceClient._read_environment_variables() == (False, "auto", None) + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + assert CatalogServiceClient._read_environment_variables() == (True, "auto", None) + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "false"}): + assert CatalogServiceClient._read_environment_variables() == (False, "auto", None) + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"}): + with pytest.raises(ValueError) as excinfo: + CatalogServiceClient._read_environment_variables() + assert str(excinfo.value) == "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + assert CatalogServiceClient._read_environment_variables() == (False, "never", None) + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + assert CatalogServiceClient._read_environment_variables() == (False, "always", None) + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"}): + assert CatalogServiceClient._read_environment_variables() == (False, "auto", None) + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): + with pytest.raises(MutualTLSChannelError) as excinfo: + CatalogServiceClient._read_environment_variables() + assert str(excinfo.value) == "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + + with mock.patch.dict(os.environ, {"GOOGLE_CLOUD_UNIVERSE_DOMAIN": 
"foo.com"}): + assert CatalogServiceClient._read_environment_variables() == (False, "auto", "foo.com") + +def test__get_client_cert_source(): + mock_provided_cert_source = mock.Mock() + mock_default_cert_source = mock.Mock() + + assert CatalogServiceClient._get_client_cert_source(None, False) is None + assert CatalogServiceClient._get_client_cert_source(mock_provided_cert_source, False) is None + assert CatalogServiceClient._get_client_cert_source(mock_provided_cert_source, True) == mock_provided_cert_source + + with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=True): + with mock.patch('google.auth.transport.mtls.default_client_cert_source', return_value=mock_default_cert_source): + assert CatalogServiceClient._get_client_cert_source(None, True) is mock_default_cert_source + assert CatalogServiceClient._get_client_cert_source(mock_provided_cert_source, "true") is mock_provided_cert_source + +@mock.patch.object(CatalogServiceClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(CatalogServiceClient)) +@mock.patch.object(CatalogServiceAsyncClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(CatalogServiceAsyncClient)) +def test__get_api_endpoint(): + api_override = "foo.com" + mock_client_cert_source = mock.Mock() + default_universe = CatalogServiceClient._DEFAULT_UNIVERSE + default_endpoint = CatalogServiceClient._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=default_universe) + mock_universe = "bar.com" + mock_endpoint = CatalogServiceClient._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=mock_universe) + + assert CatalogServiceClient._get_api_endpoint(api_override, mock_client_cert_source, default_universe, "always") == api_override + assert CatalogServiceClient._get_api_endpoint(None, mock_client_cert_source, default_universe, "auto") == CatalogServiceClient.DEFAULT_MTLS_ENDPOINT + assert CatalogServiceClient._get_api_endpoint(None, None, default_universe, "auto") == default_endpoint + assert CatalogServiceClient._get_api_endpoint(None, None, default_universe, "always") == CatalogServiceClient.DEFAULT_MTLS_ENDPOINT + assert CatalogServiceClient._get_api_endpoint(None, mock_client_cert_source, default_universe, "always") == CatalogServiceClient.DEFAULT_MTLS_ENDPOINT + assert CatalogServiceClient._get_api_endpoint(None, None, mock_universe, "never") == mock_endpoint + assert CatalogServiceClient._get_api_endpoint(None, None, default_universe, "never") == default_endpoint + + with pytest.raises(MutualTLSChannelError) as excinfo: + CatalogServiceClient._get_api_endpoint(None, mock_client_cert_source, mock_universe, "auto") + assert str(excinfo.value) == "mTLS is not supported in any universe other than googleapis.com." + + +def test__get_universe_domain(): + client_universe_domain = "foo.com" + universe_domain_env = "bar.com" + + assert CatalogServiceClient._get_universe_domain(client_universe_domain, universe_domain_env) == client_universe_domain + assert CatalogServiceClient._get_universe_domain(None, universe_domain_env) == universe_domain_env + assert CatalogServiceClient._get_universe_domain(None, None) == CatalogServiceClient._DEFAULT_UNIVERSE + + with pytest.raises(ValueError) as excinfo: + CatalogServiceClient._get_universe_domain("", None) + assert str(excinfo.value) == "Universe Domain cannot be an empty string." 
+ + +@pytest.mark.parametrize("client_class,transport_name", [ + (CatalogServiceClient, "grpc"), + (CatalogServiceAsyncClient, "grpc_asyncio"), +]) +def test_catalog_service_client_from_service_account_info(client_class, transport_name): + creds = ga_credentials.AnonymousCredentials() + with mock.patch.object(service_account.Credentials, 'from_service_account_info') as factory: + factory.return_value = creds + info = {"valid": True} + client = client_class.from_service_account_info(info, transport=transport_name) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + assert client.transport._host == ( + 'dataplex.googleapis.com:443' + ) + + +@pytest.mark.parametrize("transport_class,transport_name", [ + (transports.CatalogServiceGrpcTransport, "grpc"), + (transports.CatalogServiceGrpcAsyncIOTransport, "grpc_asyncio"), +]) +def test_catalog_service_client_service_account_always_use_jwt(transport_class, transport_name): + with mock.patch.object(service_account.Credentials, 'with_always_use_jwt_access', create=True) as use_jwt: + creds = service_account.Credentials(None, None, None) + transport = transport_class(credentials=creds, always_use_jwt_access=True) + use_jwt.assert_called_once_with(True) + + with mock.patch.object(service_account.Credentials, 'with_always_use_jwt_access', create=True) as use_jwt: + creds = service_account.Credentials(None, None, None) + transport = transport_class(credentials=creds, always_use_jwt_access=False) + use_jwt.assert_not_called() + + +@pytest.mark.parametrize("client_class,transport_name", [ + (CatalogServiceClient, "grpc"), + (CatalogServiceAsyncClient, "grpc_asyncio"), +]) +def test_catalog_service_client_from_service_account_file(client_class, transport_name): + creds = ga_credentials.AnonymousCredentials() + with mock.patch.object(service_account.Credentials, 'from_service_account_file') as factory: + factory.return_value = creds + client = client_class.from_service_account_file("dummy/file/path.json", transport=transport_name) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + client = client_class.from_service_account_json("dummy/file/path.json", transport=transport_name) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + assert client.transport._host == ( + 'dataplex.googleapis.com:443' + ) + + +def test_catalog_service_client_get_transport_class(): + transport = CatalogServiceClient.get_transport_class() + available_transports = [ + transports.CatalogServiceGrpcTransport, + ] + assert transport in available_transports + + transport = CatalogServiceClient.get_transport_class("grpc") + assert transport == transports.CatalogServiceGrpcTransport + + +@pytest.mark.parametrize("client_class,transport_class,transport_name", [ + (CatalogServiceClient, transports.CatalogServiceGrpcTransport, "grpc"), + (CatalogServiceAsyncClient, transports.CatalogServiceGrpcAsyncIOTransport, "grpc_asyncio"), +]) +@mock.patch.object(CatalogServiceClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(CatalogServiceClient)) +@mock.patch.object(CatalogServiceAsyncClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(CatalogServiceAsyncClient)) +def test_catalog_service_client_client_options(client_class, transport_class, transport_name): + # Check that if channel is provided we won't create a new one. 
+ with mock.patch.object(CatalogServiceClient, 'get_transport_class') as gtc: + transport = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ) + client = client_class(transport=transport) + gtc.assert_not_called() + + # Check that if channel is provided via str we will create a new one. + with mock.patch.object(CatalogServiceClient, 'get_transport_class') as gtc: + client = client_class(transport=transport_name) + gtc.assert_called() + + # Check the case api_endpoint is provided. + options = client_options.ClientOptions(api_endpoint="squid.clam.whelk") + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(transport=transport_name, client_options=options) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host="squid.clam.whelk", + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is + # "never". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is + # "always". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_MTLS_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has + # unsupported value. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): + with pytest.raises(MutualTLSChannelError) as excinfo: + client = client_class(transport=transport_name) + assert str(excinfo.value) == "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + + # Check the case GOOGLE_API_USE_CLIENT_CERTIFICATE has unsupported value. 
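+    # (Only the literal strings "true" and "false" are accepted for this
+    # variable; anything else should surface as a ValueError rather than being
+    # coerced to a boolean.)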
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"}): + with pytest.raises(ValueError) as excinfo: + client = client_class(transport=transport_name) + assert str(excinfo.value) == "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" + + # Check the case quota_project_id is provided + options = client_options.ClientOptions(quota_project_id="octopus") + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id="octopus", + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + # Check the case api_endpoint is provided + options = client_options.ClientOptions(api_audience="https://language.googleapis.com") + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience="https://language.googleapis.com" + ) + +@pytest.mark.parametrize("client_class,transport_class,transport_name,use_client_cert_env", [ + (CatalogServiceClient, transports.CatalogServiceGrpcTransport, "grpc", "true"), + (CatalogServiceAsyncClient, transports.CatalogServiceGrpcAsyncIOTransport, "grpc_asyncio", "true"), + (CatalogServiceClient, transports.CatalogServiceGrpcTransport, "grpc", "false"), + (CatalogServiceAsyncClient, transports.CatalogServiceGrpcAsyncIOTransport, "grpc_asyncio", "false"), +]) +@mock.patch.object(CatalogServiceClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(CatalogServiceClient)) +@mock.patch.object(CatalogServiceAsyncClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(CatalogServiceAsyncClient)) +@mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"}) +def test_catalog_service_client_mtls_env_auto(client_class, transport_class, transport_name, use_client_cert_env): + # This tests the endpoint autoswitch behavior. Endpoint is autoswitched to the default + # mtls endpoint, if GOOGLE_API_USE_CLIENT_CERTIFICATE is "true" and client cert exists. + + # Check the case client_cert_source is provided. Whether client cert is used depends on + # GOOGLE_API_USE_CLIENT_CERTIFICATE value. 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}): + options = client_options.ClientOptions(client_cert_source=client_cert_source_callback) + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + + if use_client_cert_env == "false": + expected_client_cert_source = None + expected_host = client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE) + else: + expected_client_cert_source = client_cert_source_callback + expected_host = client.DEFAULT_MTLS_ENDPOINT + + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=expected_host, + scopes=None, + client_cert_source_for_mtls=expected_client_cert_source, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case ADC client cert is provided. Whether client cert is used depends on + # GOOGLE_API_USE_CLIENT_CERTIFICATE value. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}): + with mock.patch.object(transport_class, '__init__') as patched: + with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=True): + with mock.patch('google.auth.transport.mtls.default_client_cert_source', return_value=client_cert_source_callback): + if use_client_cert_env == "false": + expected_host = client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE) + expected_client_cert_source = None + else: + expected_host = client.DEFAULT_MTLS_ENDPOINT + expected_client_cert_source = client_cert_source_callback + + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=expected_host, + scopes=None, + client_cert_source_for_mtls=expected_client_cert_source, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case client_cert_source and ADC client cert are not provided. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}): + with mock.patch.object(transport_class, '__init__') as patched: + with mock.patch("google.auth.transport.mtls.has_default_client_cert_source", return_value=False): + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +@pytest.mark.parametrize("client_class", [ + CatalogServiceClient, CatalogServiceAsyncClient +]) +@mock.patch.object(CatalogServiceClient, "DEFAULT_ENDPOINT", modify_default_endpoint(CatalogServiceClient)) +@mock.patch.object(CatalogServiceAsyncClient, "DEFAULT_ENDPOINT", modify_default_endpoint(CatalogServiceAsyncClient)) +def test_catalog_service_client_get_mtls_endpoint_and_cert_source(client_class): + mock_client_cert_source = mock.Mock() + + # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "true". 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + mock_api_endpoint = "foo" + options = client_options.ClientOptions(client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint) + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source(options) + assert api_endpoint == mock_api_endpoint + assert cert_source == mock_client_cert_source + + # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "false". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "false"}): + mock_client_cert_source = mock.Mock() + mock_api_endpoint = "foo" + options = client_options.ClientOptions(client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint) + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source(options) + assert api_endpoint == mock_api_endpoint + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "never". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "always". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert doesn't exist. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=False): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert exists. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=True): + with mock.patch('google.auth.transport.mtls.default_client_cert_source', return_value=mock_client_cert_source): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT + assert cert_source == mock_client_cert_source + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has + # unsupported value. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): + with pytest.raises(MutualTLSChannelError) as excinfo: + client_class.get_mtls_endpoint_and_cert_source() + + assert str(excinfo.value) == "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + + # Check the case GOOGLE_API_USE_CLIENT_CERTIFICATE has unsupported value. 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"}): + with pytest.raises(ValueError) as excinfo: + client_class.get_mtls_endpoint_and_cert_source() + + assert str(excinfo.value) == "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" + +@pytest.mark.parametrize("client_class", [ + CatalogServiceClient, CatalogServiceAsyncClient +]) +@mock.patch.object(CatalogServiceClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(CatalogServiceClient)) +@mock.patch.object(CatalogServiceAsyncClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(CatalogServiceAsyncClient)) +def test_catalog_service_client_client_api_endpoint(client_class): + mock_client_cert_source = client_cert_source_callback + api_override = "foo.com" + default_universe = CatalogServiceClient._DEFAULT_UNIVERSE + default_endpoint = CatalogServiceClient._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=default_universe) + mock_universe = "bar.com" + mock_endpoint = CatalogServiceClient._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=mock_universe) + + # If ClientOptions.api_endpoint is set and GOOGLE_API_USE_CLIENT_CERTIFICATE="true", + # use ClientOptions.api_endpoint as the api endpoint regardless. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + with mock.patch("google.auth.transport.requests.AuthorizedSession.configure_mtls_channel"): + options = client_options.ClientOptions(client_cert_source=mock_client_cert_source, api_endpoint=api_override) + client = client_class(client_options=options, credentials=ga_credentials.AnonymousCredentials()) + assert client.api_endpoint == api_override + + # If ClientOptions.api_endpoint is not set and GOOGLE_API_USE_MTLS_ENDPOINT="never", + # use the _DEFAULT_ENDPOINT_TEMPLATE populated with GDU as the api endpoint. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + client = client_class(credentials=ga_credentials.AnonymousCredentials()) + assert client.api_endpoint == default_endpoint + + # If ClientOptions.api_endpoint is not set and GOOGLE_API_USE_MTLS_ENDPOINT="always", + # use the DEFAULT_MTLS_ENDPOINT as the api endpoint. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + client = client_class(credentials=ga_credentials.AnonymousCredentials()) + assert client.api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT + + # If ClientOptions.api_endpoint is not set, GOOGLE_API_USE_MTLS_ENDPOINT="auto" (default), + # GOOGLE_API_USE_CLIENT_CERTIFICATE="false" (default), default cert source doesn't exist, + # and ClientOptions.universe_domain="bar.com", + # use the _DEFAULT_ENDPOINT_TEMPLATE populated with universe domain as the api endpoint. + options = client_options.ClientOptions() + universe_exists = hasattr(options, "universe_domain") + if universe_exists: + options = client_options.ClientOptions(universe_domain=mock_universe) + client = client_class(client_options=options, credentials=ga_credentials.AnonymousCredentials()) + else: + client = client_class(client_options=options, credentials=ga_credentials.AnonymousCredentials()) + assert client.api_endpoint == (mock_endpoint if universe_exists else default_endpoint) + assert client.universe_domain == (mock_universe if universe_exists else default_universe) + + # If ClientOptions does not have a universe domain attribute and GOOGLE_API_USE_MTLS_ENDPOINT="never", + # use the _DEFAULT_ENDPOINT_TEMPLATE populated with GDU as the api endpoint. 
+ options = client_options.ClientOptions() + if hasattr(options, "universe_domain"): + delattr(options, "universe_domain") + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + client = client_class(client_options=options, credentials=ga_credentials.AnonymousCredentials()) + assert client.api_endpoint == default_endpoint + + +@pytest.mark.parametrize("client_class,transport_class,transport_name", [ + (CatalogServiceClient, transports.CatalogServiceGrpcTransport, "grpc"), + (CatalogServiceAsyncClient, transports.CatalogServiceGrpcAsyncIOTransport, "grpc_asyncio"), +]) +def test_catalog_service_client_client_options_scopes(client_class, transport_class, transport_name): + # Check the case scopes are provided. + options = client_options.ClientOptions( + scopes=["1", "2"], + ) + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE), + scopes=["1", "2"], + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + +@pytest.mark.parametrize("client_class,transport_class,transport_name,grpc_helpers", [ + (CatalogServiceClient, transports.CatalogServiceGrpcTransport, "grpc", grpc_helpers), + (CatalogServiceAsyncClient, transports.CatalogServiceGrpcAsyncIOTransport, "grpc_asyncio", grpc_helpers_async), +]) +def test_catalog_service_client_client_options_credentials_file(client_class, transport_class, transport_name, grpc_helpers): + # Check the case credentials file is provided. + options = client_options.ClientOptions( + credentials_file="credentials.json" + ) + + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file="credentials.json", + host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + +def test_catalog_service_client_client_options_from_dict(): + with mock.patch('google.cloud.dataplex_v1.services.catalog_service.transports.CatalogServiceGrpcTransport.__init__') as grpc_transport: + grpc_transport.return_value = None + client = CatalogServiceClient( + client_options={'api_endpoint': 'squid.clam.whelk'} + ) + grpc_transport.assert_called_once_with( + credentials=None, + credentials_file=None, + host="squid.clam.whelk", + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +@pytest.mark.parametrize("client_class,transport_class,transport_name,grpc_helpers", [ + (CatalogServiceClient, transports.CatalogServiceGrpcTransport, "grpc", grpc_helpers), + (CatalogServiceAsyncClient, transports.CatalogServiceGrpcAsyncIOTransport, "grpc_asyncio", grpc_helpers_async), +]) +def test_catalog_service_client_create_channel_credentials_file(client_class, transport_class, transport_name, grpc_helpers): + # Check the case credentials file is provided. 
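+    # (The credentials_file option is forwarded to the transport, which loads it
+    # via google.auth.load_credentials_from_file; the second half of this test
+    # checks that the loaded credentials, not ADC, are handed to create_channel.)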
+ options = client_options.ClientOptions( + credentials_file="credentials.json" + ) + + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file="credentials.json", + host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # test that the credentials from file are saved and used as the credentials. + with mock.patch.object( + google.auth, "load_credentials_from_file", autospec=True + ) as load_creds, mock.patch.object( + google.auth, "default", autospec=True + ) as adc, mock.patch.object( + grpc_helpers, "create_channel" + ) as create_channel: + creds = ga_credentials.AnonymousCredentials() + file_creds = ga_credentials.AnonymousCredentials() + load_creds.return_value = (file_creds, None) + adc.return_value = (creds, None) + client = client_class(client_options=options, transport=transport_name) + create_channel.assert_called_with( + "dataplex.googleapis.com:443", + credentials=file_creds, + credentials_file=None, + quota_project_id=None, + default_scopes=( + 'https://www.googleapis.com/auth/cloud-platform', +), + scopes=None, + default_host="dataplex.googleapis.com", + ssl_credentials=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + +@pytest.mark.parametrize("request_type", [ + catalog.CreateEntryTypeRequest, + dict, +]) +def test_create_entry_type(request_type, transport: str = 'grpc'): + client = CatalogServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_entry_type), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name='operations/spam') + response = client.create_entry_type(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = catalog.CreateEntryTypeRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +def test_create_entry_type_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = CatalogServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = catalog.CreateEntryTypeRequest( + parent='parent_value', + entry_type_id='entry_type_id_value', + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.create_entry_type), + '__call__') as call: + call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client.create_entry_type(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == catalog.CreateEntryTypeRequest( + parent='parent_value', + entry_type_id='entry_type_id_value', + ) + +def test_create_entry_type_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = CatalogServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.create_entry_type in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client._transport._wrapped_methods[client._transport.create_entry_type] = mock_rpc + request = {} + client.create_entry_type(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.create_entry_type(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + +@pytest.mark.asyncio +async def test_create_entry_type_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = CatalogServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._client._transport.create_entry_type in client._client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[client._client._transport.create_entry_type] = mock_rpc + + request = {} + await client.create_entry_type(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + await client.create_entry_type(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + +@pytest.mark.asyncio +async def test_create_entry_type_async(transport: str = 'grpc_asyncio', request_type=catalog.CreateEntryTypeRequest): + client = CatalogServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_entry_type), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name='operations/spam') + ) + response = await client.create_entry_type(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = catalog.CreateEntryTypeRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +@pytest.mark.asyncio +async def test_create_entry_type_async_from_dict(): + await test_create_entry_type_async(request_type=dict) + +def test_create_entry_type_field_headers(): + client = CatalogServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = catalog.CreateEntryTypeRequest() + + request.parent = 'parent_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_entry_type), + '__call__') as call: + call.return_value = operations_pb2.Operation(name='operations/op') + client.create_entry_type(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'parent=parent_value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_create_entry_type_field_headers_async(): + client = CatalogServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = catalog.CreateEntryTypeRequest() + + request.parent = 'parent_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_entry_type), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(operations_pb2.Operation(name='operations/op')) + await client.create_entry_type(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. 
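+    # (x-goog-request-params is the routing metadata Google APIs use to steer a
+    # request to the resource named in the URI, here the new entry type's parent.)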
+    _, _, kw = call.mock_calls[0]
+    assert (
+        'x-goog-request-params',
+        'parent=parent_value',
+    ) in kw['metadata']
+
+
+def test_create_entry_type_flattened():
+    client = CatalogServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.create_entry_type),
+            '__call__') as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = operations_pb2.Operation(name='operations/op')
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        client.create_entry_type(
+            parent='parent_value',
+            entry_type=catalog.EntryType(name='name_value'),
+            entry_type_id='entry_type_id_value',
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        arg = args[0].parent
+        mock_val = 'parent_value'
+        assert arg == mock_val
+        arg = args[0].entry_type
+        mock_val = catalog.EntryType(name='name_value')
+        assert arg == mock_val
+        arg = args[0].entry_type_id
+        mock_val = 'entry_type_id_value'
+        assert arg == mock_val
+
+
+def test_create_entry_type_flattened_error():
+    client = CatalogServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        client.create_entry_type(
+            catalog.CreateEntryTypeRequest(),
+            parent='parent_value',
+            entry_type=catalog.EntryType(name='name_value'),
+            entry_type_id='entry_type_id_value',
+        )
+
+@pytest.mark.asyncio
+async def test_create_entry_type_flattened_async():
+    client = CatalogServiceAsyncClient(
+        credentials=async_anonymous_credentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.create_entry_type),
+            '__call__') as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
+            operations_pb2.Operation(name='operations/spam')
+        )
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        response = await client.create_entry_type(
+            parent='parent_value',
+            entry_type=catalog.EntryType(name='name_value'),
+            entry_type_id='entry_type_id_value',
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        arg = args[0].parent
+        mock_val = 'parent_value'
+        assert arg == mock_val
+        arg = args[0].entry_type
+        mock_val = catalog.EntryType(name='name_value')
+        assert arg == mock_val
+        arg = args[0].entry_type_id
+        mock_val = 'entry_type_id_value'
+        assert arg == mock_val
+
+@pytest.mark.asyncio
+async def test_create_entry_type_flattened_error_async():
+    client = CatalogServiceAsyncClient(
+        credentials=async_anonymous_credentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
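+    # (Flattened keyword arguments are sugar for building the request object, so
+    # combining them with an explicit request would be ambiguous and should
+    # raise.)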
+ with pytest.raises(ValueError): + await client.create_entry_type( + catalog.CreateEntryTypeRequest(), + parent='parent_value', + entry_type=catalog.EntryType(name='name_value'), + entry_type_id='entry_type_id_value', + ) + + +@pytest.mark.parametrize("request_type", [ + catalog.UpdateEntryTypeRequest, + dict, +]) +def test_update_entry_type(request_type, transport: str = 'grpc'): + client = CatalogServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_entry_type), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name='operations/spam') + response = client.update_entry_type(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = catalog.UpdateEntryTypeRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +def test_update_entry_type_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = CatalogServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = catalog.UpdateEntryTypeRequest( + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_entry_type), + '__call__') as call: + call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client.update_entry_type(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == catalog.UpdateEntryTypeRequest( + ) + +def test_update_entry_type_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = CatalogServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.update_entry_type in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client._transport._wrapped_methods[client._transport.update_entry_type] = mock_rpc + request = {} + client.update_entry_type(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.update_entry_type(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + +@pytest.mark.asyncio +async def test_update_entry_type_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = CatalogServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._client._transport.update_entry_type in client._client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[client._client._transport.update_entry_type] = mock_rpc + + request = {} + await client.update_entry_type(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + await client.update_entry_type(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + +@pytest.mark.asyncio +async def test_update_entry_type_async(transport: str = 'grpc_asyncio', request_type=catalog.UpdateEntryTypeRequest): + client = CatalogServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_entry_type), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name='operations/spam') + ) + response = await client.update_entry_type(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = catalog.UpdateEntryTypeRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +@pytest.mark.asyncio +async def test_update_entry_type_async_from_dict(): + await test_update_entry_type_async(request_type=dict) + +def test_update_entry_type_field_headers(): + client = CatalogServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = catalog.UpdateEntryTypeRequest() + + request.entry_type.name = 'name_value' + + # Mock the actual call within the gRPC stub, and fake the request. 
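+    # (For Update RPCs the URI is derived from the resource itself, so the
+    # routing header checked below is entry_type.name rather than a top-level
+    # name or parent field.)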
+ with mock.patch.object( + type(client.transport.update_entry_type), + '__call__') as call: + call.return_value = operations_pb2.Operation(name='operations/op') + client.update_entry_type(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'entry_type.name=name_value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_update_entry_type_field_headers_async(): + client = CatalogServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = catalog.UpdateEntryTypeRequest() + + request.entry_type.name = 'name_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_entry_type), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(operations_pb2.Operation(name='operations/op')) + await client.update_entry_type(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'entry_type.name=name_value', + ) in kw['metadata'] + + +def test_update_entry_type_flattened(): + client = CatalogServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_entry_type), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name='operations/op') + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.update_entry_type( + entry_type=catalog.EntryType(name='name_value'), + update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].entry_type + mock_val = catalog.EntryType(name='name_value') + assert arg == mock_val + arg = args[0].update_mask + mock_val = field_mask_pb2.FieldMask(paths=['paths_value']) + assert arg == mock_val + + +def test_update_entry_type_flattened_error(): + client = CatalogServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.update_entry_type( + catalog.UpdateEntryTypeRequest(), + entry_type=catalog.EntryType(name='name_value'), + update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), + ) + +@pytest.mark.asyncio +async def test_update_entry_type_flattened_async(): + client = CatalogServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_entry_type), + '__call__') as call: + # Designate an appropriate return value for the call. 
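+        # (FakeUnaryUnaryCall makes the mocked stub awaitable, standing in for a
+        # real async gRPC call.)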
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
+            operations_pb2.Operation(name='operations/spam')
+        )
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        response = await client.update_entry_type(
+            entry_type=catalog.EntryType(name='name_value'),
+            update_mask=field_mask_pb2.FieldMask(paths=['paths_value']),
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        arg = args[0].entry_type
+        mock_val = catalog.EntryType(name='name_value')
+        assert arg == mock_val
+        arg = args[0].update_mask
+        mock_val = field_mask_pb2.FieldMask(paths=['paths_value'])
+        assert arg == mock_val
+
+@pytest.mark.asyncio
+async def test_update_entry_type_flattened_error_async():
+    client = CatalogServiceAsyncClient(
+        credentials=async_anonymous_credentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        await client.update_entry_type(
+            catalog.UpdateEntryTypeRequest(),
+            entry_type=catalog.EntryType(name='name_value'),
+            update_mask=field_mask_pb2.FieldMask(paths=['paths_value']),
+        )
+
+
+@pytest.mark.parametrize("request_type", [
+    catalog.DeleteEntryTypeRequest,
+    dict,
+])
+def test_delete_entry_type(request_type, transport: str = 'grpc'):
+    client = CatalogServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport,
+    )
+
+    # Everything is optional in proto3 as far as the runtime is concerned,
+    # and we are mocking out the actual API, so just send an empty request.
+    request = request_type()
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.delete_entry_type),
+            '__call__') as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = operations_pb2.Operation(name='operations/spam')
+        response = client.delete_entry_type(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        request = catalog.DeleteEntryTypeRequest()
+        assert args[0] == request
+
+        # Establish that the response is the type that we expect.
+        assert isinstance(response, future.Future)
+
+
+def test_delete_entry_type_non_empty_request_with_auto_populated_field():
+    # This test is a coverage failsafe to make sure that UUID4 fields are
+    # automatically populated, according to AIP-4235, with non-empty requests.
+    client = CatalogServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport='grpc',
+    )
+
+    # Populate all string fields in the request which are not UUID4
+    # since we want to check that UUID4 are populated automatically
+    # if they meet the requirements of AIP 4235.
+    request = catalog.DeleteEntryTypeRequest(
+        name='name_value',
+        etag='etag_value',
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.delete_entry_type),
+            '__call__') as call:
+        call.return_value.name = "foo"  # operation_request.operation in compute client(s) expect a string.
+ client.delete_entry_type(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == catalog.DeleteEntryTypeRequest( + name='name_value', + etag='etag_value', + ) + +def test_delete_entry_type_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = CatalogServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.delete_entry_type in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client._transport._wrapped_methods[client._transport.delete_entry_type] = mock_rpc + request = {} + client.delete_entry_type(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.delete_entry_type(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + +@pytest.mark.asyncio +async def test_delete_entry_type_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = CatalogServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._client._transport.delete_entry_type in client._client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[client._client._transport.delete_entry_type] = mock_rpc + + request = {} + await client.delete_entry_type(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + await client.delete_entry_type(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + +@pytest.mark.asyncio +async def test_delete_entry_type_async(transport: str = 'grpc_asyncio', request_type=catalog.DeleteEntryTypeRequest): + client = CatalogServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. 
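+    # (The mocked stub hands back an Operation proto wrapped in a fake call; the
+    # client is expected to surface it as a long-running-operation future, which
+    # the isinstance check below pins down.)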
+ with mock.patch.object( + type(client.transport.delete_entry_type), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name='operations/spam') + ) + response = await client.delete_entry_type(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = catalog.DeleteEntryTypeRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +@pytest.mark.asyncio +async def test_delete_entry_type_async_from_dict(): + await test_delete_entry_type_async(request_type=dict) + +def test_delete_entry_type_field_headers(): + client = CatalogServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = catalog.DeleteEntryTypeRequest() + + request.name = 'name_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_entry_type), + '__call__') as call: + call.return_value = operations_pb2.Operation(name='operations/op') + client.delete_entry_type(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'name=name_value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_delete_entry_type_field_headers_async(): + client = CatalogServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = catalog.DeleteEntryTypeRequest() + + request.name = 'name_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_entry_type), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(operations_pb2.Operation(name='operations/op')) + await client.delete_entry_type(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'name=name_value', + ) in kw['metadata'] + + +def test_delete_entry_type_flattened(): + client = CatalogServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_entry_type), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name='operations/op') + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.delete_entry_type( + name='name_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(call.mock_calls) == 1
+ _, args, _ = call.mock_calls[0]
+ arg = args[0].name
+ mock_val = 'name_value'
+ assert arg == mock_val
+
+
+def test_delete_entry_type_flattened_error():
+ client = CatalogServiceClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ )
+
+ # Attempting to call a method with both a request object and flattened
+ # fields is an error.
+ with pytest.raises(ValueError):
+ client.delete_entry_type(
+ catalog.DeleteEntryTypeRequest(),
+ name='name_value',
+ )
+
+@pytest.mark.asyncio
+async def test_delete_entry_type_flattened_async():
+ client = CatalogServiceAsyncClient(
+ credentials=async_anonymous_credentials(),
+ )
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(
+ type(client.transport.delete_entry_type),
+ '__call__') as call:
+ # Designate an appropriate return value for the call.
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
+ operations_pb2.Operation(name='operations/spam')
+ )
+ # Call the method with a truthy value for each flattened field,
+ # using the keyword arguments to the method.
+ response = await client.delete_entry_type(
+ name='name_value',
+ )
+
+ # Establish that the underlying call was made with the expected
+ # request object values.
+ assert len(call.mock_calls)
+ _, args, _ = call.mock_calls[0]
+ arg = args[0].name
+ mock_val = 'name_value'
+ assert arg == mock_val
+
+@pytest.mark.asyncio
+async def test_delete_entry_type_flattened_error_async():
+ client = CatalogServiceAsyncClient(
+ credentials=async_anonymous_credentials(),
+ )
+
+ # Attempting to call a method with both a request object and flattened
+ # fields is an error.
+ with pytest.raises(ValueError):
+ await client.delete_entry_type(
+ catalog.DeleteEntryTypeRequest(),
+ name='name_value',
+ )
+
+
+@pytest.mark.parametrize("request_type", [
+ catalog.ListEntryTypesRequest,
+ dict,
+])
+def test_list_entry_types(request_type, transport: str = 'grpc'):
+ client = CatalogServiceClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ transport=transport,
+ )
+
+ # Everything is optional in proto3 as far as the runtime is concerned,
+ # and we are mocking out the actual API, so just send an empty request.
+ request = request_type()
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(
+ type(client.transport.list_entry_types),
+ '__call__') as call:
+ # Designate an appropriate return value for the call.
+ call.return_value = catalog.ListEntryTypesResponse(
+ next_page_token='next_page_token_value',
+ unreachable_locations=['unreachable_locations_value'],
+ )
+ response = client.list_entry_types(request)
+
+ # Establish that the underlying gRPC stub method was called.
+ assert len(call.mock_calls) == 1
+ _, args, _ = call.mock_calls[0]
+ request = catalog.ListEntryTypesRequest()
+ assert args[0] == request
+
+ # Establish that the response is the type that we expect.
+ assert isinstance(response, pagers.ListEntryTypesPager)
+ assert response.next_page_token == 'next_page_token_value'
+ assert response.unreachable_locations == ['unreachable_locations_value']
+
+
+def test_list_entry_types_non_empty_request_with_auto_populated_field():
+ # This test is a coverage failsafe to make sure that UUID4 fields are
+ # automatically populated, according to AIP-4235, with non-empty requests.
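+ # (Under AIP-4235 a request field annotated as auto-populated — typically a
+ # UUID4 request_id — is filled in client-side when left unset; this List
+ # request carries no such field, so the test effectively verifies that the
+ # explicitly set strings pass through to the transport unchanged.)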
+ client = CatalogServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = catalog.ListEntryTypesRequest( + parent='parent_value', + page_token='page_token_value', + filter='filter_value', + order_by='order_by_value', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_entry_types), + '__call__') as call: + call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client.list_entry_types(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == catalog.ListEntryTypesRequest( + parent='parent_value', + page_token='page_token_value', + filter='filter_value', + order_by='order_by_value', + ) + +def test_list_entry_types_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = CatalogServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.list_entry_types in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client._transport._wrapped_methods[client._transport.list_entry_types] = mock_rpc + request = {} + client.list_entry_types(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.list_entry_types(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + +@pytest.mark.asyncio +async def test_list_entry_types_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = CatalogServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._client._transport.list_entry_types in client._client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[client._client._transport.list_entry_types] = mock_rpc + + request = {} + await client.list_entry_types(request) + + # Establish that the underlying gRPC stub method was called. 
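+ # (mock.AsyncMock records invocations in call_count just as mock.Mock does
+ # for synchronous calls, so the sync and async variants of this test can
+ # share the same assertions.)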
+ assert mock_rpc.call_count == 1
+
+ await client.list_entry_types(request)
+
+ # Establish that a new wrapper was not created for this call
+ assert wrapper_fn.call_count == 0
+ assert mock_rpc.call_count == 2
+
+@pytest.mark.asyncio
+async def test_list_entry_types_async(transport: str = 'grpc_asyncio', request_type=catalog.ListEntryTypesRequest):
+ client = CatalogServiceAsyncClient(
+ credentials=async_anonymous_credentials(),
+ transport=transport,
+ )
+
+ # Everything is optional in proto3 as far as the runtime is concerned,
+ # and we are mocking out the actual API, so just send an empty request.
+ request = request_type()
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(
+ type(client.transport.list_entry_types),
+ '__call__') as call:
+ # Designate an appropriate return value for the call.
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(catalog.ListEntryTypesResponse(
+ next_page_token='next_page_token_value',
+ unreachable_locations=['unreachable_locations_value'],
+ ))
+ response = await client.list_entry_types(request)
+
+ # Establish that the underlying gRPC stub method was called.
+ assert len(call.mock_calls)
+ _, args, _ = call.mock_calls[0]
+ request = catalog.ListEntryTypesRequest()
+ assert args[0] == request
+
+ # Establish that the response is the type that we expect.
+ assert isinstance(response, pagers.ListEntryTypesAsyncPager)
+ assert response.next_page_token == 'next_page_token_value'
+ assert response.unreachable_locations == ['unreachable_locations_value']
+
+
+@pytest.mark.asyncio
+async def test_list_entry_types_async_from_dict():
+ await test_list_entry_types_async(request_type=dict)
+
+def test_list_entry_types_field_headers():
+ client = CatalogServiceClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ )
+
+ # Any value that is part of the HTTP/1.1 URI should be sent as
+ # a field header. Set these to a non-empty value.
+ request = catalog.ListEntryTypesRequest()
+
+ request.parent = 'parent_value'
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(
+ type(client.transport.list_entry_types),
+ '__call__') as call:
+ call.return_value = catalog.ListEntryTypesResponse()
+ client.list_entry_types(request)
+
+ # Establish that the underlying gRPC stub method was called.
+ assert len(call.mock_calls) == 1
+ _, args, _ = call.mock_calls[0]
+ assert args[0] == request
+
+ # Establish that the field header was sent.
+ _, _, kw = call.mock_calls[0]
+ assert (
+ 'x-goog-request-params',
+ 'parent=parent_value',
+ ) in kw['metadata']
+
+
+@pytest.mark.asyncio
+async def test_list_entry_types_field_headers_async():
+ client = CatalogServiceAsyncClient(
+ credentials=async_anonymous_credentials(),
+ )
+
+ # Any value that is part of the HTTP/1.1 URI should be sent as
+ # a field header. Set these to a non-empty value.
+ request = catalog.ListEntryTypesRequest()
+
+ request.parent = 'parent_value'
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(
+ type(client.transport.list_entry_types),
+ '__call__') as call:
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(catalog.ListEntryTypesResponse())
+ await client.list_entry_types(request)
+
+ # Establish that the underlying gRPC stub method was called.
+ assert len(call.mock_calls)
+ _, args, _ = call.mock_calls[0]
+ assert args[0] == request
+
+ # Establish that the field header was sent.
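+ # (x-goog-request-params is the implicit routing header: the generated
+ # client URL-encodes the request's routing fields — here `parent` — into
+ # this metadata entry so the backend can route the call without having to
+ # parse the request body.)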
+ _, _, kw = call.mock_calls[0]
+ assert (
+ 'x-goog-request-params',
+ 'parent=parent_value',
+ ) in kw['metadata']
+
+
+def test_list_entry_types_flattened():
+ client = CatalogServiceClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ )
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(
+ type(client.transport.list_entry_types),
+ '__call__') as call:
+ # Designate an appropriate return value for the call.
+ call.return_value = catalog.ListEntryTypesResponse()
+ # Call the method with a truthy value for each flattened field,
+ # using the keyword arguments to the method.
+ client.list_entry_types(
+ parent='parent_value',
+ )
+
+ # Establish that the underlying call was made with the expected
+ # request object values.
+ assert len(call.mock_calls) == 1
+ _, args, _ = call.mock_calls[0]
+ arg = args[0].parent
+ mock_val = 'parent_value'
+ assert arg == mock_val
+
+
+def test_list_entry_types_flattened_error():
+ client = CatalogServiceClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ )
+
+ # Attempting to call a method with both a request object and flattened
+ # fields is an error.
+ with pytest.raises(ValueError):
+ client.list_entry_types(
+ catalog.ListEntryTypesRequest(),
+ parent='parent_value',
+ )
+
+@pytest.mark.asyncio
+async def test_list_entry_types_flattened_async():
+ client = CatalogServiceAsyncClient(
+ credentials=async_anonymous_credentials(),
+ )
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(
+ type(client.transport.list_entry_types),
+ '__call__') as call:
+ # Designate an appropriate return value for the call.
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(catalog.ListEntryTypesResponse())
+ # Call the method with a truthy value for each flattened field,
+ # using the keyword arguments to the method.
+ response = await client.list_entry_types(
+ parent='parent_value',
+ )
+
+ # Establish that the underlying call was made with the expected
+ # request object values.
+ assert len(call.mock_calls)
+ _, args, _ = call.mock_calls[0]
+ arg = args[0].parent
+ mock_val = 'parent_value'
+ assert arg == mock_val
+
+@pytest.mark.asyncio
+async def test_list_entry_types_flattened_error_async():
+ client = CatalogServiceAsyncClient(
+ credentials=async_anonymous_credentials(),
+ )
+
+ # Attempting to call a method with both a request object and flattened
+ # fields is an error.
+ with pytest.raises(ValueError):
+ await client.list_entry_types(
+ catalog.ListEntryTypesRequest(),
+ parent='parent_value',
+ )
+
+
+def test_list_entry_types_pager(transport_name: str = "grpc"):
+ client = CatalogServiceClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ transport=transport_name,
+ )
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(
+ type(client.transport.list_entry_types),
+ '__call__') as call:
+ # Set the response to a series of pages.
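+ # (With an iterable side_effect, each successive stub call returns the
+ # next element, and an element that is an exception is raised instead; the
+ # trailing RuntimeError makes the test fail loudly if the pager ever asks
+ # for a page beyond the last prepared response.)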
+ call.side_effect = (
+ catalog.ListEntryTypesResponse(
+ entry_types=[
+ catalog.EntryType(),
+ catalog.EntryType(),
+ catalog.EntryType(),
+ ],
+ next_page_token='abc',
+ ),
+ catalog.ListEntryTypesResponse(
+ entry_types=[],
+ next_page_token='def',
+ ),
+ catalog.ListEntryTypesResponse(
+ entry_types=[
+ catalog.EntryType(),
+ ],
+ next_page_token='ghi',
+ ),
+ catalog.ListEntryTypesResponse(
+ entry_types=[
+ catalog.EntryType(),
+ catalog.EntryType(),
+ ],
+ ),
+ RuntimeError,
+ )
+
+ expected_metadata = ()
+ retry = retries.Retry()
+ timeout = 5
+ expected_metadata = tuple(expected_metadata) + (
+ gapic_v1.routing_header.to_grpc_metadata((
+ ('parent', ''),
+ )),
+ )
+ pager = client.list_entry_types(request={}, retry=retry, timeout=timeout)
+
+ assert pager._metadata == expected_metadata
+ assert pager._retry == retry
+ assert pager._timeout == timeout
+
+ results = list(pager)
+ assert len(results) == 6
+ assert all(isinstance(i, catalog.EntryType)
+ for i in results)
+
+
+def test_list_entry_types_pages(transport_name: str = "grpc"):
+ client = CatalogServiceClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ transport=transport_name,
+ )
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(
+ type(client.transport.list_entry_types),
+ '__call__') as call:
+ # Set the response to a series of pages.
+ call.side_effect = (
+ catalog.ListEntryTypesResponse(
+ entry_types=[
+ catalog.EntryType(),
+ catalog.EntryType(),
+ catalog.EntryType(),
+ ],
+ next_page_token='abc',
+ ),
+ catalog.ListEntryTypesResponse(
+ entry_types=[],
+ next_page_token='def',
+ ),
+ catalog.ListEntryTypesResponse(
+ entry_types=[
+ catalog.EntryType(),
+ ],
+ next_page_token='ghi',
+ ),
+ catalog.ListEntryTypesResponse(
+ entry_types=[
+ catalog.EntryType(),
+ catalog.EntryType(),
+ ],
+ ),
+ RuntimeError,
+ )
+ pages = list(client.list_entry_types(request={}).pages)
+ for page_, token in zip(pages, ['abc','def','ghi', '']):
+ assert page_.raw_page.next_page_token == token
+
+@pytest.mark.asyncio
+async def test_list_entry_types_async_pager():
+ client = CatalogServiceAsyncClient(
+ credentials=async_anonymous_credentials(),
+ )
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(
+ type(client.transport.list_entry_types),
+ '__call__', new_callable=mock.AsyncMock) as call:
+ # Set the response to a series of pages.
+ call.side_effect = (
+ catalog.ListEntryTypesResponse(
+ entry_types=[
+ catalog.EntryType(),
+ catalog.EntryType(),
+ catalog.EntryType(),
+ ],
+ next_page_token='abc',
+ ),
+ catalog.ListEntryTypesResponse(
+ entry_types=[],
+ next_page_token='def',
+ ),
+ catalog.ListEntryTypesResponse(
+ entry_types=[
+ catalog.EntryType(),
+ ],
+ next_page_token='ghi',
+ ),
+ catalog.ListEntryTypesResponse(
+ entry_types=[
+ catalog.EntryType(),
+ catalog.EntryType(),
+ ],
+ ),
+ RuntimeError,
+ )
+ async_pager = await client.list_entry_types(request={},)
+ assert async_pager.next_page_token == 'abc'
+ responses = []
+ async for response in async_pager: # pragma: no branch
+ responses.append(response)
+
+ assert len(responses) == 6
+ assert all(isinstance(i, catalog.EntryType)
+ for i in responses)
+
+
+@pytest.mark.asyncio
+async def test_list_entry_types_async_pages():
+ client = CatalogServiceAsyncClient(
+ credentials=async_anonymous_credentials(),
+ )
+
+ # Mock the actual call within the gRPC stub, and fake the request.
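+ # (The async pager tests patch the stub with new_callable=mock.AsyncMock so
+ # that invoking it yields an awaitable; the returned async_pager can then
+ # be consumed with `async for`, fetching pages lazily as iteration
+ # proceeds.)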
+ with mock.patch.object( + type(client.transport.list_entry_types), + '__call__', new_callable=mock.AsyncMock) as call: + # Set the response to a series of pages. + call.side_effect = ( + catalog.ListEntryTypesResponse( + entry_types=[ + catalog.EntryType(), + catalog.EntryType(), + catalog.EntryType(), + ], + next_page_token='abc', + ), + catalog.ListEntryTypesResponse( + entry_types=[], + next_page_token='def', + ), + catalog.ListEntryTypesResponse( + entry_types=[ + catalog.EntryType(), + ], + next_page_token='ghi', + ), + catalog.ListEntryTypesResponse( + entry_types=[ + catalog.EntryType(), + catalog.EntryType(), + ], + ), + RuntimeError, + ) + pages = [] + # Workaround issue in python 3.9 related to code coverage by adding `# pragma: no branch` + # See https://github.com/googleapis/gapic-generator-python/pull/1174#issuecomment-1025132372 + async for page_ in ( # pragma: no branch + await client.list_entry_types(request={}) + ).pages: + pages.append(page_) + for page_, token in zip(pages, ['abc','def','ghi', '']): + assert page_.raw_page.next_page_token == token + +@pytest.mark.parametrize("request_type", [ + catalog.GetEntryTypeRequest, + dict, +]) +def test_get_entry_type(request_type, transport: str = 'grpc'): + client = CatalogServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_entry_type), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = catalog.EntryType( + name='name_value', + uid='uid_value', + description='description_value', + display_name='display_name_value', + etag='etag_value', + type_aliases=['type_aliases_value'], + platform='platform_value', + system='system_value', + ) + response = client.get_entry_type(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = catalog.GetEntryTypeRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, catalog.EntryType) + assert response.name == 'name_value' + assert response.uid == 'uid_value' + assert response.description == 'description_value' + assert response.display_name == 'display_name_value' + assert response.etag == 'etag_value' + assert response.type_aliases == ['type_aliases_value'] + assert response.platform == 'platform_value' + assert response.system == 'system_value' + + +def test_get_entry_type_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = CatalogServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = catalog.GetEntryTypeRequest( + name='name_value', + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.get_entry_type), + '__call__') as call: + call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client.get_entry_type(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == catalog.GetEntryTypeRequest( + name='name_value', + ) + +def test_get_entry_type_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = CatalogServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.get_entry_type in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client._transport._wrapped_methods[client._transport.get_entry_type] = mock_rpc + request = {} + client.get_entry_type(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.get_entry_type(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + +@pytest.mark.asyncio +async def test_get_entry_type_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = CatalogServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._client._transport.get_entry_type in client._client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[client._client._transport.get_entry_type] = mock_rpc + + request = {} + await client.get_entry_type(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + await client.get_entry_type(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + +@pytest.mark.asyncio +async def test_get_entry_type_async(transport: str = 'grpc_asyncio', request_type=catalog.GetEntryTypeRequest): + client = CatalogServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_entry_type), + '__call__') as call: + # Designate an appropriate return value for the call. 
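+ # (For a plain unary method like GetEntryType the awaited
+ # FakeUnaryUnaryCall resolves directly to the response message; contrast
+ # the LRO methods above, where the resolved Operation proto is further
+ # wrapped in an operation future by the client.)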
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(catalog.EntryType(
+ name='name_value',
+ uid='uid_value',
+ description='description_value',
+ display_name='display_name_value',
+ etag='etag_value',
+ type_aliases=['type_aliases_value'],
+ platform='platform_value',
+ system='system_value',
+ ))
+ response = await client.get_entry_type(request)
+
+ # Establish that the underlying gRPC stub method was called.
+ assert len(call.mock_calls)
+ _, args, _ = call.mock_calls[0]
+ request = catalog.GetEntryTypeRequest()
+ assert args[0] == request
+
+ # Establish that the response is the type that we expect.
+ assert isinstance(response, catalog.EntryType)
+ assert response.name == 'name_value'
+ assert response.uid == 'uid_value'
+ assert response.description == 'description_value'
+ assert response.display_name == 'display_name_value'
+ assert response.etag == 'etag_value'
+ assert response.type_aliases == ['type_aliases_value']
+ assert response.platform == 'platform_value'
+ assert response.system == 'system_value'
+
+
+@pytest.mark.asyncio
+async def test_get_entry_type_async_from_dict():
+ await test_get_entry_type_async(request_type=dict)
+
+def test_get_entry_type_field_headers():
+ client = CatalogServiceClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ )
+
+ # Any value that is part of the HTTP/1.1 URI should be sent as
+ # a field header. Set these to a non-empty value.
+ request = catalog.GetEntryTypeRequest()
+
+ request.name = 'name_value'
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(
+ type(client.transport.get_entry_type),
+ '__call__') as call:
+ call.return_value = catalog.EntryType()
+ client.get_entry_type(request)
+
+ # Establish that the underlying gRPC stub method was called.
+ assert len(call.mock_calls) == 1
+ _, args, _ = call.mock_calls[0]
+ assert args[0] == request
+
+ # Establish that the field header was sent.
+ _, _, kw = call.mock_calls[0]
+ assert (
+ 'x-goog-request-params',
+ 'name=name_value',
+ ) in kw['metadata']
+
+
+@pytest.mark.asyncio
+async def test_get_entry_type_field_headers_async():
+ client = CatalogServiceAsyncClient(
+ credentials=async_anonymous_credentials(),
+ )
+
+ # Any value that is part of the HTTP/1.1 URI should be sent as
+ # a field header. Set these to a non-empty value.
+ request = catalog.GetEntryTypeRequest()
+
+ request.name = 'name_value'
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(
+ type(client.transport.get_entry_type),
+ '__call__') as call:
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(catalog.EntryType())
+ await client.get_entry_type(request)
+
+ # Establish that the underlying gRPC stub method was called.
+ assert len(call.mock_calls)
+ _, args, _ = call.mock_calls[0]
+ assert args[0] == request
+
+ # Establish that the field header was sent.
+ _, _, kw = call.mock_calls[0]
+ assert (
+ 'x-goog-request-params',
+ 'name=name_value',
+ ) in kw['metadata']
+
+
+def test_get_entry_type_flattened():
+ client = CatalogServiceClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ )
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(
+ type(client.transport.get_entry_type),
+ '__call__') as call:
+ # Designate an appropriate return value for the call.
+ call.return_value = catalog.EntryType()
+ # Call the method with a truthy value for each flattened field,
+ # using the keyword arguments to the method.
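+ # (Truthy values are used deliberately: proto3 cannot distinguish an unset
+ # scalar field from its falsy default, so only non-default values prove
+ # that the flattened arguments were actually copied into the request.)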
+ client.get_entry_type(
+ name='name_value',
+ )
+
+ # Establish that the underlying call was made with the expected
+ # request object values.
+ assert len(call.mock_calls) == 1
+ _, args, _ = call.mock_calls[0]
+ arg = args[0].name
+ mock_val = 'name_value'
+ assert arg == mock_val
+
+
+def test_get_entry_type_flattened_error():
+ client = CatalogServiceClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ )
+
+ # Attempting to call a method with both a request object and flattened
+ # fields is an error.
+ with pytest.raises(ValueError):
+ client.get_entry_type(
+ catalog.GetEntryTypeRequest(),
+ name='name_value',
+ )
+
+@pytest.mark.asyncio
+async def test_get_entry_type_flattened_async():
+ client = CatalogServiceAsyncClient(
+ credentials=async_anonymous_credentials(),
+ )
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(
+ type(client.transport.get_entry_type),
+ '__call__') as call:
+ # Designate an appropriate return value for the call.
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(catalog.EntryType())
+ # Call the method with a truthy value for each flattened field,
+ # using the keyword arguments to the method.
+ response = await client.get_entry_type(
+ name='name_value',
+ )
+
+ # Establish that the underlying call was made with the expected
+ # request object values.
+ assert len(call.mock_calls)
+ _, args, _ = call.mock_calls[0]
+ arg = args[0].name
+ mock_val = 'name_value'
+ assert arg == mock_val
+
+@pytest.mark.asyncio
+async def test_get_entry_type_flattened_error_async():
+ client = CatalogServiceAsyncClient(
+ credentials=async_anonymous_credentials(),
+ )
+
+ # Attempting to call a method with both a request object and flattened
+ # fields is an error.
+ with pytest.raises(ValueError):
+ await client.get_entry_type(
+ catalog.GetEntryTypeRequest(),
+ name='name_value',
+ )
+
+
+@pytest.mark.parametrize("request_type", [
+ catalog.CreateAspectTypeRequest,
+ dict,
+])
+def test_create_aspect_type(request_type, transport: str = 'grpc'):
+ client = CatalogServiceClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ transport=transport,
+ )
+
+ # Everything is optional in proto3 as far as the runtime is concerned,
+ # and we are mocking out the actual API, so just send an empty request.
+ request = request_type()
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(
+ type(client.transport.create_aspect_type),
+ '__call__') as call:
+ # Designate an appropriate return value for the call.
+ call.return_value = operations_pb2.Operation(name='operations/spam')
+ response = client.create_aspect_type(request)
+
+ # Establish that the underlying gRPC stub method was called.
+ assert len(call.mock_calls) == 1
+ _, args, _ = call.mock_calls[0]
+ request = catalog.CreateAspectTypeRequest()
+ assert args[0] == request
+
+ # Establish that the response is the type that we expect.
+ assert isinstance(response, future.Future)
+
+
+def test_create_aspect_type_non_empty_request_with_auto_populated_field():
+ # This test is a coverage failsafe to make sure that UUID4 fields are
+ # automatically populated, according to AIP-4235, with non-empty requests.
+ client = CatalogServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = catalog.CreateAspectTypeRequest( + parent='parent_value', + aspect_type_id='aspect_type_id_value', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_aspect_type), + '__call__') as call: + call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client.create_aspect_type(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == catalog.CreateAspectTypeRequest( + parent='parent_value', + aspect_type_id='aspect_type_id_value', + ) + +def test_create_aspect_type_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = CatalogServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.create_aspect_type in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client._transport._wrapped_methods[client._transport.create_aspect_type] = mock_rpc + request = {} + client.create_aspect_type(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.create_aspect_type(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + +@pytest.mark.asyncio +async def test_create_aspect_type_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = CatalogServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._client._transport.create_aspect_type in client._client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[client._client._transport.create_aspect_type] = mock_rpc + + request = {} + await client.create_aspect_type(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + await client.create_aspect_type(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + +@pytest.mark.asyncio +async def test_create_aspect_type_async(transport: str = 'grpc_asyncio', request_type=catalog.CreateAspectTypeRequest): + client = CatalogServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_aspect_type), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name='operations/spam') + ) + response = await client.create_aspect_type(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = catalog.CreateAspectTypeRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +@pytest.mark.asyncio +async def test_create_aspect_type_async_from_dict(): + await test_create_aspect_type_async(request_type=dict) + +def test_create_aspect_type_field_headers(): + client = CatalogServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = catalog.CreateAspectTypeRequest() + + request.parent = 'parent_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_aspect_type), + '__call__') as call: + call.return_value = operations_pb2.Operation(name='operations/op') + client.create_aspect_type(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'parent=parent_value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_create_aspect_type_field_headers_async(): + client = CatalogServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = catalog.CreateAspectTypeRequest() + + request.parent = 'parent_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_aspect_type), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(operations_pb2.Operation(name='operations/op')) + await client.create_aspect_type(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. 
+ _, _, kw = call.mock_calls[0]
+ assert (
+ 'x-goog-request-params',
+ 'parent=parent_value',
+ ) in kw['metadata']
+
+
+def test_create_aspect_type_flattened():
+ client = CatalogServiceClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ )
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(
+ type(client.transport.create_aspect_type),
+ '__call__') as call:
+ # Designate an appropriate return value for the call.
+ call.return_value = operations_pb2.Operation(name='operations/op')
+ # Call the method with a truthy value for each flattened field,
+ # using the keyword arguments to the method.
+ client.create_aspect_type(
+ parent='parent_value',
+ aspect_type=catalog.AspectType(name='name_value'),
+ aspect_type_id='aspect_type_id_value',
+ )
+
+ # Establish that the underlying call was made with the expected
+ # request object values.
+ assert len(call.mock_calls) == 1
+ _, args, _ = call.mock_calls[0]
+ arg = args[0].parent
+ mock_val = 'parent_value'
+ assert arg == mock_val
+ arg = args[0].aspect_type
+ mock_val = catalog.AspectType(name='name_value')
+ assert arg == mock_val
+ arg = args[0].aspect_type_id
+ mock_val = 'aspect_type_id_value'
+ assert arg == mock_val
+
+
+def test_create_aspect_type_flattened_error():
+ client = CatalogServiceClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ )
+
+ # Attempting to call a method with both a request object and flattened
+ # fields is an error.
+ with pytest.raises(ValueError):
+ client.create_aspect_type(
+ catalog.CreateAspectTypeRequest(),
+ parent='parent_value',
+ aspect_type=catalog.AspectType(name='name_value'),
+ aspect_type_id='aspect_type_id_value',
+ )
+
+@pytest.mark.asyncio
+async def test_create_aspect_type_flattened_async():
+ client = CatalogServiceAsyncClient(
+ credentials=async_anonymous_credentials(),
+ )
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(
+ type(client.transport.create_aspect_type),
+ '__call__') as call:
+ # Designate an appropriate return value for the call.
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
+ operations_pb2.Operation(name='operations/spam')
+ )
+ # Call the method with a truthy value for each flattened field,
+ # using the keyword arguments to the method.
+ response = await client.create_aspect_type(
+ parent='parent_value',
+ aspect_type=catalog.AspectType(name='name_value'),
+ aspect_type_id='aspect_type_id_value',
+ )
+
+ # Establish that the underlying call was made with the expected
+ # request object values.
+ assert len(call.mock_calls)
+ _, args, _ = call.mock_calls[0]
+ arg = args[0].parent
+ mock_val = 'parent_value'
+ assert arg == mock_val
+ arg = args[0].aspect_type
+ mock_val = catalog.AspectType(name='name_value')
+ assert arg == mock_val
+ arg = args[0].aspect_type_id
+ mock_val = 'aspect_type_id_value'
+ assert arg == mock_val
+
+@pytest.mark.asyncio
+async def test_create_aspect_type_flattened_error_async():
+ client = CatalogServiceAsyncClient(
+ credentials=async_anonymous_credentials(),
+ )
+
+ # Attempting to call a method with both a request object and flattened
+ # fields is an error.
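+ # (The two calling conventions are mutually exclusive: the client cannot
+ # tell whether parent='parent_value' should override request.parent, so it
+ # raises ValueError rather than merging them silently.)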
+ with pytest.raises(ValueError):
+ await client.create_aspect_type(
+ catalog.CreateAspectTypeRequest(),
+ parent='parent_value',
+ aspect_type=catalog.AspectType(name='name_value'),
+ aspect_type_id='aspect_type_id_value',
+ )
+
+
+@pytest.mark.parametrize("request_type", [
+ catalog.UpdateAspectTypeRequest,
+ dict,
+])
+def test_update_aspect_type(request_type, transport: str = 'grpc'):
+ client = CatalogServiceClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ transport=transport,
+ )
+
+ # Everything is optional in proto3 as far as the runtime is concerned,
+ # and we are mocking out the actual API, so just send an empty request.
+ request = request_type()
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(
+ type(client.transport.update_aspect_type),
+ '__call__') as call:
+ # Designate an appropriate return value for the call.
+ call.return_value = operations_pb2.Operation(name='operations/spam')
+ response = client.update_aspect_type(request)
+
+ # Establish that the underlying gRPC stub method was called.
+ assert len(call.mock_calls) == 1
+ _, args, _ = call.mock_calls[0]
+ request = catalog.UpdateAspectTypeRequest()
+ assert args[0] == request
+
+ # Establish that the response is the type that we expect.
+ assert isinstance(response, future.Future)
+
+
+def test_update_aspect_type_non_empty_request_with_auto_populated_field():
+ # This test is a coverage failsafe to make sure that UUID4 fields are
+ # automatically populated, according to AIP-4235, with non-empty requests.
+ client = CatalogServiceClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ transport='grpc',
+ )
+
+ # Populate all string fields in the request which are not UUID4
+ # since we want to check that UUID4 are populated automatically
+ # if they meet the requirements of AIP 4235.
+ request = catalog.UpdateAspectTypeRequest()
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(
+ type(client.transport.update_aspect_type),
+ '__call__') as call:
+ call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string.
+ client.update_aspect_type(request=request)
+ call.assert_called()
+ _, args, _ = call.mock_calls[0]
+ assert args[0] == catalog.UpdateAspectTypeRequest()
+
+def test_update_aspect_type_use_cached_wrapped_rpc():
+ # Clients should use _prep_wrapped_messages to create cached wrapped rpcs,
+ # instead of constructing them on each call
+ with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn:
+ client = CatalogServiceClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ transport="grpc",
+ )
+
+ # Should wrap all calls on client creation
+ assert wrapper_fn.call_count > 0
+ wrapper_fn.reset_mock()
+
+ # Ensure method has been cached
+ assert client._transport.update_aspect_type in client._transport._wrapped_methods
+
+ # Replace cached wrapped function with mock
+ mock_rpc = mock.Mock()
+ mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string.
+ client._transport._wrapped_methods[client._transport.update_aspect_type] = mock_rpc
+ request = {}
+ client.update_aspect_type(request)
+
+ # Establish that the underlying gRPC stub method was called.
+ assert mock_rpc.call_count == 1
+
+ # Operation methods call wrapper_fn to build a cached
+ # client._transport.operations_client instance on first rpc call.
+ # Subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.update_aspect_type(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + +@pytest.mark.asyncio +async def test_update_aspect_type_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = CatalogServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._client._transport.update_aspect_type in client._client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[client._client._transport.update_aspect_type] = mock_rpc + + request = {} + await client.update_aspect_type(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + await client.update_aspect_type(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + +@pytest.mark.asyncio +async def test_update_aspect_type_async(transport: str = 'grpc_asyncio', request_type=catalog.UpdateAspectTypeRequest): + client = CatalogServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_aspect_type), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name='operations/spam') + ) + response = await client.update_aspect_type(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = catalog.UpdateAspectTypeRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +@pytest.mark.asyncio +async def test_update_aspect_type_async_from_dict(): + await test_update_aspect_type_async(request_type=dict) + +def test_update_aspect_type_field_headers(): + client = CatalogServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = catalog.UpdateAspectTypeRequest() + + request.aspect_type.name = 'name_value' + + # Mock the actual call within the gRPC stub, and fake the request. 
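+ # (For Update RPCs the routing value comes from the nested resource name,
+ # so the expected header below is 'aspect_type.name=name_value' rather
+ # than the flat 'name=name_value' used by the Get and Delete tests.)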
+ with mock.patch.object( + type(client.transport.update_aspect_type), + '__call__') as call: + call.return_value = operations_pb2.Operation(name='operations/op') + client.update_aspect_type(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'aspect_type.name=name_value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_update_aspect_type_field_headers_async(): + client = CatalogServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = catalog.UpdateAspectTypeRequest() + + request.aspect_type.name = 'name_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_aspect_type), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(operations_pb2.Operation(name='operations/op')) + await client.update_aspect_type(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'aspect_type.name=name_value', + ) in kw['metadata'] + + +def test_update_aspect_type_flattened(): + client = CatalogServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_aspect_type), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name='operations/op') + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.update_aspect_type( + aspect_type=catalog.AspectType(name='name_value'), + update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].aspect_type + mock_val = catalog.AspectType(name='name_value') + assert arg == mock_val + arg = args[0].update_mask + mock_val = field_mask_pb2.FieldMask(paths=['paths_value']) + assert arg == mock_val + + +def test_update_aspect_type_flattened_error(): + client = CatalogServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.update_aspect_type( + catalog.UpdateAspectTypeRequest(), + aspect_type=catalog.AspectType(name='name_value'), + update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), + ) + +@pytest.mark.asyncio +async def test_update_aspect_type_flattened_async(): + client = CatalogServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_aspect_type), + '__call__') as call: + # Designate an appropriate return value for the call. 
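+ # (The update_mask passed below is a google.protobuf.FieldMask; under the
+ # usual update contract only the listed paths — e.g.
+ # FieldMask(paths=['description']) — are applied to the stored AspectType,
+ # leaving unlisted fields untouched.)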
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
+ operations_pb2.Operation(name='operations/spam')
+ )
+ # Call the method with a truthy value for each flattened field,
+ # using the keyword arguments to the method.
+ response = await client.update_aspect_type(
+ aspect_type=catalog.AspectType(name='name_value'),
+ update_mask=field_mask_pb2.FieldMask(paths=['paths_value']),
+ )
+
+ # Establish that the underlying call was made with the expected
+ # request object values.
+ assert len(call.mock_calls)
+ _, args, _ = call.mock_calls[0]
+ arg = args[0].aspect_type
+ mock_val = catalog.AspectType(name='name_value')
+ assert arg == mock_val
+ arg = args[0].update_mask
+ mock_val = field_mask_pb2.FieldMask(paths=['paths_value'])
+ assert arg == mock_val
+
+@pytest.mark.asyncio
+async def test_update_aspect_type_flattened_error_async():
+ client = CatalogServiceAsyncClient(
+ credentials=async_anonymous_credentials(),
+ )
+
+ # Attempting to call a method with both a request object and flattened
+ # fields is an error.
+ with pytest.raises(ValueError):
+ await client.update_aspect_type(
+ catalog.UpdateAspectTypeRequest(),
+ aspect_type=catalog.AspectType(name='name_value'),
+ update_mask=field_mask_pb2.FieldMask(paths=['paths_value']),
+ )
+
+
+@pytest.mark.parametrize("request_type", [
+ catalog.DeleteAspectTypeRequest,
+ dict,
+])
+def test_delete_aspect_type(request_type, transport: str = 'grpc'):
+ client = CatalogServiceClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ transport=transport,
+ )
+
+ # Everything is optional in proto3 as far as the runtime is concerned,
+ # and we are mocking out the actual API, so just send an empty request.
+ request = request_type()
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(
+ type(client.transport.delete_aspect_type),
+ '__call__') as call:
+ # Designate an appropriate return value for the call.
+ call.return_value = operations_pb2.Operation(name='operations/spam')
+ response = client.delete_aspect_type(request)
+
+ # Establish that the underlying gRPC stub method was called.
+ assert len(call.mock_calls) == 1
+ _, args, _ = call.mock_calls[0]
+ request = catalog.DeleteAspectTypeRequest()
+ assert args[0] == request
+
+ # Establish that the response is the type that we expect.
+ assert isinstance(response, future.Future)
+
+
+def test_delete_aspect_type_non_empty_request_with_auto_populated_field():
+ # This test is a coverage failsafe to make sure that UUID4 fields are
+ # automatically populated, according to AIP-4235, with non-empty requests.
+ client = CatalogServiceClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ transport='grpc',
+ )
+
+ # Populate all string fields in the request which are not UUID4
+ # since we want to check that UUID4 are populated automatically
+ # if they meet the requirements of AIP 4235.
+ request = catalog.DeleteAspectTypeRequest(
+ name='name_value',
+ etag='etag_value',
+ )
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(
+ type(client.transport.delete_aspect_type),
+ '__call__') as call:
+ call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string.
+ client.delete_aspect_type(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == catalog.DeleteAspectTypeRequest( + name='name_value', + etag='etag_value', + ) + +def test_delete_aspect_type_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = CatalogServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.delete_aspect_type in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client._transport._wrapped_methods[client._transport.delete_aspect_type] = mock_rpc + request = {} + client.delete_aspect_type(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.delete_aspect_type(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + +@pytest.mark.asyncio +async def test_delete_aspect_type_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = CatalogServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._client._transport.delete_aspect_type in client._client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[client._client._transport.delete_aspect_type] = mock_rpc + + request = {} + await client.delete_aspect_type(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + await client.delete_aspect_type(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + +@pytest.mark.asyncio +async def test_delete_aspect_type_async(transport: str = 'grpc_asyncio', request_type=catalog.DeleteAspectTypeRequest): + client = CatalogServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.delete_aspect_type), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name='operations/spam') + ) + response = await client.delete_aspect_type(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = catalog.DeleteAspectTypeRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +@pytest.mark.asyncio +async def test_delete_aspect_type_async_from_dict(): + await test_delete_aspect_type_async(request_type=dict) + +def test_delete_aspect_type_field_headers(): + client = CatalogServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = catalog.DeleteAspectTypeRequest() + + request.name = 'name_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_aspect_type), + '__call__') as call: + call.return_value = operations_pb2.Operation(name='operations/op') + client.delete_aspect_type(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'name=name_value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_delete_aspect_type_field_headers_async(): + client = CatalogServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = catalog.DeleteAspectTypeRequest() + + request.name = 'name_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_aspect_type), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(operations_pb2.Operation(name='operations/op')) + await client.delete_aspect_type(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'name=name_value', + ) in kw['metadata'] + + +def test_delete_aspect_type_flattened(): + client = CatalogServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_aspect_type), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name='operations/op') + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.delete_aspect_type( + name='name_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. 
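+ # The mock records each stub invocation; args[0] is the request proto the
+ # client assembled from the flattened keyword arguments.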
+ assert len(call.mock_calls) == 1
+ _, args, _ = call.mock_calls[0]
+ arg = args[0].name
+ mock_val = 'name_value'
+ assert arg == mock_val
+
+
+def test_delete_aspect_type_flattened_error():
+ client = CatalogServiceClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ )
+
+ # Attempting to call a method with both a request object and flattened
+ # fields is an error.
+ with pytest.raises(ValueError):
+ client.delete_aspect_type(
+ catalog.DeleteAspectTypeRequest(),
+ name='name_value',
+ )
+
+@pytest.mark.asyncio
+async def test_delete_aspect_type_flattened_async():
+ client = CatalogServiceAsyncClient(
+ credentials=async_anonymous_credentials(),
+ )
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(
+ type(client.transport.delete_aspect_type),
+ '__call__') as call:
+ # Designate an appropriate return value for the call.
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
+ operations_pb2.Operation(name='operations/spam')
+ )
+ # Call the method with a truthy value for each flattened field,
+ # using the keyword arguments to the method.
+ response = await client.delete_aspect_type(
+ name='name_value',
+ )
+
+ # Establish that the underlying call was made with the expected
+ # request object values.
+ assert len(call.mock_calls)
+ _, args, _ = call.mock_calls[0]
+ arg = args[0].name
+ mock_val = 'name_value'
+ assert arg == mock_val
+
+@pytest.mark.asyncio
+async def test_delete_aspect_type_flattened_error_async():
+ client = CatalogServiceAsyncClient(
+ credentials=async_anonymous_credentials(),
+ )
+
+ # Attempting to call a method with both a request object and flattened
+ # fields is an error.
+ with pytest.raises(ValueError):
+ await client.delete_aspect_type(
+ catalog.DeleteAspectTypeRequest(),
+ name='name_value',
+ )
+
+
+@pytest.mark.parametrize("request_type", [
+ catalog.ListAspectTypesRequest,
+ dict,
+])
+def test_list_aspect_types(request_type, transport: str = 'grpc'):
+ client = CatalogServiceClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ transport=transport,
+ )
+
+ # Everything is optional in proto3 as far as the runtime is concerned,
+ # and we are mocking out the actual API, so just send an empty request.
+ request = request_type()
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(
+ type(client.transport.list_aspect_types),
+ '__call__') as call:
+ # Designate an appropriate return value for the call.
+ call.return_value = catalog.ListAspectTypesResponse(
+ next_page_token='next_page_token_value',
+ unreachable_locations=['unreachable_locations_value'],
+ )
+ response = client.list_aspect_types(request)
+
+ # Establish that the underlying gRPC stub method was called.
+ assert len(call.mock_calls) == 1
+ _, args, _ = call.mock_calls[0]
+ request = catalog.ListAspectTypesRequest()
+ assert args[0] == request
+
+ # Establish that the response is the type that we expect.
+ assert isinstance(response, pagers.ListAspectTypesPager)
+ assert response.next_page_token == 'next_page_token_value'
+ assert response.unreachable_locations == ['unreachable_locations_value']
+
+
+def test_list_aspect_types_non_empty_request_with_auto_populated_field():
+ # This test is a coverage failsafe to make sure that UUID4 fields are
+ # automatically populated, according to AIP-4235, with non-empty requests.
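+ # Note: none of the fields set below are UUID4-annotated, so the request
+ # is expected to echo back to the stub exactly as constructed.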
+ client = CatalogServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = catalog.ListAspectTypesRequest( + parent='parent_value', + page_token='page_token_value', + filter='filter_value', + order_by='order_by_value', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_aspect_types), + '__call__') as call: + call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client.list_aspect_types(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == catalog.ListAspectTypesRequest( + parent='parent_value', + page_token='page_token_value', + filter='filter_value', + order_by='order_by_value', + ) + +def test_list_aspect_types_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = CatalogServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.list_aspect_types in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client._transport._wrapped_methods[client._transport.list_aspect_types] = mock_rpc + request = {} + client.list_aspect_types(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.list_aspect_types(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + +@pytest.mark.asyncio +async def test_list_aspect_types_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = CatalogServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._client._transport.list_aspect_types in client._client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[client._client._transport.list_aspect_types] = mock_rpc + + request = {} + await client.list_aspect_types(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1
+
+ await client.list_aspect_types(request)
+
+ # Establish that a new wrapper was not created for this call
+ assert wrapper_fn.call_count == 0
+ assert mock_rpc.call_count == 2
+
+@pytest.mark.asyncio
+async def test_list_aspect_types_async(transport: str = 'grpc_asyncio', request_type=catalog.ListAspectTypesRequest):
+ client = CatalogServiceAsyncClient(
+ credentials=async_anonymous_credentials(),
+ transport=transport,
+ )
+
+ # Everything is optional in proto3 as far as the runtime is concerned,
+ # and we are mocking out the actual API, so just send an empty request.
+ request = request_type()
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(
+ type(client.transport.list_aspect_types),
+ '__call__') as call:
+ # Designate an appropriate return value for the call.
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(catalog.ListAspectTypesResponse(
+ next_page_token='next_page_token_value',
+ unreachable_locations=['unreachable_locations_value'],
+ ))
+ response = await client.list_aspect_types(request)
+
+ # Establish that the underlying gRPC stub method was called.
+ assert len(call.mock_calls)
+ _, args, _ = call.mock_calls[0]
+ request = catalog.ListAspectTypesRequest()
+ assert args[0] == request
+
+ # Establish that the response is the type that we expect.
+ assert isinstance(response, pagers.ListAspectTypesAsyncPager)
+ assert response.next_page_token == 'next_page_token_value'
+ assert response.unreachable_locations == ['unreachable_locations_value']
+
+
+@pytest.mark.asyncio
+async def test_list_aspect_types_async_from_dict():
+ await test_list_aspect_types_async(request_type=dict)
+
+def test_list_aspect_types_field_headers():
+ client = CatalogServiceClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ )
+
+ # Any value that is part of the HTTP/1.1 URI should be sent as
+ # a field header. Set these to a non-empty value.
+ request = catalog.ListAspectTypesRequest()
+
+ request.parent = 'parent_value'
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(
+ type(client.transport.list_aspect_types),
+ '__call__') as call:
+ call.return_value = catalog.ListAspectTypesResponse()
+ client.list_aspect_types(request)
+
+ # Establish that the underlying gRPC stub method was called.
+ assert len(call.mock_calls) == 1
+ _, args, _ = call.mock_calls[0]
+ assert args[0] == request
+
+ # Establish that the field header was sent.
+ _, _, kw = call.mock_calls[0]
+ assert (
+ 'x-goog-request-params',
+ 'parent=parent_value',
+ ) in kw['metadata']
+
+
+@pytest.mark.asyncio
+async def test_list_aspect_types_field_headers_async():
+ client = CatalogServiceAsyncClient(
+ credentials=async_anonymous_credentials(),
+ )
+
+ # Any value that is part of the HTTP/1.1 URI should be sent as
+ # a field header. Set these to a non-empty value.
+ request = catalog.ListAspectTypesRequest()
+
+ request.parent = 'parent_value'
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(
+ type(client.transport.list_aspect_types),
+ '__call__') as call:
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(catalog.ListAspectTypesResponse())
+ await client.list_aspect_types(request)
+
+ # Establish that the underlying gRPC stub method was called.
+ assert len(call.mock_calls)
+ _, args, _ = call.mock_calls[0]
+ assert args[0] == request
+
+ # Establish that the field header was sent.
+ _, _, kw = call.mock_calls[0]
+ assert (
+ 'x-goog-request-params',
+ 'parent=parent_value',
+ ) in kw['metadata']
+
+
+def test_list_aspect_types_flattened():
+ client = CatalogServiceClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ )
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(
+ type(client.transport.list_aspect_types),
+ '__call__') as call:
+ # Designate an appropriate return value for the call.
+ call.return_value = catalog.ListAspectTypesResponse()
+ # Call the method with a truthy value for each flattened field,
+ # using the keyword arguments to the method.
+ client.list_aspect_types(
+ parent='parent_value',
+ )
+
+ # Establish that the underlying call was made with the expected
+ # request object values.
+ assert len(call.mock_calls) == 1
+ _, args, _ = call.mock_calls[0]
+ arg = args[0].parent
+ mock_val = 'parent_value'
+ assert arg == mock_val
+
+
+def test_list_aspect_types_flattened_error():
+ client = CatalogServiceClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ )
+
+ # Attempting to call a method with both a request object and flattened
+ # fields is an error.
+ with pytest.raises(ValueError):
+ client.list_aspect_types(
+ catalog.ListAspectTypesRequest(),
+ parent='parent_value',
+ )
+
+@pytest.mark.asyncio
+async def test_list_aspect_types_flattened_async():
+ client = CatalogServiceAsyncClient(
+ credentials=async_anonymous_credentials(),
+ )
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(
+ type(client.transport.list_aspect_types),
+ '__call__') as call:
+ # Designate an appropriate return value for the call.
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(catalog.ListAspectTypesResponse())
+ # Call the method with a truthy value for each flattened field,
+ # using the keyword arguments to the method.
+ response = await client.list_aspect_types(
+ parent='parent_value',
+ )
+
+ # Establish that the underlying call was made with the expected
+ # request object values.
+ assert len(call.mock_calls)
+ _, args, _ = call.mock_calls[0]
+ arg = args[0].parent
+ mock_val = 'parent_value'
+ assert arg == mock_val
+
+@pytest.mark.asyncio
+async def test_list_aspect_types_flattened_error_async():
+ client = CatalogServiceAsyncClient(
+ credentials=async_anonymous_credentials(),
+ )
+
+ # Attempting to call a method with both a request object and flattened
+ # fields is an error.
+ with pytest.raises(ValueError):
+ await client.list_aspect_types(
+ catalog.ListAspectTypesRequest(),
+ parent='parent_value',
+ )
+
+
+def test_list_aspect_types_pager(transport_name: str = "grpc"):
+ client = CatalogServiceClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ transport=transport_name,
+ )
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(
+ type(client.transport.list_aspect_types),
+ '__call__') as call:
+ # Set the response to a series of pages.
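+ # Each element of side_effect is consumed by one stub invocation; the
+ # trailing RuntimeError fails the test fast if the pager over-fetches.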
+ call.side_effect = (
+ catalog.ListAspectTypesResponse(
+ aspect_types=[
+ catalog.AspectType(),
+ catalog.AspectType(),
+ catalog.AspectType(),
+ ],
+ next_page_token='abc',
+ ),
+ catalog.ListAspectTypesResponse(
+ aspect_types=[],
+ next_page_token='def',
+ ),
+ catalog.ListAspectTypesResponse(
+ aspect_types=[
+ catalog.AspectType(),
+ ],
+ next_page_token='ghi',
+ ),
+ catalog.ListAspectTypesResponse(
+ aspect_types=[
+ catalog.AspectType(),
+ catalog.AspectType(),
+ ],
+ ),
+ RuntimeError,
+ )
+
+ retry = retries.Retry()
+ timeout = 5
+ expected_metadata = (
+ gapic_v1.routing_header.to_grpc_metadata((
+ ('parent', ''),
+ )),
+ )
+ pager = client.list_aspect_types(request={}, retry=retry, timeout=timeout)
+
+ assert pager._metadata == expected_metadata
+ assert pager._retry == retry
+ assert pager._timeout == timeout
+
+ results = list(pager)
+ assert len(results) == 6
+ assert all(isinstance(i, catalog.AspectType)
+ for i in results)
+
+
+def test_list_aspect_types_pages(transport_name: str = "grpc"):
+ client = CatalogServiceClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ transport=transport_name,
+ )
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(
+ type(client.transport.list_aspect_types),
+ '__call__') as call:
+ # Set the response to a series of pages.
+ call.side_effect = (
+ catalog.ListAspectTypesResponse(
+ aspect_types=[
+ catalog.AspectType(),
+ catalog.AspectType(),
+ catalog.AspectType(),
+ ],
+ next_page_token='abc',
+ ),
+ catalog.ListAspectTypesResponse(
+ aspect_types=[],
+ next_page_token='def',
+ ),
+ catalog.ListAspectTypesResponse(
+ aspect_types=[
+ catalog.AspectType(),
+ ],
+ next_page_token='ghi',
+ ),
+ catalog.ListAspectTypesResponse(
+ aspect_types=[
+ catalog.AspectType(),
+ catalog.AspectType(),
+ ],
+ ),
+ RuntimeError,
+ )
+ pages = list(client.list_aspect_types(request={}).pages)
+ for page_, token in zip(pages, ['abc','def','ghi', '']):
+ assert page_.raw_page.next_page_token == token
+
+@pytest.mark.asyncio
+async def test_list_aspect_types_async_pager():
+ client = CatalogServiceAsyncClient(
+ credentials=async_anonymous_credentials(),
+ )
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(
+ type(client.transport.list_aspect_types),
+ '__call__', new_callable=mock.AsyncMock) as call:
+ # Set the response to a series of pages.
+ call.side_effect = (
+ catalog.ListAspectTypesResponse(
+ aspect_types=[
+ catalog.AspectType(),
+ catalog.AspectType(),
+ catalog.AspectType(),
+ ],
+ next_page_token='abc',
+ ),
+ catalog.ListAspectTypesResponse(
+ aspect_types=[],
+ next_page_token='def',
+ ),
+ catalog.ListAspectTypesResponse(
+ aspect_types=[
+ catalog.AspectType(),
+ ],
+ next_page_token='ghi',
+ ),
+ catalog.ListAspectTypesResponse(
+ aspect_types=[
+ catalog.AspectType(),
+ catalog.AspectType(),
+ ],
+ ),
+ RuntimeError,
+ )
+ async_pager = await client.list_aspect_types(request={})
+ assert async_pager.next_page_token == 'abc'
+ responses = []
+ async for response in async_pager: # pragma: no branch
+ responses.append(response)
+
+ assert len(responses) == 6
+ assert all(isinstance(i, catalog.AspectType)
+ for i in responses)
+
+
+@pytest.mark.asyncio
+async def test_list_aspect_types_async_pages():
+ client = CatalogServiceAsyncClient(
+ credentials=async_anonymous_credentials(),
+ )
+
+ # Mock the actual call within the gRPC stub, and fake the request.
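+ # new_callable=mock.AsyncMock makes the patched stub awaitable, which the
+ # async pager machinery requires.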
+ with mock.patch.object( + type(client.transport.list_aspect_types), + '__call__', new_callable=mock.AsyncMock) as call: + # Set the response to a series of pages. + call.side_effect = ( + catalog.ListAspectTypesResponse( + aspect_types=[ + catalog.AspectType(), + catalog.AspectType(), + catalog.AspectType(), + ], + next_page_token='abc', + ), + catalog.ListAspectTypesResponse( + aspect_types=[], + next_page_token='def', + ), + catalog.ListAspectTypesResponse( + aspect_types=[ + catalog.AspectType(), + ], + next_page_token='ghi', + ), + catalog.ListAspectTypesResponse( + aspect_types=[ + catalog.AspectType(), + catalog.AspectType(), + ], + ), + RuntimeError, + ) + pages = [] + # Workaround issue in python 3.9 related to code coverage by adding `# pragma: no branch` + # See https://github.com/googleapis/gapic-generator-python/pull/1174#issuecomment-1025132372 + async for page_ in ( # pragma: no branch + await client.list_aspect_types(request={}) + ).pages: + pages.append(page_) + for page_, token in zip(pages, ['abc','def','ghi', '']): + assert page_.raw_page.next_page_token == token + +@pytest.mark.parametrize("request_type", [ + catalog.GetAspectTypeRequest, + dict, +]) +def test_get_aspect_type(request_type, transport: str = 'grpc'): + client = CatalogServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_aspect_type), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = catalog.AspectType( + name='name_value', + uid='uid_value', + description='description_value', + display_name='display_name_value', + etag='etag_value', + transfer_status=catalog.TransferStatus.TRANSFER_STATUS_MIGRATED, + ) + response = client.get_aspect_type(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = catalog.GetAspectTypeRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, catalog.AspectType) + assert response.name == 'name_value' + assert response.uid == 'uid_value' + assert response.description == 'description_value' + assert response.display_name == 'display_name_value' + assert response.etag == 'etag_value' + assert response.transfer_status == catalog.TransferStatus.TRANSFER_STATUS_MIGRATED + + +def test_get_aspect_type_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = CatalogServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = catalog.GetAspectTypeRequest( + name='name_value', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_aspect_type), + '__call__') as call: + call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. 
+ client.get_aspect_type(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == catalog.GetAspectTypeRequest( + name='name_value', + ) + +def test_get_aspect_type_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = CatalogServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.get_aspect_type in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client._transport._wrapped_methods[client._transport.get_aspect_type] = mock_rpc + request = {} + client.get_aspect_type(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.get_aspect_type(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + +@pytest.mark.asyncio +async def test_get_aspect_type_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = CatalogServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._client._transport.get_aspect_type in client._client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[client._client._transport.get_aspect_type] = mock_rpc + + request = {} + await client.get_aspect_type(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + await client.get_aspect_type(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + +@pytest.mark.asyncio +async def test_get_aspect_type_async(transport: str = 'grpc_asyncio', request_type=catalog.GetAspectTypeRequest): + client = CatalogServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_aspect_type), + '__call__') as call: + # Designate an appropriate return value for the call. 
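+ # FakeUnaryUnaryCall mimics a grpc.aio call object: awaiting it yields the
+ # wrapped response proto.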
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(catalog.AspectType(
+ name='name_value',
+ uid='uid_value',
+ description='description_value',
+ display_name='display_name_value',
+ etag='etag_value',
+ transfer_status=catalog.TransferStatus.TRANSFER_STATUS_MIGRATED,
+ ))
+ response = await client.get_aspect_type(request)
+
+ # Establish that the underlying gRPC stub method was called.
+ assert len(call.mock_calls)
+ _, args, _ = call.mock_calls[0]
+ request = catalog.GetAspectTypeRequest()
+ assert args[0] == request
+
+ # Establish that the response is the type that we expect.
+ assert isinstance(response, catalog.AspectType)
+ assert response.name == 'name_value'
+ assert response.uid == 'uid_value'
+ assert response.description == 'description_value'
+ assert response.display_name == 'display_name_value'
+ assert response.etag == 'etag_value'
+ assert response.transfer_status == catalog.TransferStatus.TRANSFER_STATUS_MIGRATED
+
+
+@pytest.mark.asyncio
+async def test_get_aspect_type_async_from_dict():
+ await test_get_aspect_type_async(request_type=dict)
+
+def test_get_aspect_type_field_headers():
+ client = CatalogServiceClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ )
+
+ # Any value that is part of the HTTP/1.1 URI should be sent as
+ # a field header. Set these to a non-empty value.
+ request = catalog.GetAspectTypeRequest()
+
+ request.name = 'name_value'
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(
+ type(client.transport.get_aspect_type),
+ '__call__') as call:
+ call.return_value = catalog.AspectType()
+ client.get_aspect_type(request)
+
+ # Establish that the underlying gRPC stub method was called.
+ assert len(call.mock_calls) == 1
+ _, args, _ = call.mock_calls[0]
+ assert args[0] == request
+
+ # Establish that the field header was sent.
+ _, _, kw = call.mock_calls[0]
+ assert (
+ 'x-goog-request-params',
+ 'name=name_value',
+ ) in kw['metadata']
+
+
+@pytest.mark.asyncio
+async def test_get_aspect_type_field_headers_async():
+ client = CatalogServiceAsyncClient(
+ credentials=async_anonymous_credentials(),
+ )
+
+ # Any value that is part of the HTTP/1.1 URI should be sent as
+ # a field header. Set these to a non-empty value.
+ request = catalog.GetAspectTypeRequest()
+
+ request.name = 'name_value'
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(
+ type(client.transport.get_aspect_type),
+ '__call__') as call:
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(catalog.AspectType())
+ await client.get_aspect_type(request)
+
+ # Establish that the underlying gRPC stub method was called.
+ assert len(call.mock_calls)
+ _, args, _ = call.mock_calls[0]
+ assert args[0] == request
+
+ # Establish that the field header was sent.
+ _, _, kw = call.mock_calls[0]
+ assert (
+ 'x-goog-request-params',
+ 'name=name_value',
+ ) in kw['metadata']
+
+
+def test_get_aspect_type_flattened():
+ client = CatalogServiceClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ )
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(
+ type(client.transport.get_aspect_type),
+ '__call__') as call:
+ # Designate an appropriate return value for the call.
+ call.return_value = catalog.AspectType()
+ # Call the method with a truthy value for each flattened field,
+ # using the keyword arguments to the method.
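+ # Truthy values let the assertions below distinguish fields the client
+ # actually set from proto3 defaults.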
+ client.get_aspect_type(
+ name='name_value',
+ )
+
+ # Establish that the underlying call was made with the expected
+ # request object values.
+ assert len(call.mock_calls) == 1
+ _, args, _ = call.mock_calls[0]
+ arg = args[0].name
+ mock_val = 'name_value'
+ assert arg == mock_val
+
+
+def test_get_aspect_type_flattened_error():
+ client = CatalogServiceClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ )
+
+ # Attempting to call a method with both a request object and flattened
+ # fields is an error.
+ with pytest.raises(ValueError):
+ client.get_aspect_type(
+ catalog.GetAspectTypeRequest(),
+ name='name_value',
+ )
+
+@pytest.mark.asyncio
+async def test_get_aspect_type_flattened_async():
+ client = CatalogServiceAsyncClient(
+ credentials=async_anonymous_credentials(),
+ )
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(
+ type(client.transport.get_aspect_type),
+ '__call__') as call:
+ # Designate an appropriate return value for the call.
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(catalog.AspectType())
+ # Call the method with a truthy value for each flattened field,
+ # using the keyword arguments to the method.
+ response = await client.get_aspect_type(
+ name='name_value',
+ )
+
+ # Establish that the underlying call was made with the expected
+ # request object values.
+ assert len(call.mock_calls)
+ _, args, _ = call.mock_calls[0]
+ arg = args[0].name
+ mock_val = 'name_value'
+ assert arg == mock_val
+
+@pytest.mark.asyncio
+async def test_get_aspect_type_flattened_error_async():
+ client = CatalogServiceAsyncClient(
+ credentials=async_anonymous_credentials(),
+ )
+
+ # Attempting to call a method with both a request object and flattened
+ # fields is an error.
+ with pytest.raises(ValueError):
+ await client.get_aspect_type(
+ catalog.GetAspectTypeRequest(),
+ name='name_value',
+ )
+
+
+@pytest.mark.parametrize("request_type", [
+ catalog.CreateEntryGroupRequest,
+ dict,
+])
+def test_create_entry_group(request_type, transport: str = 'grpc'):
+ client = CatalogServiceClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ transport=transport,
+ )
+
+ # Everything is optional in proto3 as far as the runtime is concerned,
+ # and we are mocking out the actual API, so just send an empty request.
+ request = request_type()
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(
+ type(client.transport.create_entry_group),
+ '__call__') as call:
+ # Designate an appropriate return value for the call.
+ call.return_value = operations_pb2.Operation(name='operations/spam')
+ response = client.create_entry_group(request)
+
+ # Establish that the underlying gRPC stub method was called.
+ assert len(call.mock_calls) == 1
+ _, args, _ = call.mock_calls[0]
+ request = catalog.CreateEntryGroupRequest()
+ assert args[0] == request
+
+ # Establish that the response is the type that we expect.
+ assert isinstance(response, future.Future)
+
+
+def test_create_entry_group_non_empty_request_with_auto_populated_field():
+ # This test is a coverage failsafe to make sure that UUID4 fields are
+ # automatically populated, according to AIP-4235, with non-empty requests.
+ client = CatalogServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = catalog.CreateEntryGroupRequest( + parent='parent_value', + entry_group_id='entry_group_id_value', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_entry_group), + '__call__') as call: + call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client.create_entry_group(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == catalog.CreateEntryGroupRequest( + parent='parent_value', + entry_group_id='entry_group_id_value', + ) + +def test_create_entry_group_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = CatalogServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.create_entry_group in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client._transport._wrapped_methods[client._transport.create_entry_group] = mock_rpc + request = {} + client.create_entry_group(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.create_entry_group(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + +@pytest.mark.asyncio +async def test_create_entry_group_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = CatalogServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._client._transport.create_entry_group in client._client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[client._client._transport.create_entry_group] = mock_rpc + + request = {} + await client.create_entry_group(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + await client.create_entry_group(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + +@pytest.mark.asyncio +async def test_create_entry_group_async(transport: str = 'grpc_asyncio', request_type=catalog.CreateEntryGroupRequest): + client = CatalogServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_entry_group), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name='operations/spam') + ) + response = await client.create_entry_group(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = catalog.CreateEntryGroupRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +@pytest.mark.asyncio +async def test_create_entry_group_async_from_dict(): + await test_create_entry_group_async(request_type=dict) + +def test_create_entry_group_field_headers(): + client = CatalogServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = catalog.CreateEntryGroupRequest() + + request.parent = 'parent_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_entry_group), + '__call__') as call: + call.return_value = operations_pb2.Operation(name='operations/op') + client.create_entry_group(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'parent=parent_value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_create_entry_group_field_headers_async(): + client = CatalogServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = catalog.CreateEntryGroupRequest() + + request.parent = 'parent_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_entry_group), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(operations_pb2.Operation(name='operations/op')) + await client.create_entry_group(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. 
+ _, _, kw = call.mock_calls[0]
+ assert (
+ 'x-goog-request-params',
+ 'parent=parent_value',
+ ) in kw['metadata']
+
+
+def test_create_entry_group_flattened():
+ client = CatalogServiceClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ )
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(
+ type(client.transport.create_entry_group),
+ '__call__') as call:
+ # Designate an appropriate return value for the call.
+ call.return_value = operations_pb2.Operation(name='operations/op')
+ # Call the method with a truthy value for each flattened field,
+ # using the keyword arguments to the method.
+ client.create_entry_group(
+ parent='parent_value',
+ entry_group=catalog.EntryGroup(name='name_value'),
+ entry_group_id='entry_group_id_value',
+ )
+
+ # Establish that the underlying call was made with the expected
+ # request object values.
+ assert len(call.mock_calls) == 1
+ _, args, _ = call.mock_calls[0]
+ arg = args[0].parent
+ mock_val = 'parent_value'
+ assert arg == mock_val
+ arg = args[0].entry_group
+ mock_val = catalog.EntryGroup(name='name_value')
+ assert arg == mock_val
+ arg = args[0].entry_group_id
+ mock_val = 'entry_group_id_value'
+ assert arg == mock_val
+
+
+def test_create_entry_group_flattened_error():
+ client = CatalogServiceClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ )
+
+ # Attempting to call a method with both a request object and flattened
+ # fields is an error.
+ with pytest.raises(ValueError):
+ client.create_entry_group(
+ catalog.CreateEntryGroupRequest(),
+ parent='parent_value',
+ entry_group=catalog.EntryGroup(name='name_value'),
+ entry_group_id='entry_group_id_value',
+ )
+
+@pytest.mark.asyncio
+async def test_create_entry_group_flattened_async():
+ client = CatalogServiceAsyncClient(
+ credentials=async_anonymous_credentials(),
+ )
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(
+ type(client.transport.create_entry_group),
+ '__call__') as call:
+ # Designate an appropriate return value for the call.
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
+ operations_pb2.Operation(name='operations/spam')
+ )
+ # Call the method with a truthy value for each flattened field,
+ # using the keyword arguments to the method.
+ response = await client.create_entry_group(
+ parent='parent_value',
+ entry_group=catalog.EntryGroup(name='name_value'),
+ entry_group_id='entry_group_id_value',
+ )
+
+ # Establish that the underlying call was made with the expected
+ # request object values.
+ assert len(call.mock_calls)
+ _, args, _ = call.mock_calls[0]
+ arg = args[0].parent
+ mock_val = 'parent_value'
+ assert arg == mock_val
+ arg = args[0].entry_group
+ mock_val = catalog.EntryGroup(name='name_value')
+ assert arg == mock_val
+ arg = args[0].entry_group_id
+ mock_val = 'entry_group_id_value'
+ assert arg == mock_val
+
+@pytest.mark.asyncio
+async def test_create_entry_group_flattened_error_async():
+ client = CatalogServiceAsyncClient(
+ credentials=async_anonymous_credentials(),
+ )
+
+ # Attempting to call a method with both a request object and flattened
+ # fields is an error.
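+ # The ValueError is raised client-side, before any RPC is attempted, so no
+ # stub mock is needed here.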
+ with pytest.raises(ValueError): + await client.create_entry_group( + catalog.CreateEntryGroupRequest(), + parent='parent_value', + entry_group=catalog.EntryGroup(name='name_value'), + entry_group_id='entry_group_id_value', + ) + + +@pytest.mark.parametrize("request_type", [ + catalog.UpdateEntryGroupRequest, + dict, +]) +def test_update_entry_group(request_type, transport: str = 'grpc'): + client = CatalogServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_entry_group), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name='operations/spam') + response = client.update_entry_group(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = catalog.UpdateEntryGroupRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +def test_update_entry_group_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = CatalogServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = catalog.UpdateEntryGroupRequest( + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_entry_group), + '__call__') as call: + call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client.update_entry_group(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == catalog.UpdateEntryGroupRequest( + ) + +def test_update_entry_group_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = CatalogServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.update_entry_group in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client._transport._wrapped_methods[client._transport.update_entry_group] = mock_rpc + request = {} + client.update_entry_group(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.update_entry_group(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + +@pytest.mark.asyncio +async def test_update_entry_group_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = CatalogServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._client._transport.update_entry_group in client._client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[client._client._transport.update_entry_group] = mock_rpc + + request = {} + await client.update_entry_group(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + await client.update_entry_group(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + +@pytest.mark.asyncio +async def test_update_entry_group_async(transport: str = 'grpc_asyncio', request_type=catalog.UpdateEntryGroupRequest): + client = CatalogServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_entry_group), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name='operations/spam') + ) + response = await client.update_entry_group(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = catalog.UpdateEntryGroupRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +@pytest.mark.asyncio +async def test_update_entry_group_async_from_dict(): + await test_update_entry_group_async(request_type=dict) + +def test_update_entry_group_field_headers(): + client = CatalogServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = catalog.UpdateEntryGroupRequest() + + request.entry_group.name = 'name_value' + + # Mock the actual call within the gRPC stub, and fake the request. 
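+ # The routing field here is nested, so the expected request-params header
+ # key is 'entry_group.name' rather than a top-level field name.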
+ with mock.patch.object( + type(client.transport.update_entry_group), + '__call__') as call: + call.return_value = operations_pb2.Operation(name='operations/op') + client.update_entry_group(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'entry_group.name=name_value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_update_entry_group_field_headers_async(): + client = CatalogServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = catalog.UpdateEntryGroupRequest() + + request.entry_group.name = 'name_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_entry_group), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(operations_pb2.Operation(name='operations/op')) + await client.update_entry_group(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'entry_group.name=name_value', + ) in kw['metadata'] + + +def test_update_entry_group_flattened(): + client = CatalogServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_entry_group), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name='operations/op') + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.update_entry_group( + entry_group=catalog.EntryGroup(name='name_value'), + update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].entry_group + mock_val = catalog.EntryGroup(name='name_value') + assert arg == mock_val + arg = args[0].update_mask + mock_val = field_mask_pb2.FieldMask(paths=['paths_value']) + assert arg == mock_val + + +def test_update_entry_group_flattened_error(): + client = CatalogServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.update_entry_group( + catalog.UpdateEntryGroupRequest(), + entry_group=catalog.EntryGroup(name='name_value'), + update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), + ) + +@pytest.mark.asyncio +async def test_update_entry_group_flattened_async(): + client = CatalogServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_entry_group), + '__call__') as call: + # Designate an appropriate return value for the call. 
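+ # The async transport awaits the stub's result, so the mocked value must be
+ # awaitable; a bare Operation proto would not suffice on this path.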
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
+ operations_pb2.Operation(name='operations/spam')
+ )
+ # Call the method with a truthy value for each flattened field,
+ # using the keyword arguments to the method.
+ response = await client.update_entry_group(
+ entry_group=catalog.EntryGroup(name='name_value'),
+ update_mask=field_mask_pb2.FieldMask(paths=['paths_value']),
+ )
+
+ # Establish that the underlying call was made with the expected
+ # request object values.
+ assert len(call.mock_calls)
+ _, args, _ = call.mock_calls[0]
+ arg = args[0].entry_group
+ mock_val = catalog.EntryGroup(name='name_value')
+ assert arg == mock_val
+ arg = args[0].update_mask
+ mock_val = field_mask_pb2.FieldMask(paths=['paths_value'])
+ assert arg == mock_val
+
+@pytest.mark.asyncio
+async def test_update_entry_group_flattened_error_async():
+ client = CatalogServiceAsyncClient(
+ credentials=async_anonymous_credentials(),
+ )
+
+ # Attempting to call a method with both a request object and flattened
+ # fields is an error.
+ with pytest.raises(ValueError):
+ await client.update_entry_group(
+ catalog.UpdateEntryGroupRequest(),
+ entry_group=catalog.EntryGroup(name='name_value'),
+ update_mask=field_mask_pb2.FieldMask(paths=['paths_value']),
+ )
+
+
+@pytest.mark.parametrize("request_type", [
+ catalog.DeleteEntryGroupRequest,
+ dict,
+])
+def test_delete_entry_group(request_type, transport: str = 'grpc'):
+ client = CatalogServiceClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ transport=transport,
+ )
+
+ # Everything is optional in proto3 as far as the runtime is concerned,
+ # and we are mocking out the actual API, so just send an empty request.
+ request = request_type()
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(
+ type(client.transport.delete_entry_group),
+ '__call__') as call:
+ # Designate an appropriate return value for the call.
+ call.return_value = operations_pb2.Operation(name='operations/spam')
+ response = client.delete_entry_group(request)
+
+ # Establish that the underlying gRPC stub method was called.
+ assert len(call.mock_calls) == 1
+ _, args, _ = call.mock_calls[0]
+ request = catalog.DeleteEntryGroupRequest()
+ assert args[0] == request
+
+ # Establish that the response is the type that we expect.
+ assert isinstance(response, future.Future)
+
+
+def test_delete_entry_group_non_empty_request_with_auto_populated_field():
+ # This test is a coverage failsafe to make sure that UUID4 fields are
+ # automatically populated, according to AIP-4235, with non-empty requests.
+ client = CatalogServiceClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ transport='grpc',
+ )
+
+ # Populate all string fields in the request which are not UUID4
+ # since we want to check that UUID4 are populated automatically
+ # if they meet the requirements of AIP 4235.
+ request = catalog.DeleteEntryGroupRequest(
+ name='name_value',
+ etag='etag_value',
+ )
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(
+ type(client.transport.delete_entry_group),
+ '__call__') as call:
+ call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string.
+ client.delete_entry_group(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == catalog.DeleteEntryGroupRequest( + name='name_value', + etag='etag_value', + ) + +def test_delete_entry_group_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = CatalogServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.delete_entry_group in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client._transport._wrapped_methods[client._transport.delete_entry_group] = mock_rpc + request = {} + client.delete_entry_group(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.delete_entry_group(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + +@pytest.mark.asyncio +async def test_delete_entry_group_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = CatalogServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._client._transport.delete_entry_group in client._client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[client._client._transport.delete_entry_group] = mock_rpc + + request = {} + await client.delete_entry_group(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + await client.delete_entry_group(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + +@pytest.mark.asyncio +async def test_delete_entry_group_async(transport: str = 'grpc_asyncio', request_type=catalog.DeleteEntryGroupRequest): + client = CatalogServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. 
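+ # Patching '__call__' on the multicallable's type intercepts the RPC
+ # without opening a real channel.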
+ with mock.patch.object( + type(client.transport.delete_entry_group), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name='operations/spam') + ) + response = await client.delete_entry_group(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = catalog.DeleteEntryGroupRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +@pytest.mark.asyncio +async def test_delete_entry_group_async_from_dict(): + await test_delete_entry_group_async(request_type=dict) + +def test_delete_entry_group_field_headers(): + client = CatalogServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = catalog.DeleteEntryGroupRequest() + + request.name = 'name_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_entry_group), + '__call__') as call: + call.return_value = operations_pb2.Operation(name='operations/op') + client.delete_entry_group(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'name=name_value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_delete_entry_group_field_headers_async(): + client = CatalogServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = catalog.DeleteEntryGroupRequest() + + request.name = 'name_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_entry_group), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(operations_pb2.Operation(name='operations/op')) + await client.delete_entry_group(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'name=name_value', + ) in kw['metadata'] + + +def test_delete_entry_group_flattened(): + client = CatalogServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_entry_group), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name='operations/op') + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.delete_entry_group( + name='name_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. 
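+ # The flattened `name` keyword should have been folded into the
+ # corresponding field of the request message.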
+ assert len(call.mock_calls) == 1
+ _, args, _ = call.mock_calls[0]
+ arg = args[0].name
+ mock_val = 'name_value'
+ assert arg == mock_val
+
+
+def test_delete_entry_group_flattened_error():
+ client = CatalogServiceClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ )
+
+ # Attempting to call a method with both a request object and flattened
+ # fields is an error.
+ with pytest.raises(ValueError):
+ client.delete_entry_group(
+ catalog.DeleteEntryGroupRequest(),
+ name='name_value',
+ )
+
+@pytest.mark.asyncio
+async def test_delete_entry_group_flattened_async():
+ client = CatalogServiceAsyncClient(
+ credentials=async_anonymous_credentials(),
+ )
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(
+ type(client.transport.delete_entry_group),
+ '__call__') as call:
+ # Designate an appropriate return value for the call.
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
+ operations_pb2.Operation(name='operations/spam')
+ )
+ # Call the method with a truthy value for each flattened field,
+ # using the keyword arguments to the method.
+ response = await client.delete_entry_group(
+ name='name_value',
+ )
+
+ # Establish that the underlying call was made with the expected
+ # request object values.
+ assert len(call.mock_calls)
+ _, args, _ = call.mock_calls[0]
+ arg = args[0].name
+ mock_val = 'name_value'
+ assert arg == mock_val
+
+@pytest.mark.asyncio
+async def test_delete_entry_group_flattened_error_async():
+ client = CatalogServiceAsyncClient(
+ credentials=async_anonymous_credentials(),
+ )
+
+ # Attempting to call a method with both a request object and flattened
+ # fields is an error.
+ with pytest.raises(ValueError):
+ await client.delete_entry_group(
+ catalog.DeleteEntryGroupRequest(),
+ name='name_value',
+ )
+
+
+@pytest.mark.parametrize("request_type", [
+ catalog.ListEntryGroupsRequest,
+ dict,
+])
+def test_list_entry_groups(request_type, transport: str = 'grpc'):
+ client = CatalogServiceClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ transport=transport,
+ )
+
+ # Everything is optional in proto3 as far as the runtime is concerned,
+ # and we are mocking out the actual API, so just send an empty request.
+ request = request_type()
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(
+ type(client.transport.list_entry_groups),
+ '__call__') as call:
+ # Designate an appropriate return value for the call.
+ call.return_value = catalog.ListEntryGroupsResponse(
+ next_page_token='next_page_token_value',
+ unreachable_locations=['unreachable_locations_value'],
+ )
+ response = client.list_entry_groups(request)
+
+ # Establish that the underlying gRPC stub method was called.
+ assert len(call.mock_calls) == 1
+ _, args, _ = call.mock_calls[0]
+ request = catalog.ListEntryGroupsRequest()
+ assert args[0] == request
+
+ # Establish that the response is the type that we expect.
+ assert isinstance(response, pagers.ListEntryGroupsPager)
+ assert response.next_page_token == 'next_page_token_value'
+ assert response.unreachable_locations == ['unreachable_locations_value']
+
+
+def test_list_entry_groups_non_empty_request_with_auto_populated_field():
+ # This test is a coverage failsafe to make sure that UUID4 fields are
+ # automatically populated, according to AIP-4235, with non-empty requests.
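+ # (AIP-4235 auto-population applies to request_id-style fields; the
+ # plain string fields set below should round-trip unchanged.)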
+ client = CatalogServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = catalog.ListEntryGroupsRequest( + parent='parent_value', + page_token='page_token_value', + filter='filter_value', + order_by='order_by_value', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_entry_groups), + '__call__') as call: + call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client.list_entry_groups(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == catalog.ListEntryGroupsRequest( + parent='parent_value', + page_token='page_token_value', + filter='filter_value', + order_by='order_by_value', + ) + +def test_list_entry_groups_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = CatalogServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.list_entry_groups in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client._transport._wrapped_methods[client._transport.list_entry_groups] = mock_rpc + request = {} + client.list_entry_groups(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.list_entry_groups(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + +@pytest.mark.asyncio +async def test_list_entry_groups_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = CatalogServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._client._transport.list_entry_groups in client._client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[client._client._transport.list_entry_groups] = mock_rpc + + request = {} + await client.list_entry_groups(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1
+
+ await client.list_entry_groups(request)
+
+ # Establish that a new wrapper was not created for this call
+ assert wrapper_fn.call_count == 0
+ assert mock_rpc.call_count == 2
+
+@pytest.mark.asyncio
+async def test_list_entry_groups_async(transport: str = 'grpc_asyncio', request_type=catalog.ListEntryGroupsRequest):
+ client = CatalogServiceAsyncClient(
+ credentials=async_anonymous_credentials(),
+ transport=transport,
+ )
+
+ # Everything is optional in proto3 as far as the runtime is concerned,
+ # and we are mocking out the actual API, so just send an empty request.
+ request = request_type()
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(
+ type(client.transport.list_entry_groups),
+ '__call__') as call:
+ # Designate an appropriate return value for the call.
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(catalog.ListEntryGroupsResponse(
+ next_page_token='next_page_token_value',
+ unreachable_locations=['unreachable_locations_value'],
+ ))
+ response = await client.list_entry_groups(request)
+
+ # Establish that the underlying gRPC stub method was called.
+ assert len(call.mock_calls)
+ _, args, _ = call.mock_calls[0]
+ request = catalog.ListEntryGroupsRequest()
+ assert args[0] == request
+
+ # Establish that the response is the type that we expect.
+ assert isinstance(response, pagers.ListEntryGroupsAsyncPager)
+ assert response.next_page_token == 'next_page_token_value'
+ assert response.unreachable_locations == ['unreachable_locations_value']
+
+
+@pytest.mark.asyncio
+async def test_list_entry_groups_async_from_dict():
+ await test_list_entry_groups_async(request_type=dict)
+
+def test_list_entry_groups_field_headers():
+ client = CatalogServiceClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ )
+
+ # Any value that is part of the HTTP/1.1 URI should be sent as
+ # a field header. Set these to a non-empty value.
+ request = catalog.ListEntryGroupsRequest()
+
+ request.parent = 'parent_value'
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(
+ type(client.transport.list_entry_groups),
+ '__call__') as call:
+ call.return_value = catalog.ListEntryGroupsResponse()
+ client.list_entry_groups(request)
+
+ # Establish that the underlying gRPC stub method was called.
+ assert len(call.mock_calls) == 1
+ _, args, _ = call.mock_calls[0]
+ assert args[0] == request
+
+ # Establish that the field header was sent.
+ _, _, kw = call.mock_calls[0]
+ assert (
+ 'x-goog-request-params',
+ 'parent=parent_value',
+ ) in kw['metadata']
+
+
+@pytest.mark.asyncio
+async def test_list_entry_groups_field_headers_async():
+ client = CatalogServiceAsyncClient(
+ credentials=async_anonymous_credentials(),
+ )
+
+ # Any value that is part of the HTTP/1.1 URI should be sent as
+ # a field header. Set these to a non-empty value.
+ request = catalog.ListEntryGroupsRequest()
+
+ request.parent = 'parent_value'
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(
+ type(client.transport.list_entry_groups),
+ '__call__') as call:
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(catalog.ListEntryGroupsResponse())
+ await client.list_entry_groups(request)
+
+ # Establish that the underlying gRPC stub method was called.
+ assert len(call.mock_calls)
+ _, args, _ = call.mock_calls[0]
+ assert args[0] == request
+
+ # Establish that the field header was sent.
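+ # Routing parameters travel as an ('x-goog-request-params', ...) tuple
+ # in the call's `metadata` kwarg.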
+ _, _, kw = call.mock_calls[0]
+ assert (
+ 'x-goog-request-params',
+ 'parent=parent_value',
+ ) in kw['metadata']
+
+
+def test_list_entry_groups_flattened():
+ client = CatalogServiceClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ )
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(
+ type(client.transport.list_entry_groups),
+ '__call__') as call:
+ # Designate an appropriate return value for the call.
+ call.return_value = catalog.ListEntryGroupsResponse()
+ # Call the method with a truthy value for each flattened field,
+ # using the keyword arguments to the method.
+ client.list_entry_groups(
+ parent='parent_value',
+ )
+
+ # Establish that the underlying call was made with the expected
+ # request object values.
+ assert len(call.mock_calls) == 1
+ _, args, _ = call.mock_calls[0]
+ arg = args[0].parent
+ mock_val = 'parent_value'
+ assert arg == mock_val
+
+
+def test_list_entry_groups_flattened_error():
+ client = CatalogServiceClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ )
+
+ # Attempting to call a method with both a request object and flattened
+ # fields is an error.
+ with pytest.raises(ValueError):
+ client.list_entry_groups(
+ catalog.ListEntryGroupsRequest(),
+ parent='parent_value',
+ )
+
+@pytest.mark.asyncio
+async def test_list_entry_groups_flattened_async():
+ client = CatalogServiceAsyncClient(
+ credentials=async_anonymous_credentials(),
+ )
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(
+ type(client.transport.list_entry_groups),
+ '__call__') as call:
+ # Designate an appropriate return value for the call.
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(catalog.ListEntryGroupsResponse())
+ # Call the method with a truthy value for each flattened field,
+ # using the keyword arguments to the method.
+ response = await client.list_entry_groups(
+ parent='parent_value',
+ )
+
+ # Establish that the underlying call was made with the expected
+ # request object values.
+ assert len(call.mock_calls)
+ _, args, _ = call.mock_calls[0]
+ arg = args[0].parent
+ mock_val = 'parent_value'
+ assert arg == mock_val
+
+@pytest.mark.asyncio
+async def test_list_entry_groups_flattened_error_async():
+ client = CatalogServiceAsyncClient(
+ credentials=async_anonymous_credentials(),
+ )
+
+ # Attempting to call a method with both a request object and flattened
+ # fields is an error.
+ with pytest.raises(ValueError):
+ await client.list_entry_groups(
+ catalog.ListEntryGroupsRequest(),
+ parent='parent_value',
+ )
+
+
+def test_list_entry_groups_pager(transport_name: str = "grpc"):
+ client = CatalogServiceClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ transport=transport_name,
+ )
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(
+ type(client.transport.list_entry_groups),
+ '__call__') as call:
+ # Set the response to a series of pages.
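+ # Four pages are queued; the trailing RuntimeError would only surface
+ # if the pager fetched past the final, token-less response.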
+ call.side_effect = (
+ catalog.ListEntryGroupsResponse(
+ entry_groups=[
+ catalog.EntryGroup(),
+ catalog.EntryGroup(),
+ catalog.EntryGroup(),
+ ],
+ next_page_token='abc',
+ ),
+ catalog.ListEntryGroupsResponse(
+ entry_groups=[],
+ next_page_token='def',
+ ),
+ catalog.ListEntryGroupsResponse(
+ entry_groups=[
+ catalog.EntryGroup(),
+ ],
+ next_page_token='ghi',
+ ),
+ catalog.ListEntryGroupsResponse(
+ entry_groups=[
+ catalog.EntryGroup(),
+ catalog.EntryGroup(),
+ ],
+ ),
+ RuntimeError,
+ )
+
+ expected_metadata = ()
+ retry = retries.Retry()
+ timeout = 5
+ expected_metadata = tuple(expected_metadata) + (
+ gapic_v1.routing_header.to_grpc_metadata((
+ ('parent', ''),
+ )),
+ )
+ pager = client.list_entry_groups(request={}, retry=retry, timeout=timeout)
+
+ assert pager._metadata == expected_metadata
+ assert pager._retry == retry
+ assert pager._timeout == timeout
+
+ results = list(pager)
+ assert len(results) == 6
+ assert all(isinstance(i, catalog.EntryGroup)
+ for i in results)
+
+def test_list_entry_groups_pages(transport_name: str = "grpc"):
+ client = CatalogServiceClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ transport=transport_name,
+ )
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(
+ type(client.transport.list_entry_groups),
+ '__call__') as call:
+ # Set the response to a series of pages.
+ call.side_effect = (
+ catalog.ListEntryGroupsResponse(
+ entry_groups=[
+ catalog.EntryGroup(),
+ catalog.EntryGroup(),
+ catalog.EntryGroup(),
+ ],
+ next_page_token='abc',
+ ),
+ catalog.ListEntryGroupsResponse(
+ entry_groups=[],
+ next_page_token='def',
+ ),
+ catalog.ListEntryGroupsResponse(
+ entry_groups=[
+ catalog.EntryGroup(),
+ ],
+ next_page_token='ghi',
+ ),
+ catalog.ListEntryGroupsResponse(
+ entry_groups=[
+ catalog.EntryGroup(),
+ catalog.EntryGroup(),
+ ],
+ ),
+ RuntimeError,
+ )
+ pages = list(client.list_entry_groups(request={}).pages)
+ for page_, token in zip(pages, ['abc','def','ghi', '']):
+ assert page_.raw_page.next_page_token == token
+
+@pytest.mark.asyncio
+async def test_list_entry_groups_async_pager():
+ client = CatalogServiceAsyncClient(
+ credentials=async_anonymous_credentials(),
+ )
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(
+ type(client.transport.list_entry_groups),
+ '__call__', new_callable=mock.AsyncMock) as call:
+ # Set the response to a series of pages.
+ call.side_effect = (
+ catalog.ListEntryGroupsResponse(
+ entry_groups=[
+ catalog.EntryGroup(),
+ catalog.EntryGroup(),
+ catalog.EntryGroup(),
+ ],
+ next_page_token='abc',
+ ),
+ catalog.ListEntryGroupsResponse(
+ entry_groups=[],
+ next_page_token='def',
+ ),
+ catalog.ListEntryGroupsResponse(
+ entry_groups=[
+ catalog.EntryGroup(),
+ ],
+ next_page_token='ghi',
+ ),
+ catalog.ListEntryGroupsResponse(
+ entry_groups=[
+ catalog.EntryGroup(),
+ catalog.EntryGroup(),
+ ],
+ ),
+ RuntimeError,
+ )
+ async_pager = await client.list_entry_groups(request={},)
+ assert async_pager.next_page_token == 'abc'
+ responses = []
+ async for response in async_pager: # pragma: no branch
+ responses.append(response)
+
+ assert len(responses) == 6
+ assert all(isinstance(i, catalog.EntryGroup)
+ for i in responses)
+
+
+@pytest.mark.asyncio
+async def test_list_entry_groups_async_pages():
+ client = CatalogServiceAsyncClient(
+ credentials=async_anonymous_credentials(),
+ )
+
+ # Mock the actual call within the gRPC stub, and fake the request.
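+ # new_callable=mock.AsyncMock makes the patched '__call__' awaitable,
+ # which the async pager requires for each page fetch.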
+ with mock.patch.object( + type(client.transport.list_entry_groups), + '__call__', new_callable=mock.AsyncMock) as call: + # Set the response to a series of pages. + call.side_effect = ( + catalog.ListEntryGroupsResponse( + entry_groups=[ + catalog.EntryGroup(), + catalog.EntryGroup(), + catalog.EntryGroup(), + ], + next_page_token='abc', + ), + catalog.ListEntryGroupsResponse( + entry_groups=[], + next_page_token='def', + ), + catalog.ListEntryGroupsResponse( + entry_groups=[ + catalog.EntryGroup(), + ], + next_page_token='ghi', + ), + catalog.ListEntryGroupsResponse( + entry_groups=[ + catalog.EntryGroup(), + catalog.EntryGroup(), + ], + ), + RuntimeError, + ) + pages = [] + # Workaround issue in python 3.9 related to code coverage by adding `# pragma: no branch` + # See https://github.com/googleapis/gapic-generator-python/pull/1174#issuecomment-1025132372 + async for page_ in ( # pragma: no branch + await client.list_entry_groups(request={}) + ).pages: + pages.append(page_) + for page_, token in zip(pages, ['abc','def','ghi', '']): + assert page_.raw_page.next_page_token == token + +@pytest.mark.parametrize("request_type", [ + catalog.GetEntryGroupRequest, + dict, +]) +def test_get_entry_group(request_type, transport: str = 'grpc'): + client = CatalogServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_entry_group), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = catalog.EntryGroup( + name='name_value', + uid='uid_value', + description='description_value', + display_name='display_name_value', + etag='etag_value', + transfer_status=catalog.TransferStatus.TRANSFER_STATUS_MIGRATED, + ) + response = client.get_entry_group(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = catalog.GetEntryGroupRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, catalog.EntryGroup) + assert response.name == 'name_value' + assert response.uid == 'uid_value' + assert response.description == 'description_value' + assert response.display_name == 'display_name_value' + assert response.etag == 'etag_value' + assert response.transfer_status == catalog.TransferStatus.TRANSFER_STATUS_MIGRATED + + +def test_get_entry_group_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = CatalogServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = catalog.GetEntryGroupRequest( + name='name_value', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_entry_group), + '__call__') as call: + call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. 
+ client.get_entry_group(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == catalog.GetEntryGroupRequest( + name='name_value', + ) + +def test_get_entry_group_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = CatalogServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.get_entry_group in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client._transport._wrapped_methods[client._transport.get_entry_group] = mock_rpc + request = {} + client.get_entry_group(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.get_entry_group(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + +@pytest.mark.asyncio +async def test_get_entry_group_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = CatalogServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._client._transport.get_entry_group in client._client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[client._client._transport.get_entry_group] = mock_rpc + + request = {} + await client.get_entry_group(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + await client.get_entry_group(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + +@pytest.mark.asyncio +async def test_get_entry_group_async(transport: str = 'grpc_asyncio', request_type=catalog.GetEntryGroupRequest): + client = CatalogServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_entry_group), + '__call__') as call: + # Designate an appropriate return value for the call. 
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(catalog.EntryGroup(
+ name='name_value',
+ uid='uid_value',
+ description='description_value',
+ display_name='display_name_value',
+ etag='etag_value',
+ transfer_status=catalog.TransferStatus.TRANSFER_STATUS_MIGRATED,
+ ))
+ response = await client.get_entry_group(request)
+
+ # Establish that the underlying gRPC stub method was called.
+ assert len(call.mock_calls)
+ _, args, _ = call.mock_calls[0]
+ request = catalog.GetEntryGroupRequest()
+ assert args[0] == request
+
+ # Establish that the response is the type that we expect.
+ assert isinstance(response, catalog.EntryGroup)
+ assert response.name == 'name_value'
+ assert response.uid == 'uid_value'
+ assert response.description == 'description_value'
+ assert response.display_name == 'display_name_value'
+ assert response.etag == 'etag_value'
+ assert response.transfer_status == catalog.TransferStatus.TRANSFER_STATUS_MIGRATED
+
+
+@pytest.mark.asyncio
+async def test_get_entry_group_async_from_dict():
+ await test_get_entry_group_async(request_type=dict)
+
+def test_get_entry_group_field_headers():
+ client = CatalogServiceClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ )
+
+ # Any value that is part of the HTTP/1.1 URI should be sent as
+ # a field header. Set these to a non-empty value.
+ request = catalog.GetEntryGroupRequest()
+
+ request.name = 'name_value'
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(
+ type(client.transport.get_entry_group),
+ '__call__') as call:
+ call.return_value = catalog.EntryGroup()
+ client.get_entry_group(request)
+
+ # Establish that the underlying gRPC stub method was called.
+ assert len(call.mock_calls) == 1
+ _, args, _ = call.mock_calls[0]
+ assert args[0] == request
+
+ # Establish that the field header was sent.
+ _, _, kw = call.mock_calls[0]
+ assert (
+ 'x-goog-request-params',
+ 'name=name_value',
+ ) in kw['metadata']
+
+
+@pytest.mark.asyncio
+async def test_get_entry_group_field_headers_async():
+ client = CatalogServiceAsyncClient(
+ credentials=async_anonymous_credentials(),
+ )
+
+ # Any value that is part of the HTTP/1.1 URI should be sent as
+ # a field header. Set these to a non-empty value.
+ request = catalog.GetEntryGroupRequest()
+
+ request.name = 'name_value'
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(
+ type(client.transport.get_entry_group),
+ '__call__') as call:
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(catalog.EntryGroup())
+ await client.get_entry_group(request)
+
+ # Establish that the underlying gRPC stub method was called.
+ assert len(call.mock_calls)
+ _, args, _ = call.mock_calls[0]
+ assert args[0] == request
+
+ # Establish that the field header was sent.
+ _, _, kw = call.mock_calls[0]
+ assert (
+ 'x-goog-request-params',
+ 'name=name_value',
+ ) in kw['metadata']
+
+
+def test_get_entry_group_flattened():
+ client = CatalogServiceClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ )
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(
+ type(client.transport.get_entry_group),
+ '__call__') as call:
+ # Designate an appropriate return value for the call.
+ call.return_value = catalog.EntryGroup()
+ # Call the method with a truthy value for each flattened field,
+ # using the keyword arguments to the method.
+ client.get_entry_group(
+ name='name_value',
+ )
+
+ # Establish that the underlying call was made with the expected
+ # request object values.
+ assert len(call.mock_calls) == 1
+ _, args, _ = call.mock_calls[0]
+ arg = args[0].name
+ mock_val = 'name_value'
+ assert arg == mock_val
+
+
+def test_get_entry_group_flattened_error():
+ client = CatalogServiceClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ )
+
+ # Attempting to call a method with both a request object and flattened
+ # fields is an error.
+ with pytest.raises(ValueError):
+ client.get_entry_group(
+ catalog.GetEntryGroupRequest(),
+ name='name_value',
+ )
+
+@pytest.mark.asyncio
+async def test_get_entry_group_flattened_async():
+ client = CatalogServiceAsyncClient(
+ credentials=async_anonymous_credentials(),
+ )
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(
+ type(client.transport.get_entry_group),
+ '__call__') as call:
+ # Designate an appropriate return value for the call.
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(catalog.EntryGroup())
+ # Call the method with a truthy value for each flattened field,
+ # using the keyword arguments to the method.
+ response = await client.get_entry_group(
+ name='name_value',
+ )
+
+ # Establish that the underlying call was made with the expected
+ # request object values.
+ assert len(call.mock_calls)
+ _, args, _ = call.mock_calls[0]
+ arg = args[0].name
+ mock_val = 'name_value'
+ assert arg == mock_val
+
+@pytest.mark.asyncio
+async def test_get_entry_group_flattened_error_async():
+ client = CatalogServiceAsyncClient(
+ credentials=async_anonymous_credentials(),
+ )
+
+ # Attempting to call a method with both a request object and flattened
+ # fields is an error.
+ with pytest.raises(ValueError):
+ await client.get_entry_group(
+ catalog.GetEntryGroupRequest(),
+ name='name_value',
+ )
+
+
+@pytest.mark.parametrize("request_type", [
+ catalog.CreateEntryRequest,
+ dict,
+])
+def test_create_entry(request_type, transport: str = 'grpc'):
+ client = CatalogServiceClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ transport=transport,
+ )
+
+ # Everything is optional in proto3 as far as the runtime is concerned,
+ # and we are mocking out the actual API, so just send an empty request.
+ request = request_type()
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(
+ type(client.transport.create_entry),
+ '__call__') as call:
+ # Designate an appropriate return value for the call.
+ call.return_value = catalog.Entry(
+ name='name_value',
+ entry_type='entry_type_value',
+ parent_entry='parent_entry_value',
+ fully_qualified_name='fully_qualified_name_value',
+ )
+ response = client.create_entry(request)
+
+ # Establish that the underlying gRPC stub method was called.
+ assert len(call.mock_calls) == 1
+ _, args, _ = call.mock_calls[0]
+ request = catalog.CreateEntryRequest()
+ assert args[0] == request
+
+ # Establish that the response is the type that we expect.
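+ # Each scalar set on the mocked Entry should come back unchanged on
+ # the response.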
+ assert isinstance(response, catalog.Entry) + assert response.name == 'name_value' + assert response.entry_type == 'entry_type_value' + assert response.parent_entry == 'parent_entry_value' + assert response.fully_qualified_name == 'fully_qualified_name_value' + + +def test_create_entry_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = CatalogServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = catalog.CreateEntryRequest( + parent='parent_value', + entry_id='entry_id_value', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_entry), + '__call__') as call: + call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client.create_entry(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == catalog.CreateEntryRequest( + parent='parent_value', + entry_id='entry_id_value', + ) + +def test_create_entry_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = CatalogServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.create_entry in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client._transport._wrapped_methods[client._transport.create_entry] = mock_rpc + request = {} + client.create_entry(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.create_entry(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + +@pytest.mark.asyncio +async def test_create_entry_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = CatalogServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._client._transport.create_entry in client._client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[client._client._transport.create_entry] = mock_rpc + + request = {} + await client.create_entry(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1
+
+ await client.create_entry(request)
+
+ # Establish that a new wrapper was not created for this call
+ assert wrapper_fn.call_count == 0
+ assert mock_rpc.call_count == 2
+
+@pytest.mark.asyncio
+async def test_create_entry_async(transport: str = 'grpc_asyncio', request_type=catalog.CreateEntryRequest):
+ client = CatalogServiceAsyncClient(
+ credentials=async_anonymous_credentials(),
+ transport=transport,
+ )
+
+ # Everything is optional in proto3 as far as the runtime is concerned,
+ # and we are mocking out the actual API, so just send an empty request.
+ request = request_type()
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(
+ type(client.transport.create_entry),
+ '__call__') as call:
+ # Designate an appropriate return value for the call.
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(catalog.Entry(
+ name='name_value',
+ entry_type='entry_type_value',
+ parent_entry='parent_entry_value',
+ fully_qualified_name='fully_qualified_name_value',
+ ))
+ response = await client.create_entry(request)
+
+ # Establish that the underlying gRPC stub method was called.
+ assert len(call.mock_calls)
+ _, args, _ = call.mock_calls[0]
+ request = catalog.CreateEntryRequest()
+ assert args[0] == request
+
+ # Establish that the response is the type that we expect.
+ assert isinstance(response, catalog.Entry)
+ assert response.name == 'name_value'
+ assert response.entry_type == 'entry_type_value'
+ assert response.parent_entry == 'parent_entry_value'
+ assert response.fully_qualified_name == 'fully_qualified_name_value'
+
+
+@pytest.mark.asyncio
+async def test_create_entry_async_from_dict():
+ await test_create_entry_async(request_type=dict)
+
+def test_create_entry_field_headers():
+ client = CatalogServiceClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ )
+
+ # Any value that is part of the HTTP/1.1 URI should be sent as
+ # a field header. Set these to a non-empty value.
+ request = catalog.CreateEntryRequest()
+
+ request.parent = 'parent_value'
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(
+ type(client.transport.create_entry),
+ '__call__') as call:
+ call.return_value = catalog.Entry()
+ client.create_entry(request)
+
+ # Establish that the underlying gRPC stub method was called.
+ assert len(call.mock_calls) == 1
+ _, args, _ = call.mock_calls[0]
+ assert args[0] == request
+
+ # Establish that the field header was sent.
+ _, _, kw = call.mock_calls[0]
+ assert (
+ 'x-goog-request-params',
+ 'parent=parent_value',
+ ) in kw['metadata']
+
+
+@pytest.mark.asyncio
+async def test_create_entry_field_headers_async():
+ client = CatalogServiceAsyncClient(
+ credentials=async_anonymous_credentials(),
+ )
+
+ # Any value that is part of the HTTP/1.1 URI should be sent as
+ # a field header. Set these to a non-empty value.
+ request = catalog.CreateEntryRequest()
+
+ request.parent = 'parent_value'
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(
+ type(client.transport.create_entry),
+ '__call__') as call:
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(catalog.Entry())
+ await client.create_entry(request)
+
+ # Establish that the underlying gRPC stub method was called.
+ assert len(call.mock_calls)
+ _, args, _ = call.mock_calls[0]
+ assert args[0] == request
+
+ # Establish that the field header was sent.
+ _, _, kw = call.mock_calls[0]
+ assert (
+ 'x-goog-request-params',
+ 'parent=parent_value',
+ ) in kw['metadata']
+
+
+def test_create_entry_flattened():
+ client = CatalogServiceClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ )
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(
+ type(client.transport.create_entry),
+ '__call__') as call:
+ # Designate an appropriate return value for the call.
+ call.return_value = catalog.Entry()
+ # Call the method with a truthy value for each flattened field,
+ # using the keyword arguments to the method.
+ client.create_entry(
+ parent='parent_value',
+ entry=catalog.Entry(name='name_value'),
+ entry_id='entry_id_value',
+ )
+
+ # Establish that the underlying call was made with the expected
+ # request object values.
+ assert len(call.mock_calls) == 1
+ _, args, _ = call.mock_calls[0]
+ arg = args[0].parent
+ mock_val = 'parent_value'
+ assert arg == mock_val
+ arg = args[0].entry
+ mock_val = catalog.Entry(name='name_value')
+ assert arg == mock_val
+ arg = args[0].entry_id
+ mock_val = 'entry_id_value'
+ assert arg == mock_val
+
+
+def test_create_entry_flattened_error():
+ client = CatalogServiceClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ )
+
+ # Attempting to call a method with both a request object and flattened
+ # fields is an error.
+ with pytest.raises(ValueError):
+ client.create_entry(
+ catalog.CreateEntryRequest(),
+ parent='parent_value',
+ entry=catalog.Entry(name='name_value'),
+ entry_id='entry_id_value',
+ )
+
+@pytest.mark.asyncio
+async def test_create_entry_flattened_async():
+ client = CatalogServiceAsyncClient(
+ credentials=async_anonymous_credentials(),
+ )
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(
+ type(client.transport.create_entry),
+ '__call__') as call:
+ # Designate an appropriate return value for the call.
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(catalog.Entry())
+ # Call the method with a truthy value for each flattened field,
+ # using the keyword arguments to the method.
+ response = await client.create_entry(
+ parent='parent_value',
+ entry=catalog.Entry(name='name_value'),
+ entry_id='entry_id_value',
+ )
+
+ # Establish that the underlying call was made with the expected
+ # request object values.
+ assert len(call.mock_calls)
+ _, args, _ = call.mock_calls[0]
+ arg = args[0].parent
+ mock_val = 'parent_value'
+ assert arg == mock_val
+ arg = args[0].entry
+ mock_val = catalog.Entry(name='name_value')
+ assert arg == mock_val
+ arg = args[0].entry_id
+ mock_val = 'entry_id_value'
+ assert arg == mock_val
+
+@pytest.mark.asyncio
+async def test_create_entry_flattened_error_async():
+ client = CatalogServiceAsyncClient(
+ credentials=async_anonymous_credentials(),
+ )
+
+ # Attempting to call a method with both a request object and flattened
+ # fields is an error.
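+ # The ValueError is raised client-side, before any RPC is attempted.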
+ with pytest.raises(ValueError): + await client.create_entry( + catalog.CreateEntryRequest(), + parent='parent_value', + entry=catalog.Entry(name='name_value'), + entry_id='entry_id_value', + ) + + +@pytest.mark.parametrize("request_type", [ + catalog.UpdateEntryRequest, + dict, +]) +def test_update_entry(request_type, transport: str = 'grpc'): + client = CatalogServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_entry), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = catalog.Entry( + name='name_value', + entry_type='entry_type_value', + parent_entry='parent_entry_value', + fully_qualified_name='fully_qualified_name_value', + ) + response = client.update_entry(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = catalog.UpdateEntryRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, catalog.Entry) + assert response.name == 'name_value' + assert response.entry_type == 'entry_type_value' + assert response.parent_entry == 'parent_entry_value' + assert response.fully_qualified_name == 'fully_qualified_name_value' + + +def test_update_entry_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = CatalogServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = catalog.UpdateEntryRequest( + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_entry), + '__call__') as call: + call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client.update_entry(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == catalog.UpdateEntryRequest( + ) + +def test_update_entry_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = CatalogServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.update_entry in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client._transport._wrapped_methods[client._transport.update_entry] = mock_rpc + request = {} + client.update_entry(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1
+
+ client.update_entry(request)
+
+ # Establish that a new wrapper was not created for this call
+ assert wrapper_fn.call_count == 0
+ assert mock_rpc.call_count == 2
+
+@pytest.mark.asyncio
+async def test_update_entry_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"):
+ # Clients should use _prep_wrapped_messages to create cached wrapped rpcs,
+ # instead of constructing them on each call
+ with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn:
+ client = CatalogServiceAsyncClient(
+ credentials=async_anonymous_credentials(),
+ transport=transport,
+ )
+
+ # Should wrap all calls on client creation
+ assert wrapper_fn.call_count > 0
+ wrapper_fn.reset_mock()
+
+ # Ensure method has been cached
+ assert client._client._transport.update_entry in client._client._transport._wrapped_methods
+
+ # Replace cached wrapped function with mock
+ mock_rpc = mock.AsyncMock()
+ mock_rpc.return_value = mock.Mock()
+ client._client._transport._wrapped_methods[client._client._transport.update_entry] = mock_rpc
+
+ request = {}
+ await client.update_entry(request)
+
+ # Establish that the underlying gRPC stub method was called.
+ assert mock_rpc.call_count == 1
+
+ await client.update_entry(request)
+
+ # Establish that a new wrapper was not created for this call
+ assert wrapper_fn.call_count == 0
+ assert mock_rpc.call_count == 2
+
+@pytest.mark.asyncio
+async def test_update_entry_async(transport: str = 'grpc_asyncio', request_type=catalog.UpdateEntryRequest):
+ client = CatalogServiceAsyncClient(
+ credentials=async_anonymous_credentials(),
+ transport=transport,
+ )
+
+ # Everything is optional in proto3 as far as the runtime is concerned,
+ # and we are mocking out the actual API, so just send an empty request.
+ request = request_type()
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(
+ type(client.transport.update_entry),
+ '__call__') as call:
+ # Designate an appropriate return value for the call.
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(catalog.Entry(
+ name='name_value',
+ entry_type='entry_type_value',
+ parent_entry='parent_entry_value',
+ fully_qualified_name='fully_qualified_name_value',
+ ))
+ response = await client.update_entry(request)
+
+ # Establish that the underlying gRPC stub method was called.
+ assert len(call.mock_calls)
+ _, args, _ = call.mock_calls[0]
+ request = catalog.UpdateEntryRequest()
+ assert args[0] == request
+
+ # Establish that the response is the type that we expect.
+ assert isinstance(response, catalog.Entry)
+ assert response.name == 'name_value'
+ assert response.entry_type == 'entry_type_value'
+ assert response.parent_entry == 'parent_entry_value'
+ assert response.fully_qualified_name == 'fully_qualified_name_value'
+
+
+@pytest.mark.asyncio
+async def test_update_entry_async_from_dict():
+ await test_update_entry_async(request_type=dict)
+
+def test_update_entry_field_headers():
+ client = CatalogServiceClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ )
+
+ # Any value that is part of the HTTP/1.1 URI should be sent as
+ # a field header. Set these to a non-empty value.
+ request = catalog.UpdateEntryRequest()
+
+ request.entry.name = 'name_value'
+
+ # Mock the actual call within the gRPC stub, and fake the request.
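+ # The routing header is derived from the nested `entry.name` field,
+ # hence 'entry.name=name_value' in the metadata assertion below.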
+ with mock.patch.object(
+ type(client.transport.update_entry),
+ '__call__') as call:
+ call.return_value = catalog.Entry()
+ client.update_entry(request)
+
+ # Establish that the underlying gRPC stub method was called.
+ assert len(call.mock_calls) == 1
+ _, args, _ = call.mock_calls[0]
+ assert args[0] == request
+
+ # Establish that the field header was sent.
+ _, _, kw = call.mock_calls[0]
+ assert (
+ 'x-goog-request-params',
+ 'entry.name=name_value',
+ ) in kw['metadata']
+
+
+@pytest.mark.asyncio
+async def test_update_entry_field_headers_async():
+ client = CatalogServiceAsyncClient(
+ credentials=async_anonymous_credentials(),
+ )
+
+ # Any value that is part of the HTTP/1.1 URI should be sent as
+ # a field header. Set these to a non-empty value.
+ request = catalog.UpdateEntryRequest()
+
+ request.entry.name = 'name_value'
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(
+ type(client.transport.update_entry),
+ '__call__') as call:
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(catalog.Entry())
+ await client.update_entry(request)
+
+ # Establish that the underlying gRPC stub method was called.
+ assert len(call.mock_calls)
+ _, args, _ = call.mock_calls[0]
+ assert args[0] == request
+
+ # Establish that the field header was sent.
+ _, _, kw = call.mock_calls[0]
+ assert (
+ 'x-goog-request-params',
+ 'entry.name=name_value',
+ ) in kw['metadata']
+
+
+def test_update_entry_flattened():
+ client = CatalogServiceClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ )
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(
+ type(client.transport.update_entry),
+ '__call__') as call:
+ # Designate an appropriate return value for the call.
+ call.return_value = catalog.Entry()
+ # Call the method with a truthy value for each flattened field,
+ # using the keyword arguments to the method.
+ client.update_entry(
+ entry=catalog.Entry(name='name_value'),
+ update_mask=field_mask_pb2.FieldMask(paths=['paths_value']),
+ )
+
+ # Establish that the underlying call was made with the expected
+ # request object values.
+ assert len(call.mock_calls) == 1
+ _, args, _ = call.mock_calls[0]
+ arg = args[0].entry
+ mock_val = catalog.Entry(name='name_value')
+ assert arg == mock_val
+ arg = args[0].update_mask
+ mock_val = field_mask_pb2.FieldMask(paths=['paths_value'])
+ assert arg == mock_val
+
+
+def test_update_entry_flattened_error():
+ client = CatalogServiceClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ )
+
+ # Attempting to call a method with both a request object and flattened
+ # fields is an error.
+ with pytest.raises(ValueError):
+ client.update_entry(
+ catalog.UpdateEntryRequest(),
+ entry=catalog.Entry(name='name_value'),
+ update_mask=field_mask_pb2.FieldMask(paths=['paths_value']),
+ )
+
+@pytest.mark.asyncio
+async def test_update_entry_flattened_async():
+ client = CatalogServiceAsyncClient(
+ credentials=async_anonymous_credentials(),
+ )
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(
+ type(client.transport.update_entry),
+ '__call__') as call:
+ # Designate an appropriate return value for the call.
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(catalog.Entry())
+ # Call the method with a truthy value for each flattened field,
+ # using the keyword arguments to the method.
+ response = await client.update_entry( + entry=catalog.Entry(name='name_value'), + update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].entry + mock_val = catalog.Entry(name='name_value') + assert arg == mock_val + arg = args[0].update_mask + mock_val = field_mask_pb2.FieldMask(paths=['paths_value']) + assert arg == mock_val + +@pytest.mark.asyncio +async def test_update_entry_flattened_error_async(): + client = CatalogServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.update_entry( + catalog.UpdateEntryRequest(), + entry=catalog.Entry(name='name_value'), + update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), + ) + + +@pytest.mark.parametrize("request_type", [ + catalog.DeleteEntryRequest, + dict, +]) +def test_delete_entry(request_type, transport: str = 'grpc'): + client = CatalogServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_entry), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = catalog.Entry( + name='name_value', + entry_type='entry_type_value', + parent_entry='parent_entry_value', + fully_qualified_name='fully_qualified_name_value', + ) + response = client.delete_entry(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = catalog.DeleteEntryRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, catalog.Entry) + assert response.name == 'name_value' + assert response.entry_type == 'entry_type_value' + assert response.parent_entry == 'parent_entry_value' + assert response.fully_qualified_name == 'fully_qualified_name_value' + + +def test_delete_entry_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = CatalogServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = catalog.DeleteEntryRequest( + name='name_value', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_entry), + '__call__') as call: + call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. 
+ client.delete_entry(request=request)
+ call.assert_called()
+ _, args, _ = call.mock_calls[0]
+ assert args[0] == catalog.DeleteEntryRequest(
+ name='name_value',
+ )
+
+def test_delete_entry_use_cached_wrapped_rpc():
+ # Clients should use _prep_wrapped_messages to create cached wrapped rpcs,
+ # instead of constructing them on each call
+ with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn:
+ client = CatalogServiceClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ transport="grpc",
+ )
+
+ # Should wrap all calls on client creation
+ assert wrapper_fn.call_count > 0
+ wrapper_fn.reset_mock()
+
+ # Ensure method has been cached
+ assert client._transport.delete_entry in client._transport._wrapped_methods
+
+ # Replace cached wrapped function with mock
+ mock_rpc = mock.Mock()
+ mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string.
+ client._transport._wrapped_methods[client._transport.delete_entry] = mock_rpc
+ request = {}
+ client.delete_entry(request)
+
+ # Establish that the underlying gRPC stub method was called.
+ assert mock_rpc.call_count == 1
+
+ client.delete_entry(request)
+
+ # Establish that a new wrapper was not created for this call
+ assert wrapper_fn.call_count == 0
+ assert mock_rpc.call_count == 2
+
+@pytest.mark.asyncio
+async def test_delete_entry_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"):
+ # Clients should use _prep_wrapped_messages to create cached wrapped rpcs,
+ # instead of constructing them on each call
+ with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn:
+ client = CatalogServiceAsyncClient(
+ credentials=async_anonymous_credentials(),
+ transport=transport,
+ )
+
+ # Should wrap all calls on client creation
+ assert wrapper_fn.call_count > 0
+ wrapper_fn.reset_mock()
+
+ # Ensure method has been cached
+ assert client._client._transport.delete_entry in client._client._transport._wrapped_methods
+
+ # Replace cached wrapped function with mock
+ mock_rpc = mock.AsyncMock()
+ mock_rpc.return_value = mock.Mock()
+ client._client._transport._wrapped_methods[client._client._transport.delete_entry] = mock_rpc
+
+ request = {}
+ await client.delete_entry(request)
+
+ # Establish that the underlying gRPC stub method was called.
+ assert mock_rpc.call_count == 1
+
+ await client.delete_entry(request)
+
+ # Establish that a new wrapper was not created for this call
+ assert wrapper_fn.call_count == 0
+ assert mock_rpc.call_count == 2
+
+@pytest.mark.asyncio
+async def test_delete_entry_async(transport: str = 'grpc_asyncio', request_type=catalog.DeleteEntryRequest):
+ client = CatalogServiceAsyncClient(
+ credentials=async_anonymous_credentials(),
+ transport=transport,
+ )
+
+ # Everything is optional in proto3 as far as the runtime is concerned,
+ # and we are mocking out the actual API, so just send an empty request.
+ request = request_type()
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(
+ type(client.transport.delete_entry),
+ '__call__') as call:
+ # Designate an appropriate return value for the call.
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(catalog.Entry(
+ name='name_value',
+ entry_type='entry_type_value',
+ parent_entry='parent_entry_value',
+ fully_qualified_name='fully_qualified_name_value',
+ ))
+ response = await client.delete_entry(request)
+
+ # Establish that the underlying gRPC stub method was called.
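+ # (Editor's note: grpc_helpers_async.FakeUnaryUnaryCall wraps the canned
+ # response in an awaitable call object, mimicking what a real async gRPC
+ # stub returns, which is why the `await` above resolves directly to the
+ # Entry message.)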
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = catalog.DeleteEntryRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, catalog.Entry) + assert response.name == 'name_value' + assert response.entry_type == 'entry_type_value' + assert response.parent_entry == 'parent_entry_value' + assert response.fully_qualified_name == 'fully_qualified_name_value' + + +@pytest.mark.asyncio +async def test_delete_entry_async_from_dict(): + await test_delete_entry_async(request_type=dict) + +def test_delete_entry_field_headers(): + client = CatalogServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = catalog.DeleteEntryRequest() + + request.name = 'name_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_entry), + '__call__') as call: + call.return_value = catalog.Entry() + client.delete_entry(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'name=name_value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_delete_entry_field_headers_async(): + client = CatalogServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = catalog.DeleteEntryRequest() + + request.name = 'name_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_entry), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(catalog.Entry()) + await client.delete_entry(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'name=name_value', + ) in kw['metadata'] + + +def test_delete_entry_flattened(): + client = CatalogServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_entry), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = catalog.Entry() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.delete_entry( + name='name_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = 'name_value' + assert arg == mock_val + + +def test_delete_entry_flattened_error(): + client = CatalogServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError):
+ client.delete_entry(
+ catalog.DeleteEntryRequest(),
+ name='name_value',
+ )
+
+@pytest.mark.asyncio
+async def test_delete_entry_flattened_async():
+ client = CatalogServiceAsyncClient(
+ credentials=async_anonymous_credentials(),
+ )
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(
+ type(client.transport.delete_entry),
+ '__call__') as call:
+ # Designate an appropriate return value for the call.
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(catalog.Entry())
+ # Call the method with a truthy value for each flattened field,
+ # using the keyword arguments to the method.
+ response = await client.delete_entry(
+ name='name_value',
+ )
+
+ # Establish that the underlying call was made with the expected
+ # request object values.
+ assert len(call.mock_calls)
+ _, args, _ = call.mock_calls[0]
+ arg = args[0].name
+ mock_val = 'name_value'
+ assert arg == mock_val
+
+@pytest.mark.asyncio
+async def test_delete_entry_flattened_error_async():
+ client = CatalogServiceAsyncClient(
+ credentials=async_anonymous_credentials(),
+ )
+
+ # Attempting to call a method with both a request object and flattened
+ # fields is an error.
+ with pytest.raises(ValueError):
+ await client.delete_entry(
+ catalog.DeleteEntryRequest(),
+ name='name_value',
+ )
+
+
+@pytest.mark.parametrize("request_type", [
+ catalog.ListEntriesRequest,
+ dict,
+])
+def test_list_entries(request_type, transport: str = 'grpc'):
+ client = CatalogServiceClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ transport=transport,
+ )
+
+ # Everything is optional in proto3 as far as the runtime is concerned,
+ # and we are mocking out the actual API, so just send an empty request.
+ request = request_type()
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(
+ type(client.transport.list_entries),
+ '__call__') as call:
+ # Designate an appropriate return value for the call.
+ call.return_value = catalog.ListEntriesResponse(
+ next_page_token='next_page_token_value',
+ )
+ response = client.list_entries(request)
+
+ # Establish that the underlying gRPC stub method was called.
+ assert len(call.mock_calls) == 1
+ _, args, _ = call.mock_calls[0]
+ request = catalog.ListEntriesRequest()
+ assert args[0] == request
+
+ # Establish that the response is the type that we expect.
+ assert isinstance(response, pagers.ListEntriesPager)
+ assert response.next_page_token == 'next_page_token_value'
+
+
+def test_list_entries_non_empty_request_with_auto_populated_field():
+ # This test is a coverage failsafe to make sure that UUID4 fields are
+ # automatically populated, according to AIP-4235, with non-empty requests.
+ client = CatalogServiceClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ transport='grpc',
+ )
+
+ # Populate all string fields in the request which are not UUID4
+ # since we want to check that UUID4 are populated automatically
+ # if they meet the requirements of AIP 4235.
+ request = catalog.ListEntriesRequest(
+ parent='parent_value',
+ page_token='page_token_value',
+ filter='filter_value',
+ )
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(
+ type(client.transport.list_entries),
+ '__call__') as call:
+ call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string.
+ client.list_entries(request=request)
+ call.assert_called()
+ _, args, _ = call.mock_calls[0]
+ assert args[0] == catalog.ListEntriesRequest(
+ parent='parent_value',
+ page_token='page_token_value',
+ filter='filter_value',
+ )
+
+def test_list_entries_use_cached_wrapped_rpc():
+ # Clients should use _prep_wrapped_messages to create cached wrapped rpcs,
+ # instead of constructing them on each call
+ with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn:
+ client = CatalogServiceClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ transport="grpc",
+ )
+
+ # Should wrap all calls on client creation
+ assert wrapper_fn.call_count > 0
+ wrapper_fn.reset_mock()
+
+ # Ensure method has been cached
+ assert client._transport.list_entries in client._transport._wrapped_methods
+
+ # Replace cached wrapped function with mock
+ mock_rpc = mock.Mock()
+ mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string.
+ client._transport._wrapped_methods[client._transport.list_entries] = mock_rpc
+ request = {}
+ client.list_entries(request)
+
+ # Establish that the underlying gRPC stub method was called.
+ assert mock_rpc.call_count == 1
+
+ client.list_entries(request)
+
+ # Establish that a new wrapper was not created for this call
+ assert wrapper_fn.call_count == 0
+ assert mock_rpc.call_count == 2
+
+@pytest.mark.asyncio
+async def test_list_entries_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"):
+ # Clients should use _prep_wrapped_messages to create cached wrapped rpcs,
+ # instead of constructing them on each call
+ with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn:
+ client = CatalogServiceAsyncClient(
+ credentials=async_anonymous_credentials(),
+ transport=transport,
+ )
+
+ # Should wrap all calls on client creation
+ assert wrapper_fn.call_count > 0
+ wrapper_fn.reset_mock()
+
+ # Ensure method has been cached
+ assert client._client._transport.list_entries in client._client._transport._wrapped_methods
+
+ # Replace cached wrapped function with mock
+ mock_rpc = mock.AsyncMock()
+ mock_rpc.return_value = mock.Mock()
+ client._client._transport._wrapped_methods[client._client._transport.list_entries] = mock_rpc
+
+ request = {}
+ await client.list_entries(request)
+
+ # Establish that the underlying gRPC stub method was called.
+ assert mock_rpc.call_count == 1
+
+ await client.list_entries(request)
+
+ # Establish that a new wrapper was not created for this call
+ assert wrapper_fn.call_count == 0
+ assert mock_rpc.call_count == 2
+
+@pytest.mark.asyncio
+async def test_list_entries_async(transport: str = 'grpc_asyncio', request_type=catalog.ListEntriesRequest):
+ client = CatalogServiceAsyncClient(
+ credentials=async_anonymous_credentials(),
+ transport=transport,
+ )
+
+ # Everything is optional in proto3 as far as the runtime is concerned,
+ # and we are mocking out the actual API, so just send an empty request.
+ request = request_type()
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(
+ type(client.transport.list_entries),
+ '__call__') as call:
+ # Designate an appropriate return value for the call.
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(catalog.ListEntriesResponse(
+ next_page_token='next_page_token_value',
+ ))
+ response = await client.list_entries(request)
+
+ # Establish that the underlying gRPC stub method was called.
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = catalog.ListEntriesRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListEntriesAsyncPager) + assert response.next_page_token == 'next_page_token_value' + + +@pytest.mark.asyncio +async def test_list_entries_async_from_dict(): + await test_list_entries_async(request_type=dict) + +def test_list_entries_field_headers(): + client = CatalogServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = catalog.ListEntriesRequest() + + request.parent = 'parent_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_entries), + '__call__') as call: + call.return_value = catalog.ListEntriesResponse() + client.list_entries(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'parent=parent_value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_list_entries_field_headers_async(): + client = CatalogServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = catalog.ListEntriesRequest() + + request.parent = 'parent_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_entries), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(catalog.ListEntriesResponse()) + await client.list_entries(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'parent=parent_value', + ) in kw['metadata'] + + +def test_list_entries_flattened(): + client = CatalogServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_entries), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = catalog.ListEntriesResponse() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.list_entries( + parent='parent_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = 'parent_value' + assert arg == mock_val + + +def test_list_entries_flattened_error(): + client = CatalogServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
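+ # (Editor's note: the generated clients refuse to merge an explicit request
+ # object with flattened keyword arguments, since it would be ambiguous which
+ # value should win; the ValueError below is that guard.)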
+ with pytest.raises(ValueError):
+ client.list_entries(
+ catalog.ListEntriesRequest(),
+ parent='parent_value',
+ )
+
+@pytest.mark.asyncio
+async def test_list_entries_flattened_async():
+ client = CatalogServiceAsyncClient(
+ credentials=async_anonymous_credentials(),
+ )
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(
+ type(client.transport.list_entries),
+ '__call__') as call:
+ # Designate an appropriate return value for the call.
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(catalog.ListEntriesResponse())
+ # Call the method with a truthy value for each flattened field,
+ # using the keyword arguments to the method.
+ response = await client.list_entries(
+ parent='parent_value',
+ )
+
+ # Establish that the underlying call was made with the expected
+ # request object values.
+ assert len(call.mock_calls)
+ _, args, _ = call.mock_calls[0]
+ arg = args[0].parent
+ mock_val = 'parent_value'
+ assert arg == mock_val
+
+@pytest.mark.asyncio
+async def test_list_entries_flattened_error_async():
+ client = CatalogServiceAsyncClient(
+ credentials=async_anonymous_credentials(),
+ )
+
+ # Attempting to call a method with both a request object and flattened
+ # fields is an error.
+ with pytest.raises(ValueError):
+ await client.list_entries(
+ catalog.ListEntriesRequest(),
+ parent='parent_value',
+ )
+
+
+def test_list_entries_pager(transport_name: str = "grpc"):
+ client = CatalogServiceClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ transport=transport_name,
+ )
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(
+ type(client.transport.list_entries),
+ '__call__') as call:
+ # Set the response to a series of pages.
+ call.side_effect = (
+ catalog.ListEntriesResponse(
+ entries=[
+ catalog.Entry(),
+ catalog.Entry(),
+ catalog.Entry(),
+ ],
+ next_page_token='abc',
+ ),
+ catalog.ListEntriesResponse(
+ entries=[],
+ next_page_token='def',
+ ),
+ catalog.ListEntriesResponse(
+ entries=[
+ catalog.Entry(),
+ ],
+ next_page_token='ghi',
+ ),
+ catalog.ListEntriesResponse(
+ entries=[
+ catalog.Entry(),
+ catalog.Entry(),
+ ],
+ ),
+ RuntimeError,
+ )
+
+ expected_metadata = ()
+ retry = retries.Retry()
+ timeout = 5
+ expected_metadata = tuple(expected_metadata) + (
+ gapic_v1.routing_header.to_grpc_metadata((
+ ('parent', ''),
+ )),
+ )
+ pager = client.list_entries(request={}, retry=retry, timeout=timeout)
+
+ assert pager._metadata == expected_metadata
+ assert pager._retry == retry
+ assert pager._timeout == timeout
+
+ results = list(pager)
+ assert len(results) == 6
+ assert all(isinstance(i, catalog.Entry)
+ for i in results)
+def test_list_entries_pages(transport_name: str = "grpc"):
+ client = CatalogServiceClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ transport=transport_name,
+ )
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(
+ type(client.transport.list_entries),
+ '__call__') as call:
+ # Set the response to a series of pages.
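+ # (Editor's note: assigning an iterable to `side_effect` makes the mock
+ # return the next element on each successive call; the trailing RuntimeError
+ # is a sentinel that would fail the test if the pager fetched past the final
+ # page, whose empty next_page_token ends iteration.)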
+ call.side_effect = ( + catalog.ListEntriesResponse( + entries=[ + catalog.Entry(), + catalog.Entry(), + catalog.Entry(), + ], + next_page_token='abc', + ), + catalog.ListEntriesResponse( + entries=[], + next_page_token='def', + ), + catalog.ListEntriesResponse( + entries=[ + catalog.Entry(), + ], + next_page_token='ghi', + ), + catalog.ListEntriesResponse( + entries=[ + catalog.Entry(), + catalog.Entry(), + ], + ), + RuntimeError, + ) + pages = list(client.list_entries(request={}).pages) + for page_, token in zip(pages, ['abc','def','ghi', '']): + assert page_.raw_page.next_page_token == token + +@pytest.mark.asyncio +async def test_list_entries_async_pager(): + client = CatalogServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_entries), + '__call__', new_callable=mock.AsyncMock) as call: + # Set the response to a series of pages. + call.side_effect = ( + catalog.ListEntriesResponse( + entries=[ + catalog.Entry(), + catalog.Entry(), + catalog.Entry(), + ], + next_page_token='abc', + ), + catalog.ListEntriesResponse( + entries=[], + next_page_token='def', + ), + catalog.ListEntriesResponse( + entries=[ + catalog.Entry(), + ], + next_page_token='ghi', + ), + catalog.ListEntriesResponse( + entries=[ + catalog.Entry(), + catalog.Entry(), + ], + ), + RuntimeError, + ) + async_pager = await client.list_entries(request={},) + assert async_pager.next_page_token == 'abc' + responses = [] + async for response in async_pager: # pragma: no branch + responses.append(response) + + assert len(responses) == 6 + assert all(isinstance(i, catalog.Entry) + for i in responses) + + +@pytest.mark.asyncio +async def test_list_entries_async_pages(): + client = CatalogServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_entries), + '__call__', new_callable=mock.AsyncMock) as call: + # Set the response to a series of pages. + call.side_effect = ( + catalog.ListEntriesResponse( + entries=[ + catalog.Entry(), + catalog.Entry(), + catalog.Entry(), + ], + next_page_token='abc', + ), + catalog.ListEntriesResponse( + entries=[], + next_page_token='def', + ), + catalog.ListEntriesResponse( + entries=[ + catalog.Entry(), + ], + next_page_token='ghi', + ), + catalog.ListEntriesResponse( + entries=[ + catalog.Entry(), + catalog.Entry(), + ], + ), + RuntimeError, + ) + pages = [] + # Workaround issue in python 3.9 related to code coverage by adding `# pragma: no branch` + # See https://github.com/googleapis/gapic-generator-python/pull/1174#issuecomment-1025132372 + async for page_ in ( # pragma: no branch + await client.list_entries(request={}) + ).pages: + pages.append(page_) + for page_, token in zip(pages, ['abc','def','ghi', '']): + assert page_.raw_page.next_page_token == token + +@pytest.mark.parametrize("request_type", [ + catalog.GetEntryRequest, + dict, +]) +def test_get_entry(request_type, transport: str = 'grpc'): + client = CatalogServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.get_entry), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = catalog.Entry( + name='name_value', + entry_type='entry_type_value', + parent_entry='parent_entry_value', + fully_qualified_name='fully_qualified_name_value', + ) + response = client.get_entry(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = catalog.GetEntryRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, catalog.Entry) + assert response.name == 'name_value' + assert response.entry_type == 'entry_type_value' + assert response.parent_entry == 'parent_entry_value' + assert response.fully_qualified_name == 'fully_qualified_name_value' + + +def test_get_entry_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = CatalogServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = catalog.GetEntryRequest( + name='name_value', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_entry), + '__call__') as call: + call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client.get_entry(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == catalog.GetEntryRequest( + name='name_value', + ) + +def test_get_entry_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = CatalogServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.get_entry in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client._transport._wrapped_methods[client._transport.get_entry] = mock_rpc + request = {} + client.get_entry(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1
+
+ client.get_entry(request)
+
+ # Establish that a new wrapper was not created for this call
+ assert wrapper_fn.call_count == 0
+ assert mock_rpc.call_count == 2
+
+@pytest.mark.asyncio
+async def test_get_entry_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"):
+ # Clients should use _prep_wrapped_messages to create cached wrapped rpcs,
+ # instead of constructing them on each call
+ with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn:
+ client = CatalogServiceAsyncClient(
+ credentials=async_anonymous_credentials(),
+ transport=transport,
+ )
+
+ # Should wrap all calls on client creation
+ assert wrapper_fn.call_count > 0
+ wrapper_fn.reset_mock()
+
+ # Ensure method has been cached
+ assert client._client._transport.get_entry in client._client._transport._wrapped_methods
+
+ # Replace cached wrapped function with mock
+ mock_rpc = mock.AsyncMock()
+ mock_rpc.return_value = mock.Mock()
+ client._client._transport._wrapped_methods[client._client._transport.get_entry] = mock_rpc
+
+ request = {}
+ await client.get_entry(request)
+
+ # Establish that the underlying gRPC stub method was called.
+ assert mock_rpc.call_count == 1
+
+ await client.get_entry(request)
+
+ # Establish that a new wrapper was not created for this call
+ assert wrapper_fn.call_count == 0
+ assert mock_rpc.call_count == 2
+
+@pytest.mark.asyncio
+async def test_get_entry_async(transport: str = 'grpc_asyncio', request_type=catalog.GetEntryRequest):
+ client = CatalogServiceAsyncClient(
+ credentials=async_anonymous_credentials(),
+ transport=transport,
+ )
+
+ # Everything is optional in proto3 as far as the runtime is concerned,
+ # and we are mocking out the actual API, so just send an empty request.
+ request = request_type()
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(
+ type(client.transport.get_entry),
+ '__call__') as call:
+ # Designate an appropriate return value for the call.
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(catalog.Entry(
+ name='name_value',
+ entry_type='entry_type_value',
+ parent_entry='parent_entry_value',
+ fully_qualified_name='fully_qualified_name_value',
+ ))
+ response = await client.get_entry(request)
+
+ # Establish that the underlying gRPC stub method was called.
+ assert len(call.mock_calls)
+ _, args, _ = call.mock_calls[0]
+ request = catalog.GetEntryRequest()
+ assert args[0] == request
+
+ # Establish that the response is the type that we expect.
+ assert isinstance(response, catalog.Entry)
+ assert response.name == 'name_value'
+ assert response.entry_type == 'entry_type_value'
+ assert response.parent_entry == 'parent_entry_value'
+ assert response.fully_qualified_name == 'fully_qualified_name_value'
+
+
+@pytest.mark.asyncio
+async def test_get_entry_async_from_dict():
+ await test_get_entry_async(request_type=dict)
+
+def test_get_entry_field_headers():
+ client = CatalogServiceClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ )
+
+ # Any value that is part of the HTTP/1.1 URI should be sent as
+ # a field header. Set these to a non-empty value.
+ request = catalog.GetEntryRequest()
+
+ request.name = 'name_value'
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(
+ type(client.transport.get_entry),
+ '__call__') as call:
+ call.return_value = catalog.Entry()
+ client.get_entry(request)
+
+ # Establish that the underlying gRPC stub method was called.
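+ # (Editor's note: the metadata assertion below checks that the request's
+ # `name` was copied into x-goog-request-params, the header backends use to
+ # route the call; see AIP-4222 for the routing-header convention.)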
+ assert len(call.mock_calls) == 1
+ _, args, _ = call.mock_calls[0]
+ assert args[0] == request
+
+ # Establish that the field header was sent.
+ _, _, kw = call.mock_calls[0]
+ assert (
+ 'x-goog-request-params',
+ 'name=name_value',
+ ) in kw['metadata']
+
+
+@pytest.mark.asyncio
+async def test_get_entry_field_headers_async():
+ client = CatalogServiceAsyncClient(
+ credentials=async_anonymous_credentials(),
+ )
+
+ # Any value that is part of the HTTP/1.1 URI should be sent as
+ # a field header. Set these to a non-empty value.
+ request = catalog.GetEntryRequest()
+
+ request.name = 'name_value'
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(
+ type(client.transport.get_entry),
+ '__call__') as call:
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(catalog.Entry())
+ await client.get_entry(request)
+
+ # Establish that the underlying gRPC stub method was called.
+ assert len(call.mock_calls)
+ _, args, _ = call.mock_calls[0]
+ assert args[0] == request
+
+ # Establish that the field header was sent.
+ _, _, kw = call.mock_calls[0]
+ assert (
+ 'x-goog-request-params',
+ 'name=name_value',
+ ) in kw['metadata']
+
+
+def test_get_entry_flattened():
+ client = CatalogServiceClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ )
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(
+ type(client.transport.get_entry),
+ '__call__') as call:
+ # Designate an appropriate return value for the call.
+ call.return_value = catalog.Entry()
+ # Call the method with a truthy value for each flattened field,
+ # using the keyword arguments to the method.
+ client.get_entry(
+ name='name_value',
+ )
+
+ # Establish that the underlying call was made with the expected
+ # request object values.
+ assert len(call.mock_calls) == 1
+ _, args, _ = call.mock_calls[0]
+ arg = args[0].name
+ mock_val = 'name_value'
+ assert arg == mock_val
+
+
+def test_get_entry_flattened_error():
+ client = CatalogServiceClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ )
+
+ # Attempting to call a method with both a request object and flattened
+ # fields is an error.
+ with pytest.raises(ValueError):
+ client.get_entry(
+ catalog.GetEntryRequest(),
+ name='name_value',
+ )
+
+@pytest.mark.asyncio
+async def test_get_entry_flattened_async():
+ client = CatalogServiceAsyncClient(
+ credentials=async_anonymous_credentials(),
+ )
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(
+ type(client.transport.get_entry),
+ '__call__') as call:
+ # Designate an appropriate return value for the call.
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(catalog.Entry())
+ # Call the method with a truthy value for each flattened field,
+ # using the keyword arguments to the method.
+ response = await client.get_entry(
+ name='name_value',
+ )
+
+ # Establish that the underlying call was made with the expected
+ # request object values.
+ assert len(call.mock_calls)
+ _, args, _ = call.mock_calls[0]
+ arg = args[0].name
+ mock_val = 'name_value'
+ assert arg == mock_val
+
+@pytest.mark.asyncio
+async def test_get_entry_flattened_error_async():
+ client = CatalogServiceAsyncClient(
+ credentials=async_anonymous_credentials(),
+ )
+
+ # Attempting to call a method with both a request object and flattened
+ # fields is an error.
+ with pytest.raises(ValueError): + await client.get_entry( + catalog.GetEntryRequest(), + name='name_value', + ) + + +@pytest.mark.parametrize("request_type", [ + catalog.LookupEntryRequest, + dict, +]) +def test_lookup_entry(request_type, transport: str = 'grpc'): + client = CatalogServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.lookup_entry), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = catalog.Entry( + name='name_value', + entry_type='entry_type_value', + parent_entry='parent_entry_value', + fully_qualified_name='fully_qualified_name_value', + ) + response = client.lookup_entry(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = catalog.LookupEntryRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, catalog.Entry) + assert response.name == 'name_value' + assert response.entry_type == 'entry_type_value' + assert response.parent_entry == 'parent_entry_value' + assert response.fully_qualified_name == 'fully_qualified_name_value' + + +def test_lookup_entry_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = CatalogServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = catalog.LookupEntryRequest( + name='name_value', + entry='entry_value', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.lookup_entry), + '__call__') as call: + call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client.lookup_entry(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == catalog.LookupEntryRequest( + name='name_value', + entry='entry_value', + ) + +def test_lookup_entry_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = CatalogServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.lookup_entry in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client._transport._wrapped_methods[client._transport.lookup_entry] = mock_rpc + request = {} + client.lookup_entry(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1
+
+ client.lookup_entry(request)
+
+ # Establish that a new wrapper was not created for this call
+ assert wrapper_fn.call_count == 0
+ assert mock_rpc.call_count == 2
+
+@pytest.mark.asyncio
+async def test_lookup_entry_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"):
+ # Clients should use _prep_wrapped_messages to create cached wrapped rpcs,
+ # instead of constructing them on each call
+ with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn:
+ client = CatalogServiceAsyncClient(
+ credentials=async_anonymous_credentials(),
+ transport=transport,
+ )
+
+ # Should wrap all calls on client creation
+ assert wrapper_fn.call_count > 0
+ wrapper_fn.reset_mock()
+
+ # Ensure method has been cached
+ assert client._client._transport.lookup_entry in client._client._transport._wrapped_methods
+
+ # Replace cached wrapped function with mock
+ mock_rpc = mock.AsyncMock()
+ mock_rpc.return_value = mock.Mock()
+ client._client._transport._wrapped_methods[client._client._transport.lookup_entry] = mock_rpc
+
+ request = {}
+ await client.lookup_entry(request)
+
+ # Establish that the underlying gRPC stub method was called.
+ assert mock_rpc.call_count == 1
+
+ await client.lookup_entry(request)
+
+ # Establish that a new wrapper was not created for this call
+ assert wrapper_fn.call_count == 0
+ assert mock_rpc.call_count == 2
+
+@pytest.mark.asyncio
+async def test_lookup_entry_async(transport: str = 'grpc_asyncio', request_type=catalog.LookupEntryRequest):
+ client = CatalogServiceAsyncClient(
+ credentials=async_anonymous_credentials(),
+ transport=transport,
+ )
+
+ # Everything is optional in proto3 as far as the runtime is concerned,
+ # and we are mocking out the actual API, so just send an empty request.
+ request = request_type()
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(
+ type(client.transport.lookup_entry),
+ '__call__') as call:
+ # Designate an appropriate return value for the call.
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(catalog.Entry(
+ name='name_value',
+ entry_type='entry_type_value',
+ parent_entry='parent_entry_value',
+ fully_qualified_name='fully_qualified_name_value',
+ ))
+ response = await client.lookup_entry(request)
+
+ # Establish that the underlying gRPC stub method was called.
+ assert len(call.mock_calls)
+ _, args, _ = call.mock_calls[0]
+ request = catalog.LookupEntryRequest()
+ assert args[0] == request
+
+ # Establish that the response is the type that we expect.
+ assert isinstance(response, catalog.Entry)
+ assert response.name == 'name_value'
+ assert response.entry_type == 'entry_type_value'
+ assert response.parent_entry == 'parent_entry_value'
+ assert response.fully_qualified_name == 'fully_qualified_name_value'
+
+
+@pytest.mark.asyncio
+async def test_lookup_entry_async_from_dict():
+ await test_lookup_entry_async(request_type=dict)
+
+def test_lookup_entry_field_headers():
+ client = CatalogServiceClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ )
+
+ # Any value that is part of the HTTP/1.1 URI should be sent as
+ # a field header. Set these to a non-empty value.
+ request = catalog.LookupEntryRequest()
+
+ request.name = 'name_value'
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object( + type(client.transport.lookup_entry), + '__call__') as call: + call.return_value = catalog.Entry() + client.lookup_entry(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'name=name_value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_lookup_entry_field_headers_async(): + client = CatalogServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = catalog.LookupEntryRequest() + + request.name = 'name_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.lookup_entry), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(catalog.Entry()) + await client.lookup_entry(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'name=name_value', + ) in kw['metadata'] + + +@pytest.mark.parametrize("request_type", [ + catalog.SearchEntriesRequest, + dict, +]) +def test_search_entries(request_type, transport: str = 'grpc'): + client = CatalogServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.search_entries), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = catalog.SearchEntriesResponse( + total_size=1086, + next_page_token='next_page_token_value', + unreachable=['unreachable_value'], + ) + response = client.search_entries(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = catalog.SearchEntriesRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.SearchEntriesPager) + assert response.total_size == 1086 + assert response.next_page_token == 'next_page_token_value' + assert response.unreachable == ['unreachable_value'] + + +def test_search_entries_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = CatalogServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = catalog.SearchEntriesRequest( + name='name_value', + query='query_value', + page_token='page_token_value', + order_by='order_by_value', + scope='scope_value', + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.search_entries), + '__call__') as call: + call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client.search_entries(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == catalog.SearchEntriesRequest( + name='name_value', + query='query_value', + page_token='page_token_value', + order_by='order_by_value', + scope='scope_value', + ) + +def test_search_entries_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = CatalogServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.search_entries in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client._transport._wrapped_methods[client._transport.search_entries] = mock_rpc + request = {} + client.search_entries(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.search_entries(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + +@pytest.mark.asyncio +async def test_search_entries_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = CatalogServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._client._transport.search_entries in client._client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[client._client._transport.search_entries] = mock_rpc + + request = {} + await client.search_entries(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + await client.search_entries(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + +@pytest.mark.asyncio +async def test_search_entries_async(transport: str = 'grpc_asyncio', request_type=catalog.SearchEntriesRequest): + client = CatalogServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.search_entries), + '__call__') as call: + # Designate an appropriate return value for the call. 
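+ # (Editor's note: the awaited SearchEntriesResponse is wrapped by the async
+ # client into the SearchEntriesAsyncPager asserted below, so follow-up page
+ # fetches are awaited transparently during iteration.)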
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(catalog.SearchEntriesResponse(
+ total_size=1086,
+ next_page_token='next_page_token_value',
+ unreachable=['unreachable_value'],
+ ))
+ response = await client.search_entries(request)
+
+ # Establish that the underlying gRPC stub method was called.
+ assert len(call.mock_calls)
+ _, args, _ = call.mock_calls[0]
+ request = catalog.SearchEntriesRequest()
+ assert args[0] == request
+
+ # Establish that the response is the type that we expect.
+ assert isinstance(response, pagers.SearchEntriesAsyncPager)
+ assert response.total_size == 1086
+ assert response.next_page_token == 'next_page_token_value'
+ assert response.unreachable == ['unreachable_value']
+
+
+@pytest.mark.asyncio
+async def test_search_entries_async_from_dict():
+ await test_search_entries_async(request_type=dict)
+
+def test_search_entries_field_headers():
+ client = CatalogServiceClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ )
+
+ # Any value that is part of the HTTP/1.1 URI should be sent as
+ # a field header. Set these to a non-empty value.
+ request = catalog.SearchEntriesRequest()
+
+ request.name = 'name_value'
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(
+ type(client.transport.search_entries),
+ '__call__') as call:
+ call.return_value = catalog.SearchEntriesResponse()
+ client.search_entries(request)
+
+ # Establish that the underlying gRPC stub method was called.
+ assert len(call.mock_calls) == 1
+ _, args, _ = call.mock_calls[0]
+ assert args[0] == request
+
+ # Establish that the field header was sent.
+ _, _, kw = call.mock_calls[0]
+ assert (
+ 'x-goog-request-params',
+ 'name=name_value',
+ ) in kw['metadata']
+
+
+@pytest.mark.asyncio
+async def test_search_entries_field_headers_async():
+ client = CatalogServiceAsyncClient(
+ credentials=async_anonymous_credentials(),
+ )
+
+ # Any value that is part of the HTTP/1.1 URI should be sent as
+ # a field header. Set these to a non-empty value.
+ request = catalog.SearchEntriesRequest()
+
+ request.name = 'name_value'
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(
+ type(client.transport.search_entries),
+ '__call__') as call:
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(catalog.SearchEntriesResponse())
+ await client.search_entries(request)
+
+ # Establish that the underlying gRPC stub method was called.
+ assert len(call.mock_calls)
+ _, args, _ = call.mock_calls[0]
+ assert args[0] == request
+
+ # Establish that the field header was sent.
+ _, _, kw = call.mock_calls[0]
+ assert (
+ 'x-goog-request-params',
+ 'name=name_value',
+ ) in kw['metadata']
+
+
+def test_search_entries_flattened():
+ client = CatalogServiceClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ )
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(
+ type(client.transport.search_entries),
+ '__call__') as call:
+ # Designate an appropriate return value for the call.
+ call.return_value = catalog.SearchEntriesResponse()
+ # Call the method with a truthy value for each flattened field,
+ # using the keyword arguments to the method.
+ client.search_entries(
+ name='name_value',
+ query='query_value',
+ )
+
+ # Establish that the underlying call was made with the expected
+ # request object values.
+ assert len(call.mock_calls) == 1
+ _, args, _ = call.mock_calls[0]
+ arg = args[0].name
+ mock_val = 'name_value'
+ assert arg == mock_val
+ arg = args[0].query
+ mock_val = 'query_value'
+ assert arg == mock_val
+
+
+def test_search_entries_flattened_error():
+ client = CatalogServiceClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ )
+
+ # Attempting to call a method with both a request object and flattened
+ # fields is an error.
+ with pytest.raises(ValueError):
+ client.search_entries(
+ catalog.SearchEntriesRequest(),
+ name='name_value',
+ query='query_value',
+ )
+
+@pytest.mark.asyncio
+async def test_search_entries_flattened_async():
+ client = CatalogServiceAsyncClient(
+ credentials=async_anonymous_credentials(),
+ )
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(
+ type(client.transport.search_entries),
+ '__call__') as call:
+ # Designate an appropriate return value for the call.
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(catalog.SearchEntriesResponse())
+ # Call the method with a truthy value for each flattened field,
+ # using the keyword arguments to the method.
+ response = await client.search_entries(
+ name='name_value',
+ query='query_value',
+ )
+
+ # Establish that the underlying call was made with the expected
+ # request object values.
+ assert len(call.mock_calls)
+ _, args, _ = call.mock_calls[0]
+ arg = args[0].name
+ mock_val = 'name_value'
+ assert arg == mock_val
+ arg = args[0].query
+ mock_val = 'query_value'
+ assert arg == mock_val
+
+@pytest.mark.asyncio
+async def test_search_entries_flattened_error_async():
+ client = CatalogServiceAsyncClient(
+ credentials=async_anonymous_credentials(),
+ )
+
+ # Attempting to call a method with both a request object and flattened
+ # fields is an error.
+ with pytest.raises(ValueError):
+ await client.search_entries(
+ catalog.SearchEntriesRequest(),
+ name='name_value',
+ query='query_value',
+ )
+
+
+def test_search_entries_pager(transport_name: str = "grpc"):
+ client = CatalogServiceClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ transport=transport_name,
+ )
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(
+ type(client.transport.search_entries),
+ '__call__') as call:
+ # Set the response to a series of pages.
+ call.side_effect = ( + catalog.SearchEntriesResponse( + results=[ + catalog.SearchEntriesResult(), + catalog.SearchEntriesResult(), + catalog.SearchEntriesResult(), + ], + next_page_token='abc', + ), + catalog.SearchEntriesResponse( + results=[], + next_page_token='def', + ), + catalog.SearchEntriesResponse( + results=[ + catalog.SearchEntriesResult(), + ], + next_page_token='ghi', + ), + catalog.SearchEntriesResponse( + results=[ + catalog.SearchEntriesResult(), + catalog.SearchEntriesResult(), + ], + ), + RuntimeError, + ) + + expected_metadata = () + retry = retries.Retry() + timeout = 5 + expected_metadata = tuple(expected_metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ('name', ''), + )), + ) + pager = client.search_entries(request={}, retry=retry, timeout=timeout) + + assert pager._metadata == expected_metadata + assert pager._retry == retry + assert pager._timeout == timeout + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, catalog.SearchEntriesResult) + for i in results) +def test_search_entries_pages(transport_name: str = "grpc"): + client = CatalogServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport_name, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.search_entries), + '__call__') as call: + # Set the response to a series of pages. + call.side_effect = ( + catalog.SearchEntriesResponse( + results=[ + catalog.SearchEntriesResult(), + catalog.SearchEntriesResult(), + catalog.SearchEntriesResult(), + ], + next_page_token='abc', + ), + catalog.SearchEntriesResponse( + results=[], + next_page_token='def', + ), + catalog.SearchEntriesResponse( + results=[ + catalog.SearchEntriesResult(), + ], + next_page_token='ghi', + ), + catalog.SearchEntriesResponse( + results=[ + catalog.SearchEntriesResult(), + catalog.SearchEntriesResult(), + ], + ), + RuntimeError, + ) + pages = list(client.search_entries(request={}).pages) + for page_, token in zip(pages, ['abc','def','ghi', '']): + assert page_.raw_page.next_page_token == token + +@pytest.mark.asyncio +async def test_search_entries_async_pager(): + client = CatalogServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.search_entries), + '__call__', new_callable=mock.AsyncMock) as call: + # Set the response to a series of pages. 
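+ # (Editor's note: `new_callable=mock.AsyncMock` above makes each faked page
+ # an awaitable call, matching the async transport; the RuntimeError sentinel
+ # again guards against over-fetching.)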
+ call.side_effect = ( + catalog.SearchEntriesResponse( + results=[ + catalog.SearchEntriesResult(), + catalog.SearchEntriesResult(), + catalog.SearchEntriesResult(), + ], + next_page_token='abc', + ), + catalog.SearchEntriesResponse( + results=[], + next_page_token='def', + ), + catalog.SearchEntriesResponse( + results=[ + catalog.SearchEntriesResult(), + ], + next_page_token='ghi', + ), + catalog.SearchEntriesResponse( + results=[ + catalog.SearchEntriesResult(), + catalog.SearchEntriesResult(), + ], + ), + RuntimeError, + ) + async_pager = await client.search_entries(request={},) + assert async_pager.next_page_token == 'abc' + responses = [] + async for response in async_pager: # pragma: no branch + responses.append(response) + + assert len(responses) == 6 + assert all(isinstance(i, catalog.SearchEntriesResult) + for i in responses) + + +@pytest.mark.asyncio +async def test_search_entries_async_pages(): + client = CatalogServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.search_entries), + '__call__', new_callable=mock.AsyncMock) as call: + # Set the response to a series of pages. + call.side_effect = ( + catalog.SearchEntriesResponse( + results=[ + catalog.SearchEntriesResult(), + catalog.SearchEntriesResult(), + catalog.SearchEntriesResult(), + ], + next_page_token='abc', + ), + catalog.SearchEntriesResponse( + results=[], + next_page_token='def', + ), + catalog.SearchEntriesResponse( + results=[ + catalog.SearchEntriesResult(), + ], + next_page_token='ghi', + ), + catalog.SearchEntriesResponse( + results=[ + catalog.SearchEntriesResult(), + catalog.SearchEntriesResult(), + ], + ), + RuntimeError, + ) + pages = [] + # Workaround issue in python 3.9 related to code coverage by adding `# pragma: no branch` + # See https://github.com/googleapis/gapic-generator-python/pull/1174#issuecomment-1025132372 + async for page_ in ( # pragma: no branch + await client.search_entries(request={}) + ).pages: + pages.append(page_) + for page_, token in zip(pages, ['abc','def','ghi', '']): + assert page_.raw_page.next_page_token == token + +@pytest.mark.parametrize("request_type", [ + catalog.CreateMetadataJobRequest, + dict, +]) +def test_create_metadata_job(request_type, transport: str = 'grpc'): + client = CatalogServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_metadata_job), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name='operations/spam') + response = client.create_metadata_job(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = catalog.CreateMetadataJobRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +def test_create_metadata_job_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. 
+ client = CatalogServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = catalog.CreateMetadataJobRequest( + parent='parent_value', + metadata_job_id='metadata_job_id_value', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_metadata_job), + '__call__') as call: + call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client.create_metadata_job(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == catalog.CreateMetadataJobRequest( + parent='parent_value', + metadata_job_id='metadata_job_id_value', + ) + +def test_create_metadata_job_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = CatalogServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.create_metadata_job in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client._transport._wrapped_methods[client._transport.create_metadata_job] = mock_rpc + request = {} + client.create_metadata_job(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.create_metadata_job(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + +@pytest.mark.asyncio +async def test_create_metadata_job_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = CatalogServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._client._transport.create_metadata_job in client._client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[client._client._transport.create_metadata_job] = mock_rpc + + request = {} + await client.create_metadata_job(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + await client.create_metadata_job(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + +@pytest.mark.asyncio +async def test_create_metadata_job_async(transport: str = 'grpc_asyncio', request_type=catalog.CreateMetadataJobRequest): + client = CatalogServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_metadata_job), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name='operations/spam') + ) + response = await client.create_metadata_job(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = catalog.CreateMetadataJobRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +@pytest.mark.asyncio +async def test_create_metadata_job_async_from_dict(): + await test_create_metadata_job_async(request_type=dict) + +def test_create_metadata_job_field_headers(): + client = CatalogServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = catalog.CreateMetadataJobRequest() + + request.parent = 'parent_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_metadata_job), + '__call__') as call: + call.return_value = operations_pb2.Operation(name='operations/op') + client.create_metadata_job(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'parent=parent_value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_create_metadata_job_field_headers_async(): + client = CatalogServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = catalog.CreateMetadataJobRequest() + + request.parent = 'parent_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_metadata_job), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(operations_pb2.Operation(name='operations/op')) + await client.create_metadata_job(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. 
+ _, _, kw = call.mock_calls[0]
+ assert (
+ 'x-goog-request-params',
+ 'parent=parent_value',
+ ) in kw['metadata']
+
+
+def test_create_metadata_job_flattened():
+ client = CatalogServiceClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ )
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(
+ type(client.transport.create_metadata_job),
+ '__call__') as call:
+ # Designate an appropriate return value for the call.
+ call.return_value = operations_pb2.Operation(name='operations/op')
+ # Call the method with a truthy value for each flattened field,
+ # using the keyword arguments to the method.
+ client.create_metadata_job(
+ parent='parent_value',
+ metadata_job=catalog.MetadataJob(name='name_value'),
+ metadata_job_id='metadata_job_id_value',
+ )
+
+ # Establish that the underlying call was made with the expected
+ # request object values.
+ assert len(call.mock_calls) == 1
+ _, args, _ = call.mock_calls[0]
+ arg = args[0].parent
+ mock_val = 'parent_value'
+ assert arg == mock_val
+ arg = args[0].metadata_job
+ mock_val = catalog.MetadataJob(name='name_value')
+ assert arg == mock_val
+ arg = args[0].metadata_job_id
+ mock_val = 'metadata_job_id_value'
+ assert arg == mock_val
+
+
+def test_create_metadata_job_flattened_error():
+ client = CatalogServiceClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ )
+
+ # Attempting to call a method with both a request object and flattened
+ # fields is an error.
+ with pytest.raises(ValueError):
+ client.create_metadata_job(
+ catalog.CreateMetadataJobRequest(),
+ parent='parent_value',
+ metadata_job=catalog.MetadataJob(name='name_value'),
+ metadata_job_id='metadata_job_id_value',
+ )
+
+@pytest.mark.asyncio
+async def test_create_metadata_job_flattened_async():
+ client = CatalogServiceAsyncClient(
+ credentials=async_anonymous_credentials(),
+ )
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(
+ type(client.transport.create_metadata_job),
+ '__call__') as call:
+ # Designate an appropriate return value for the call.
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
+ operations_pb2.Operation(name='operations/spam')
+ )
+ # Call the method with a truthy value for each flattened field,
+ # using the keyword arguments to the method.
+ response = await client.create_metadata_job(
+ parent='parent_value',
+ metadata_job=catalog.MetadataJob(name='name_value'),
+ metadata_job_id='metadata_job_id_value',
+ )
+
+ # Establish that the underlying call was made with the expected
+ # request object values.
+ assert len(call.mock_calls)
+ _, args, _ = call.mock_calls[0]
+ arg = args[0].parent
+ mock_val = 'parent_value'
+ assert arg == mock_val
+ arg = args[0].metadata_job
+ mock_val = catalog.MetadataJob(name='name_value')
+ assert arg == mock_val
+ arg = args[0].metadata_job_id
+ mock_val = 'metadata_job_id_value'
+ assert arg == mock_val
+
+@pytest.mark.asyncio
+async def test_create_metadata_job_flattened_error_async():
+ client = CatalogServiceAsyncClient(
+ credentials=async_anonymous_credentials(),
+ )
+
+ # Attempting to call a method with both a request object and flattened
+ # fields is an error.
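+ # Editor's note (illustrative sketch, not generated code): flattened keyword
+ # arguments and an explicit request object are mutually exclusive; the two
+ # valid call styles are (placeholder values):
+ #
+ #     client.create_metadata_job(            # flattened style
+ #         parent="projects/p/locations/l",
+ #         metadata_job=catalog.MetadataJob(),
+ #         metadata_job_id="job-123",
+ #     )
+ #     client.create_metadata_job(            # request-object style
+ #         request=catalog.CreateMetadataJobRequest(parent="projects/p/locations/l"),
+ #     )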
+ with pytest.raises(ValueError): + await client.create_metadata_job( + catalog.CreateMetadataJobRequest(), + parent='parent_value', + metadata_job=catalog.MetadataJob(name='name_value'), + metadata_job_id='metadata_job_id_value', + ) + + +@pytest.mark.parametrize("request_type", [ + catalog.GetMetadataJobRequest, + dict, +]) +def test_get_metadata_job(request_type, transport: str = 'grpc'): + client = CatalogServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_metadata_job), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = catalog.MetadataJob( + name='name_value', + uid='uid_value', + type_=catalog.MetadataJob.Type.IMPORT, + ) + response = client.get_metadata_job(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = catalog.GetMetadataJobRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, catalog.MetadataJob) + assert response.name == 'name_value' + assert response.uid == 'uid_value' + assert response.type_ == catalog.MetadataJob.Type.IMPORT + + +def test_get_metadata_job_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = CatalogServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = catalog.GetMetadataJobRequest( + name='name_value', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_metadata_job), + '__call__') as call: + call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client.get_metadata_job(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == catalog.GetMetadataJobRequest( + name='name_value', + ) + +def test_get_metadata_job_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = CatalogServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.get_metadata_job in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client._transport._wrapped_methods[client._transport.get_metadata_job] = mock_rpc + request = {} + client.get_metadata_job(request) + + # Establish that the underlying gRPC stub method was called. 
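+ # Editor's note (illustrative sketch, not generated code): wrap_method
+ # applies the service's default retry/timeout policy to each stub once, at
+ # client construction, and caches the wrapper in _wrapped_methods; callers
+ # can still override per call, e.g. (placeholder values):
+ #
+ #     client.get_metadata_job(
+ #         name="projects/p/locations/l/metadataJobs/j",
+ #         retry=retries.Retry(),
+ #         timeout=30,
+ #     )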
+ assert mock_rpc.call_count == 1
+
+ client.get_metadata_job(request)
+
+ # Establish that a new wrapper was not created for this call
+ assert wrapper_fn.call_count == 0
+ assert mock_rpc.call_count == 2
+
+@pytest.mark.asyncio
+async def test_get_metadata_job_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"):
+ # Clients should use _prep_wrapped_messages to create cached wrapped rpcs,
+ # instead of constructing them on each call
+ with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn:
+ client = CatalogServiceAsyncClient(
+ credentials=async_anonymous_credentials(),
+ transport=transport,
+ )
+
+ # Should wrap all calls on client creation
+ assert wrapper_fn.call_count > 0
+ wrapper_fn.reset_mock()
+
+ # Ensure method has been cached
+ assert client._client._transport.get_metadata_job in client._client._transport._wrapped_methods
+
+ # Replace cached wrapped function with mock
+ mock_rpc = mock.AsyncMock()
+ mock_rpc.return_value = mock.Mock()
+ client._client._transport._wrapped_methods[client._client._transport.get_metadata_job] = mock_rpc
+
+ request = {}
+ await client.get_metadata_job(request)
+
+ # Establish that the underlying gRPC stub method was called.
+ assert mock_rpc.call_count == 1
+
+ await client.get_metadata_job(request)
+
+ # Establish that a new wrapper was not created for this call
+ assert wrapper_fn.call_count == 0
+ assert mock_rpc.call_count == 2
+
+@pytest.mark.asyncio
+async def test_get_metadata_job_async(transport: str = 'grpc_asyncio', request_type=catalog.GetMetadataJobRequest):
+ client = CatalogServiceAsyncClient(
+ credentials=async_anonymous_credentials(),
+ transport=transport,
+ )
+
+ # Everything is optional in proto3 as far as the runtime is concerned,
+ # and we are mocking out the actual API, so just send an empty request.
+ request = request_type()
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(
+ type(client.transport.get_metadata_job),
+ '__call__') as call:
+ # Designate an appropriate return value for the call.
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(catalog.MetadataJob(
+ name='name_value',
+ uid='uid_value',
+ type_=catalog.MetadataJob.Type.IMPORT,
+ ))
+ response = await client.get_metadata_job(request)
+
+ # Establish that the underlying gRPC stub method was called.
+ assert len(call.mock_calls)
+ _, args, _ = call.mock_calls[0]
+ request = catalog.GetMetadataJobRequest()
+ assert args[0] == request
+
+ # Establish that the response is the type that we expect.
+ assert isinstance(response, catalog.MetadataJob)
+ assert response.name == 'name_value'
+ assert response.uid == 'uid_value'
+ assert response.type_ == catalog.MetadataJob.Type.IMPORT
+
+
+@pytest.mark.asyncio
+async def test_get_metadata_job_async_from_dict():
+ await test_get_metadata_job_async(request_type=dict)
+
+def test_get_metadata_job_field_headers():
+ client = CatalogServiceClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ )
+
+ # Any value that is part of the HTTP/1.1 URI should be sent as
+ # a field header. Set these to a non-empty value.
+ request = catalog.GetMetadataJobRequest()
+
+ request.name = 'name_value'
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(
+ type(client.transport.get_metadata_job),
+ '__call__') as call:
+ call.return_value = catalog.MetadataJob()
+ client.get_metadata_job(request)
+
+ # Establish that the underlying gRPC stub method was called.
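+ # Editor's note: request fields that appear in the HTTP URI are mirrored
+ # into gRPC metadata so the backend can route the call; with name set to
+ # 'name_value' the client emits ('x-goog-request-params', 'name=name_value'),
+ # built via gapic_v1.routing_header.to_grpc_metadata((('name', 'name_value'),)),
+ # which the assertions below verify.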
+ assert len(call.mock_calls) == 1
+ _, args, _ = call.mock_calls[0]
+ assert args[0] == request
+
+ # Establish that the field header was sent.
+ _, _, kw = call.mock_calls[0]
+ assert (
+ 'x-goog-request-params',
+ 'name=name_value',
+ ) in kw['metadata']
+
+
+@pytest.mark.asyncio
+async def test_get_metadata_job_field_headers_async():
+ client = CatalogServiceAsyncClient(
+ credentials=async_anonymous_credentials(),
+ )
+
+ # Any value that is part of the HTTP/1.1 URI should be sent as
+ # a field header. Set these to a non-empty value.
+ request = catalog.GetMetadataJobRequest()
+
+ request.name = 'name_value'
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(
+ type(client.transport.get_metadata_job),
+ '__call__') as call:
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(catalog.MetadataJob())
+ await client.get_metadata_job(request)
+
+ # Establish that the underlying gRPC stub method was called.
+ assert len(call.mock_calls)
+ _, args, _ = call.mock_calls[0]
+ assert args[0] == request
+
+ # Establish that the field header was sent.
+ _, _, kw = call.mock_calls[0]
+ assert (
+ 'x-goog-request-params',
+ 'name=name_value',
+ ) in kw['metadata']
+
+
+def test_get_metadata_job_flattened():
+ client = CatalogServiceClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ )
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(
+ type(client.transport.get_metadata_job),
+ '__call__') as call:
+ # Designate an appropriate return value for the call.
+ call.return_value = catalog.MetadataJob()
+ # Call the method with a truthy value for each flattened field,
+ # using the keyword arguments to the method.
+ client.get_metadata_job(
+ name='name_value',
+ )
+
+ # Establish that the underlying call was made with the expected
+ # request object values.
+ assert len(call.mock_calls) == 1
+ _, args, _ = call.mock_calls[0]
+ arg = args[0].name
+ mock_val = 'name_value'
+ assert arg == mock_val
+
+
+def test_get_metadata_job_flattened_error():
+ client = CatalogServiceClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ )
+
+ # Attempting to call a method with both a request object and flattened
+ # fields is an error.
+ with pytest.raises(ValueError):
+ client.get_metadata_job(
+ catalog.GetMetadataJobRequest(),
+ name='name_value',
+ )
+
+@pytest.mark.asyncio
+async def test_get_metadata_job_flattened_async():
+ client = CatalogServiceAsyncClient(
+ credentials=async_anonymous_credentials(),
+ )
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(
+ type(client.transport.get_metadata_job),
+ '__call__') as call:
+ # Designate an appropriate return value for the call.
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(catalog.MetadataJob())
+ # Call the method with a truthy value for each flattened field,
+ # using the keyword arguments to the method.
+ response = await client.get_metadata_job(
+ name='name_value',
+ )
+
+ # Establish that the underlying call was made with the expected
+ # request object values.
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = 'name_value' + assert arg == mock_val + +@pytest.mark.asyncio +async def test_get_metadata_job_flattened_error_async(): + client = CatalogServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.get_metadata_job( + catalog.GetMetadataJobRequest(), + name='name_value', + ) + + +@pytest.mark.parametrize("request_type", [ + catalog.ListMetadataJobsRequest, + dict, +]) +def test_list_metadata_jobs(request_type, transport: str = 'grpc'): + client = CatalogServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_metadata_jobs), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = catalog.ListMetadataJobsResponse( + next_page_token='next_page_token_value', + unreachable_locations=['unreachable_locations_value'], + ) + response = client.list_metadata_jobs(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = catalog.ListMetadataJobsRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListMetadataJobsPager) + assert response.next_page_token == 'next_page_token_value' + assert response.unreachable_locations == ['unreachable_locations_value'] + + +def test_list_metadata_jobs_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = CatalogServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = catalog.ListMetadataJobsRequest( + parent='parent_value', + page_token='page_token_value', + filter='filter_value', + order_by='order_by_value', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_metadata_jobs), + '__call__') as call: + call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. 
+ client.list_metadata_jobs(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == catalog.ListMetadataJobsRequest( + parent='parent_value', + page_token='page_token_value', + filter='filter_value', + order_by='order_by_value', + ) + +def test_list_metadata_jobs_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = CatalogServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.list_metadata_jobs in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client._transport._wrapped_methods[client._transport.list_metadata_jobs] = mock_rpc + request = {} + client.list_metadata_jobs(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.list_metadata_jobs(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + +@pytest.mark.asyncio +async def test_list_metadata_jobs_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = CatalogServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._client._transport.list_metadata_jobs in client._client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[client._client._transport.list_metadata_jobs] = mock_rpc + + request = {} + await client.list_metadata_jobs(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + await client.list_metadata_jobs(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + +@pytest.mark.asyncio +async def test_list_metadata_jobs_async(transport: str = 'grpc_asyncio', request_type=catalog.ListMetadataJobsRequest): + client = CatalogServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_metadata_jobs), + '__call__') as call: + # Designate an appropriate return value for the call. 
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(catalog.ListMetadataJobsResponse(
+ next_page_token='next_page_token_value',
+ unreachable_locations=['unreachable_locations_value'],
+ ))
+ response = await client.list_metadata_jobs(request)
+
+ # Establish that the underlying gRPC stub method was called.
+ assert len(call.mock_calls)
+ _, args, _ = call.mock_calls[0]
+ request = catalog.ListMetadataJobsRequest()
+ assert args[0] == request
+
+ # Establish that the response is the type that we expect.
+ assert isinstance(response, pagers.ListMetadataJobsAsyncPager)
+ assert response.next_page_token == 'next_page_token_value'
+ assert response.unreachable_locations == ['unreachable_locations_value']
+
+
+@pytest.mark.asyncio
+async def test_list_metadata_jobs_async_from_dict():
+ await test_list_metadata_jobs_async(request_type=dict)
+
+def test_list_metadata_jobs_field_headers():
+ client = CatalogServiceClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ )
+
+ # Any value that is part of the HTTP/1.1 URI should be sent as
+ # a field header. Set these to a non-empty value.
+ request = catalog.ListMetadataJobsRequest()
+
+ request.parent = 'parent_value'
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(
+ type(client.transport.list_metadata_jobs),
+ '__call__') as call:
+ call.return_value = catalog.ListMetadataJobsResponse()
+ client.list_metadata_jobs(request)
+
+ # Establish that the underlying gRPC stub method was called.
+ assert len(call.mock_calls) == 1
+ _, args, _ = call.mock_calls[0]
+ assert args[0] == request
+
+ # Establish that the field header was sent.
+ _, _, kw = call.mock_calls[0]
+ assert (
+ 'x-goog-request-params',
+ 'parent=parent_value',
+ ) in kw['metadata']
+
+
+@pytest.mark.asyncio
+async def test_list_metadata_jobs_field_headers_async():
+ client = CatalogServiceAsyncClient(
+ credentials=async_anonymous_credentials(),
+ )
+
+ # Any value that is part of the HTTP/1.1 URI should be sent as
+ # a field header. Set these to a non-empty value.
+ request = catalog.ListMetadataJobsRequest()
+
+ request.parent = 'parent_value'
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(
+ type(client.transport.list_metadata_jobs),
+ '__call__') as call:
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(catalog.ListMetadataJobsResponse())
+ await client.list_metadata_jobs(request)
+
+ # Establish that the underlying gRPC stub method was called.
+ assert len(call.mock_calls)
+ _, args, _ = call.mock_calls[0]
+ assert args[0] == request
+
+ # Establish that the field header was sent.
+ _, _, kw = call.mock_calls[0]
+ assert (
+ 'x-goog-request-params',
+ 'parent=parent_value',
+ ) in kw['metadata']
+
+
+def test_list_metadata_jobs_flattened():
+ client = CatalogServiceClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ )
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(
+ type(client.transport.list_metadata_jobs),
+ '__call__') as call:
+ # Designate an appropriate return value for the call.
+ call.return_value = catalog.ListMetadataJobsResponse()
+ # Call the method with a truthy value for each flattened field,
+ # using the keyword arguments to the method.
+ client.list_metadata_jobs(
+ parent='parent_value',
+ )
+
+ # Establish that the underlying call was made with the expected
+ # request object values.
+ assert len(call.mock_calls) == 1
+ _, args, _ = call.mock_calls[0]
+ arg = args[0].parent
+ mock_val = 'parent_value'
+ assert arg == mock_val
+
+
+def test_list_metadata_jobs_flattened_error():
+ client = CatalogServiceClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ )
+
+ # Attempting to call a method with both a request object and flattened
+ # fields is an error.
+ with pytest.raises(ValueError):
+ client.list_metadata_jobs(
+ catalog.ListMetadataJobsRequest(),
+ parent='parent_value',
+ )
+
+@pytest.mark.asyncio
+async def test_list_metadata_jobs_flattened_async():
+ client = CatalogServiceAsyncClient(
+ credentials=async_anonymous_credentials(),
+ )
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(
+ type(client.transport.list_metadata_jobs),
+ '__call__') as call:
+ # Designate an appropriate return value for the call.
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(catalog.ListMetadataJobsResponse())
+ # Call the method with a truthy value for each flattened field,
+ # using the keyword arguments to the method.
+ response = await client.list_metadata_jobs(
+ parent='parent_value',
+ )
+
+ # Establish that the underlying call was made with the expected
+ # request object values.
+ assert len(call.mock_calls)
+ _, args, _ = call.mock_calls[0]
+ arg = args[0].parent
+ mock_val = 'parent_value'
+ assert arg == mock_val
+
+@pytest.mark.asyncio
+async def test_list_metadata_jobs_flattened_error_async():
+ client = CatalogServiceAsyncClient(
+ credentials=async_anonymous_credentials(),
+ )
+
+ # Attempting to call a method with both a request object and flattened
+ # fields is an error.
+ with pytest.raises(ValueError):
+ await client.list_metadata_jobs(
+ catalog.ListMetadataJobsRequest(),
+ parent='parent_value',
+ )
+
+
+def test_list_metadata_jobs_pager(transport_name: str = "grpc"):
+ client = CatalogServiceClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ transport=transport_name,
+ )
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(
+ type(client.transport.list_metadata_jobs),
+ '__call__') as call:
+ # Set the response to a series of pages.
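+ # Editor's note (illustrative sketch, not generated code): besides flat
+ # iteration, `pager.pages` yields each raw ListMetadataJobsResponse, giving
+ # access to per-page tokens, e.g. (placeholder parent):
+ #
+ #     pager = client.list_metadata_jobs(parent="projects/p/locations/l")
+ #     for page in pager.pages:
+ #         print(page.raw_page.next_page_token, len(page.metadata_jobs))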
+ call.side_effect = (
+ catalog.ListMetadataJobsResponse(
+ metadata_jobs=[
+ catalog.MetadataJob(),
+ catalog.MetadataJob(),
+ catalog.MetadataJob(),
+ ],
+ next_page_token='abc',
+ ),
+ catalog.ListMetadataJobsResponse(
+ metadata_jobs=[],
+ next_page_token='def',
+ ),
+ catalog.ListMetadataJobsResponse(
+ metadata_jobs=[
+ catalog.MetadataJob(),
+ ],
+ next_page_token='ghi',
+ ),
+ catalog.ListMetadataJobsResponse(
+ metadata_jobs=[
+ catalog.MetadataJob(),
+ catalog.MetadataJob(),
+ ],
+ ),
+ RuntimeError,
+ )
+
+ expected_metadata = ()
+ retry = retries.Retry()
+ timeout = 5
+ expected_metadata = tuple(expected_metadata) + (
+ gapic_v1.routing_header.to_grpc_metadata((
+ ('parent', ''),
+ )),
+ )
+ pager = client.list_metadata_jobs(request={}, retry=retry, timeout=timeout)
+
+ assert pager._metadata == expected_metadata
+ assert pager._retry == retry
+ assert pager._timeout == timeout
+
+ results = list(pager)
+ assert len(results) == 6
+ assert all(isinstance(i, catalog.MetadataJob)
+ for i in results)
+
+
+def test_list_metadata_jobs_pages(transport_name: str = "grpc"):
+ client = CatalogServiceClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ transport=transport_name,
+ )
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(
+ type(client.transport.list_metadata_jobs),
+ '__call__') as call:
+ # Set the response to a series of pages.
+ call.side_effect = (
+ catalog.ListMetadataJobsResponse(
+ metadata_jobs=[
+ catalog.MetadataJob(),
+ catalog.MetadataJob(),
+ catalog.MetadataJob(),
+ ],
+ next_page_token='abc',
+ ),
+ catalog.ListMetadataJobsResponse(
+ metadata_jobs=[],
+ next_page_token='def',
+ ),
+ catalog.ListMetadataJobsResponse(
+ metadata_jobs=[
+ catalog.MetadataJob(),
+ ],
+ next_page_token='ghi',
+ ),
+ catalog.ListMetadataJobsResponse(
+ metadata_jobs=[
+ catalog.MetadataJob(),
+ catalog.MetadataJob(),
+ ],
+ ),
+ RuntimeError,
+ )
+ pages = list(client.list_metadata_jobs(request={}).pages)
+ for page_, token in zip(pages, ['abc','def','ghi', '']):
+ assert page_.raw_page.next_page_token == token
+
+@pytest.mark.asyncio
+async def test_list_metadata_jobs_async_pager():
+ client = CatalogServiceAsyncClient(
+ credentials=async_anonymous_credentials(),
+ )
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(
+ type(client.transport.list_metadata_jobs),
+ '__call__', new_callable=mock.AsyncMock) as call:
+ # Set the response to a series of pages.
+ call.side_effect = ( + catalog.ListMetadataJobsResponse( + metadata_jobs=[ + catalog.MetadataJob(), + catalog.MetadataJob(), + catalog.MetadataJob(), + ], + next_page_token='abc', + ), + catalog.ListMetadataJobsResponse( + metadata_jobs=[], + next_page_token='def', + ), + catalog.ListMetadataJobsResponse( + metadata_jobs=[ + catalog.MetadataJob(), + ], + next_page_token='ghi', + ), + catalog.ListMetadataJobsResponse( + metadata_jobs=[ + catalog.MetadataJob(), + catalog.MetadataJob(), + ], + ), + RuntimeError, + ) + async_pager = await client.list_metadata_jobs(request={},) + assert async_pager.next_page_token == 'abc' + responses = [] + async for response in async_pager: # pragma: no branch + responses.append(response) + + assert len(responses) == 6 + assert all(isinstance(i, catalog.MetadataJob) + for i in responses) + + +@pytest.mark.asyncio +async def test_list_metadata_jobs_async_pages(): + client = CatalogServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_metadata_jobs), + '__call__', new_callable=mock.AsyncMock) as call: + # Set the response to a series of pages. + call.side_effect = ( + catalog.ListMetadataJobsResponse( + metadata_jobs=[ + catalog.MetadataJob(), + catalog.MetadataJob(), + catalog.MetadataJob(), + ], + next_page_token='abc', + ), + catalog.ListMetadataJobsResponse( + metadata_jobs=[], + next_page_token='def', + ), + catalog.ListMetadataJobsResponse( + metadata_jobs=[ + catalog.MetadataJob(), + ], + next_page_token='ghi', + ), + catalog.ListMetadataJobsResponse( + metadata_jobs=[ + catalog.MetadataJob(), + catalog.MetadataJob(), + ], + ), + RuntimeError, + ) + pages = [] + # Workaround issue in python 3.9 related to code coverage by adding `# pragma: no branch` + # See https://github.com/googleapis/gapic-generator-python/pull/1174#issuecomment-1025132372 + async for page_ in ( # pragma: no branch + await client.list_metadata_jobs(request={}) + ).pages: + pages.append(page_) + for page_, token in zip(pages, ['abc','def','ghi', '']): + assert page_.raw_page.next_page_token == token + +@pytest.mark.parametrize("request_type", [ + catalog.CancelMetadataJobRequest, + dict, +]) +def test_cancel_metadata_job(request_type, transport: str = 'grpc'): + client = CatalogServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.cancel_metadata_job), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = None + response = client.cancel_metadata_job(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = catalog.CancelMetadataJobRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert response is None + + +def test_cancel_metadata_job_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. 
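+ # Editor's note: under AIP-4235, request fields annotated for
+ # auto-population receive a client-generated UUID4 only when the caller
+ # leaves them unset; explicitly populated fields, like the ones below, are
+ # sent through unchanged.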
+ client = CatalogServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = catalog.CancelMetadataJobRequest( + name='name_value', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.cancel_metadata_job), + '__call__') as call: + call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client.cancel_metadata_job(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == catalog.CancelMetadataJobRequest( + name='name_value', + ) + +def test_cancel_metadata_job_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = CatalogServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.cancel_metadata_job in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client._transport._wrapped_methods[client._transport.cancel_metadata_job] = mock_rpc + request = {} + client.cancel_metadata_job(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.cancel_metadata_job(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + +@pytest.mark.asyncio +async def test_cancel_metadata_job_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = CatalogServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._client._transport.cancel_metadata_job in client._client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[client._client._transport.cancel_metadata_job] = mock_rpc + + request = {} + await client.cancel_metadata_job(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + await client.cancel_metadata_job(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + +@pytest.mark.asyncio +async def test_cancel_metadata_job_async(transport: str = 'grpc_asyncio', request_type=catalog.CancelMetadataJobRequest): + client = CatalogServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.cancel_metadata_job), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + response = await client.cancel_metadata_job(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = catalog.CancelMetadataJobRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert response is None + + +@pytest.mark.asyncio +async def test_cancel_metadata_job_async_from_dict(): + await test_cancel_metadata_job_async(request_type=dict) + +def test_cancel_metadata_job_field_headers(): + client = CatalogServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = catalog.CancelMetadataJobRequest() + + request.name = 'name_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.cancel_metadata_job), + '__call__') as call: + call.return_value = None + client.cancel_metadata_job(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'name=name_value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_cancel_metadata_job_field_headers_async(): + client = CatalogServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = catalog.CancelMetadataJobRequest() + + request.name = 'name_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.cancel_metadata_job), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + await client.cancel_metadata_job(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'name=name_value', + ) in kw['metadata'] + + +def test_cancel_metadata_job_flattened(): + client = CatalogServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object(
+ type(client.transport.cancel_metadata_job),
+ '__call__') as call:
+ # Designate an appropriate return value for the call.
+ call.return_value = None
+ # Call the method with a truthy value for each flattened field,
+ # using the keyword arguments to the method.
+ client.cancel_metadata_job(
+ name='name_value',
+ )
+
+ # Establish that the underlying call was made with the expected
+ # request object values.
+ assert len(call.mock_calls) == 1
+ _, args, _ = call.mock_calls[0]
+ arg = args[0].name
+ mock_val = 'name_value'
+ assert arg == mock_val
+
+
+def test_cancel_metadata_job_flattened_error():
+ client = CatalogServiceClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ )
+
+ # Attempting to call a method with both a request object and flattened
+ # fields is an error.
+ with pytest.raises(ValueError):
+ client.cancel_metadata_job(
+ catalog.CancelMetadataJobRequest(),
+ name='name_value',
+ )
+
+@pytest.mark.asyncio
+async def test_cancel_metadata_job_flattened_async():
+ client = CatalogServiceAsyncClient(
+ credentials=async_anonymous_credentials(),
+ )
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(
+ type(client.transport.cancel_metadata_job),
+ '__call__') as call:
+ # Designate an appropriate return value for the call.
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None)
+ # Call the method with a truthy value for each flattened field,
+ # using the keyword arguments to the method.
+ response = await client.cancel_metadata_job(
+ name='name_value',
+ )
+
+ # Establish that the underlying call was made with the expected
+ # request object values.
+ assert len(call.mock_calls)
+ _, args, _ = call.mock_calls[0]
+ arg = args[0].name
+ mock_val = 'name_value'
+ assert arg == mock_val
+
+@pytest.mark.asyncio
+async def test_cancel_metadata_job_flattened_error_async():
+ client = CatalogServiceAsyncClient(
+ credentials=async_anonymous_credentials(),
+ )
+
+ # Attempting to call a method with both a request object and flattened
+ # fields is an error.
+ with pytest.raises(ValueError):
+ await client.cancel_metadata_job(
+ catalog.CancelMetadataJobRequest(),
+ name='name_value',
+ )
+
+
+def test_credentials_transport_error():
+ # It is an error to provide credentials and a transport instance.
+ transport = transports.CatalogServiceGrpcTransport(
+ credentials=ga_credentials.AnonymousCredentials(),
+ )
+ with pytest.raises(ValueError):
+ client = CatalogServiceClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ transport=transport,
+ )
+
+ # It is an error to provide a credentials file and a transport instance.
+ transport = transports.CatalogServiceGrpcTransport(
+ credentials=ga_credentials.AnonymousCredentials(),
+ )
+ with pytest.raises(ValueError):
+ client = CatalogServiceClient(
+ client_options={"credentials_file": "credentials.json"},
+ transport=transport,
+ )
+
+ # It is an error to provide an api_key and a transport instance.
+ transport = transports.CatalogServiceGrpcTransport(
+ credentials=ga_credentials.AnonymousCredentials(),
+ )
+ options = client_options.ClientOptions()
+ options.api_key = "api_key"
+ with pytest.raises(ValueError):
+ client = CatalogServiceClient(
+ client_options=options,
+ transport=transport,
+ )
+
+ # It is an error to provide an api_key and a credential.
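+ # Editor's note (illustrative sketch, not generated code): a prebuilt
+ # transport already carries its own credentials, so it cannot be combined
+ # with credentials, a credentials file, an api_key, or scopes; the
+ # supported construction patterns are:
+ #
+ #     CatalogServiceClient()                              # ADC
+ #     CatalogServiceClient(credentials=my_credentials)    # explicit credentials
+ #     CatalogServiceClient(transport=my_transport)        # custom transport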
+ options = client_options.ClientOptions() + options.api_key = "api_key" + with pytest.raises(ValueError): + client = CatalogServiceClient( + client_options=options, + credentials=ga_credentials.AnonymousCredentials() + ) + + # It is an error to provide scopes and a transport instance. + transport = transports.CatalogServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = CatalogServiceClient( + client_options={"scopes": ["1", "2"]}, + transport=transport, + ) + + +def test_transport_instance(): + # A client may be instantiated with a custom transport instance. + transport = transports.CatalogServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + client = CatalogServiceClient(transport=transport) + assert client.transport is transport + +def test_transport_get_channel(): + # A client may be instantiated with a custom transport instance. + transport = transports.CatalogServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + channel = transport.grpc_channel + assert channel + + transport = transports.CatalogServiceGrpcAsyncIOTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + channel = transport.grpc_channel + assert channel + +@pytest.mark.parametrize("transport_class", [ + transports.CatalogServiceGrpcTransport, + transports.CatalogServiceGrpcAsyncIOTransport, +]) +def test_transport_adc(transport_class): + # Test default credentials are used if not provided. + with mock.patch.object(google.auth, 'default') as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport_class() + adc.assert_called_once() + +def test_transport_kind_grpc(): + transport = CatalogServiceClient.get_transport_class("grpc")( + credentials=ga_credentials.AnonymousCredentials() + ) + assert transport.kind == "grpc" + + +def test_initialize_client_w_grpc(): + client = CatalogServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc" + ) + assert client is not None + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_create_entry_type_empty_call_grpc(): + client = CatalogServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.create_entry_type), + '__call__') as call: + call.return_value = operations_pb2.Operation(name='operations/op') + client.create_entry_type(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = catalog.CreateEntryTypeRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_update_entry_type_empty_call_grpc(): + client = CatalogServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.update_entry_type), + '__call__') as call: + call.return_value = operations_pb2.Operation(name='operations/op') + client.update_entry_type(request=None) + + # Establish that the underlying stub method was called. 
+ call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = catalog.UpdateEntryTypeRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_delete_entry_type_empty_call_grpc(): + client = CatalogServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.delete_entry_type), + '__call__') as call: + call.return_value = operations_pb2.Operation(name='operations/op') + client.delete_entry_type(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = catalog.DeleteEntryTypeRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_list_entry_types_empty_call_grpc(): + client = CatalogServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.list_entry_types), + '__call__') as call: + call.return_value = catalog.ListEntryTypesResponse() + client.list_entry_types(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = catalog.ListEntryTypesRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_get_entry_type_empty_call_grpc(): + client = CatalogServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.get_entry_type), + '__call__') as call: + call.return_value = catalog.EntryType() + client.get_entry_type(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = catalog.GetEntryTypeRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_create_aspect_type_empty_call_grpc(): + client = CatalogServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.create_aspect_type), + '__call__') as call: + call.return_value = operations_pb2.Operation(name='operations/op') + client.create_aspect_type(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = catalog.CreateAspectTypeRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_update_aspect_type_empty_call_grpc(): + client = CatalogServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. 
+ with mock.patch.object( + type(client.transport.update_aspect_type), + '__call__') as call: + call.return_value = operations_pb2.Operation(name='operations/op') + client.update_aspect_type(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = catalog.UpdateAspectTypeRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_delete_aspect_type_empty_call_grpc(): + client = CatalogServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.delete_aspect_type), + '__call__') as call: + call.return_value = operations_pb2.Operation(name='operations/op') + client.delete_aspect_type(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = catalog.DeleteAspectTypeRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_list_aspect_types_empty_call_grpc(): + client = CatalogServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.list_aspect_types), + '__call__') as call: + call.return_value = catalog.ListAspectTypesResponse() + client.list_aspect_types(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = catalog.ListAspectTypesRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_get_aspect_type_empty_call_grpc(): + client = CatalogServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.get_aspect_type), + '__call__') as call: + call.return_value = catalog.AspectType() + client.get_aspect_type(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = catalog.GetAspectTypeRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_create_entry_group_empty_call_grpc(): + client = CatalogServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.create_entry_group), + '__call__') as call: + call.return_value = operations_pb2.Operation(name='operations/op') + client.create_entry_group(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = catalog.CreateEntryGroupRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. 
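+# (When request is None, the client constructs a default, all-empty request
+# message of the expected type; the assertion below verifies exactly that.)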
+def test_update_entry_group_empty_call_grpc(): + client = CatalogServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.update_entry_group), + '__call__') as call: + call.return_value = operations_pb2.Operation(name='operations/op') + client.update_entry_group(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = catalog.UpdateEntryGroupRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_delete_entry_group_empty_call_grpc(): + client = CatalogServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.delete_entry_group), + '__call__') as call: + call.return_value = operations_pb2.Operation(name='operations/op') + client.delete_entry_group(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = catalog.DeleteEntryGroupRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_list_entry_groups_empty_call_grpc(): + client = CatalogServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.list_entry_groups), + '__call__') as call: + call.return_value = catalog.ListEntryGroupsResponse() + client.list_entry_groups(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = catalog.ListEntryGroupsRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_get_entry_group_empty_call_grpc(): + client = CatalogServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.get_entry_group), + '__call__') as call: + call.return_value = catalog.EntryGroup() + client.get_entry_group(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = catalog.GetEntryGroupRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_create_entry_empty_call_grpc(): + client = CatalogServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.create_entry), + '__call__') as call: + call.return_value = catalog.Entry() + client.create_entry(request=None) + + # Establish that the underlying stub method was called. 
+ call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = catalog.CreateEntryRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_update_entry_empty_call_grpc(): + client = CatalogServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.update_entry), + '__call__') as call: + call.return_value = catalog.Entry() + client.update_entry(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = catalog.UpdateEntryRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_delete_entry_empty_call_grpc(): + client = CatalogServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.delete_entry), + '__call__') as call: + call.return_value = catalog.Entry() + client.delete_entry(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = catalog.DeleteEntryRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_list_entries_empty_call_grpc(): + client = CatalogServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.list_entries), + '__call__') as call: + call.return_value = catalog.ListEntriesResponse() + client.list_entries(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = catalog.ListEntriesRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_get_entry_empty_call_grpc(): + client = CatalogServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.get_entry), + '__call__') as call: + call.return_value = catalog.Entry() + client.get_entry(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = catalog.GetEntryRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_lookup_entry_empty_call_grpc(): + client = CatalogServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.lookup_entry), + '__call__') as call: + call.return_value = catalog.Entry() + client.lookup_entry(request=None) + + # Establish that the underlying stub method was called. 
+ call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = catalog.LookupEntryRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_search_entries_empty_call_grpc(): + client = CatalogServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.search_entries), + '__call__') as call: + call.return_value = catalog.SearchEntriesResponse() + client.search_entries(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = catalog.SearchEntriesRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_create_metadata_job_empty_call_grpc(): + client = CatalogServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.create_metadata_job), + '__call__') as call: + call.return_value = operations_pb2.Operation(name='operations/op') + client.create_metadata_job(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = catalog.CreateMetadataJobRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_get_metadata_job_empty_call_grpc(): + client = CatalogServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.get_metadata_job), + '__call__') as call: + call.return_value = catalog.MetadataJob() + client.get_metadata_job(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = catalog.GetMetadataJobRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_list_metadata_jobs_empty_call_grpc(): + client = CatalogServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.list_metadata_jobs), + '__call__') as call: + call.return_value = catalog.ListMetadataJobsResponse() + client.list_metadata_jobs(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = catalog.ListMetadataJobsRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_cancel_metadata_job_empty_call_grpc(): + client = CatalogServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. 
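+ # (CancelMetadataJob has an Empty response, so the mocked stub returns None.)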
+ with mock.patch.object( + type(client.transport.cancel_metadata_job), + '__call__') as call: + call.return_value = None + client.cancel_metadata_job(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = catalog.CancelMetadataJobRequest() + + assert args[0] == request_msg + + +def test_transport_kind_grpc_asyncio(): + transport = CatalogServiceAsyncClient.get_transport_class("grpc_asyncio")( + credentials=async_anonymous_credentials() + ) + assert transport.kind == "grpc_asyncio" + + +def test_initialize_client_w_grpc_asyncio(): + client = CatalogServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio" + ) + assert client is not None + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_create_entry_type_empty_call_grpc_asyncio(): + client = CatalogServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.create_entry_type), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name='operations/spam') + ) + await client.create_entry_type(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = catalog.CreateEntryTypeRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_update_entry_type_empty_call_grpc_asyncio(): + client = CatalogServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.update_entry_type), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name='operations/spam') + ) + await client.update_entry_type(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = catalog.UpdateEntryTypeRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_delete_entry_type_empty_call_grpc_asyncio(): + client = CatalogServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.delete_entry_type), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name='operations/spam') + ) + await client.delete_entry_type(request=None) + + # Establish that the underlying stub method was called. 
+ call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = catalog.DeleteEntryTypeRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_list_entry_types_empty_call_grpc_asyncio(): + client = CatalogServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.list_entry_types), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(catalog.ListEntryTypesResponse( + next_page_token='next_page_token_value', + unreachable_locations=['unreachable_locations_value'], + )) + await client.list_entry_types(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = catalog.ListEntryTypesRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_get_entry_type_empty_call_grpc_asyncio(): + client = CatalogServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.get_entry_type), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(catalog.EntryType( + name='name_value', + uid='uid_value', + description='description_value', + display_name='display_name_value', + etag='etag_value', + type_aliases=['type_aliases_value'], + platform='platform_value', + system='system_value', + )) + await client.get_entry_type(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = catalog.GetEntryTypeRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_create_aspect_type_empty_call_grpc_asyncio(): + client = CatalogServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.create_aspect_type), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name='operations/spam') + ) + await client.create_aspect_type(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = catalog.CreateAspectTypeRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_update_aspect_type_empty_call_grpc_asyncio(): + client = CatalogServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. 
+ with mock.patch.object( + type(client.transport.update_aspect_type), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name='operations/spam') + ) + await client.update_aspect_type(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = catalog.UpdateAspectTypeRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_delete_aspect_type_empty_call_grpc_asyncio(): + client = CatalogServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.delete_aspect_type), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name='operations/spam') + ) + await client.delete_aspect_type(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = catalog.DeleteAspectTypeRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_list_aspect_types_empty_call_grpc_asyncio(): + client = CatalogServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.list_aspect_types), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(catalog.ListAspectTypesResponse( + next_page_token='next_page_token_value', + unreachable_locations=['unreachable_locations_value'], + )) + await client.list_aspect_types(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = catalog.ListAspectTypesRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_get_aspect_type_empty_call_grpc_asyncio(): + client = CatalogServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.get_aspect_type), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(catalog.AspectType( + name='name_value', + uid='uid_value', + description='description_value', + display_name='display_name_value', + etag='etag_value', + transfer_status=catalog.TransferStatus.TRANSFER_STATUS_MIGRATED, + )) + await client.get_aspect_type(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = catalog.GetAspectTypeRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. 
request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_create_entry_group_empty_call_grpc_asyncio(): + client = CatalogServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.create_entry_group), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name='operations/spam') + ) + await client.create_entry_group(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = catalog.CreateEntryGroupRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_update_entry_group_empty_call_grpc_asyncio(): + client = CatalogServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.update_entry_group), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name='operations/spam') + ) + await client.update_entry_group(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = catalog.UpdateEntryGroupRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_delete_entry_group_empty_call_grpc_asyncio(): + client = CatalogServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.delete_entry_group), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name='operations/spam') + ) + await client.delete_entry_group(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = catalog.DeleteEntryGroupRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_list_entry_groups_empty_call_grpc_asyncio(): + client = CatalogServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.list_entry_groups), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(catalog.ListEntryGroupsResponse( + next_page_token='next_page_token_value', + unreachable_locations=['unreachable_locations_value'], + )) + await client.list_entry_groups(request=None) + + # Establish that the underlying stub method was called. 
+ call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = catalog.ListEntryGroupsRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_get_entry_group_empty_call_grpc_asyncio(): + client = CatalogServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.get_entry_group), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(catalog.EntryGroup( + name='name_value', + uid='uid_value', + description='description_value', + display_name='display_name_value', + etag='etag_value', + transfer_status=catalog.TransferStatus.TRANSFER_STATUS_MIGRATED, + )) + await client.get_entry_group(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = catalog.GetEntryGroupRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_create_entry_empty_call_grpc_asyncio(): + client = CatalogServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.create_entry), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(catalog.Entry( + name='name_value', + entry_type='entry_type_value', + parent_entry='parent_entry_value', + fully_qualified_name='fully_qualified_name_value', + )) + await client.create_entry(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = catalog.CreateEntryRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_update_entry_empty_call_grpc_asyncio(): + client = CatalogServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.update_entry), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(catalog.Entry( + name='name_value', + entry_type='entry_type_value', + parent_entry='parent_entry_value', + fully_qualified_name='fully_qualified_name_value', + )) + await client.update_entry(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = catalog.UpdateEntryRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. 
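+# (The asyncio variants await the surface method, so the mocked stub must
+# return an awaitable; see the FakeUnaryUnaryCall wrapper below.)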
+@pytest.mark.asyncio +async def test_delete_entry_empty_call_grpc_asyncio(): + client = CatalogServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.delete_entry), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(catalog.Entry( + name='name_value', + entry_type='entry_type_value', + parent_entry='parent_entry_value', + fully_qualified_name='fully_qualified_name_value', + )) + await client.delete_entry(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = catalog.DeleteEntryRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_list_entries_empty_call_grpc_asyncio(): + client = CatalogServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.list_entries), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(catalog.ListEntriesResponse( + next_page_token='next_page_token_value', + )) + await client.list_entries(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = catalog.ListEntriesRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_get_entry_empty_call_grpc_asyncio(): + client = CatalogServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.get_entry), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(catalog.Entry( + name='name_value', + entry_type='entry_type_value', + parent_entry='parent_entry_value', + fully_qualified_name='fully_qualified_name_value', + )) + await client.get_entry(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = catalog.GetEntryRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_lookup_entry_empty_call_grpc_asyncio(): + client = CatalogServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.lookup_entry), + '__call__') as call: + # Designate an appropriate return value for the call. 
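+ # (grpc_helpers_async.FakeUnaryUnaryCall wraps the response so the mock
+ # behaves like an awaitable unary-unary gRPC call.)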
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(catalog.Entry( + name='name_value', + entry_type='entry_type_value', + parent_entry='parent_entry_value', + fully_qualified_name='fully_qualified_name_value', + )) + await client.lookup_entry(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = catalog.LookupEntryRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_search_entries_empty_call_grpc_asyncio(): + client = CatalogServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.search_entries), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(catalog.SearchEntriesResponse( + total_size=1086, + next_page_token='next_page_token_value', + unreachable=['unreachable_value'], + )) + await client.search_entries(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = catalog.SearchEntriesRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_create_metadata_job_empty_call_grpc_asyncio(): + client = CatalogServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.create_metadata_job), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name='operations/spam') + ) + await client.create_metadata_job(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = catalog.CreateMetadataJobRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_get_metadata_job_empty_call_grpc_asyncio(): + client = CatalogServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.get_metadata_job), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(catalog.MetadataJob( + name='name_value', + uid='uid_value', + type_=catalog.MetadataJob.Type.IMPORT, + )) + await client.get_metadata_job(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = catalog.GetMetadataJobRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. 
+@pytest.mark.asyncio +async def test_list_metadata_jobs_empty_call_grpc_asyncio(): + client = CatalogServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.list_metadata_jobs), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(catalog.ListMetadataJobsResponse( + next_page_token='next_page_token_value', + unreachable_locations=['unreachable_locations_value'], + )) + await client.list_metadata_jobs(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = catalog.ListMetadataJobsRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_cancel_metadata_job_empty_call_grpc_asyncio(): + client = CatalogServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.cancel_metadata_job), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + await client.cancel_metadata_job(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = catalog.CancelMetadataJobRequest() + + assert args[0] == request_msg + + +def test_transport_grpc_default(): + # A client should use the gRPC transport by default. + client = CatalogServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + assert isinstance( + client.transport, + transports.CatalogServiceGrpcTransport, + ) + +def test_catalog_service_base_transport_error(): + # Passing both a credentials object and credentials_file should raise an error + with pytest.raises(core_exceptions.DuplicateCredentialArgs): + transport = transports.CatalogServiceTransport( + credentials=ga_credentials.AnonymousCredentials(), + credentials_file="credentials.json" + ) + + +def test_catalog_service_base_transport(): + # Instantiate the base transport. + with mock.patch('google.cloud.dataplex_v1.services.catalog_service.transports.CatalogServiceTransport.__init__') as Transport: + Transport.return_value = None + transport = transports.CatalogServiceTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Every method on the transport should just blindly + # raise NotImplementedError. 
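+ # (The tuple below enumerates the full RPC surface plus the operations and
+ # locations mixins; the abstract base transport implements none of them.)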
+ methods = ( + 'create_entry_type', + 'update_entry_type', + 'delete_entry_type', + 'list_entry_types', + 'get_entry_type', + 'create_aspect_type', + 'update_aspect_type', + 'delete_aspect_type', + 'list_aspect_types', + 'get_aspect_type', + 'create_entry_group', + 'update_entry_group', + 'delete_entry_group', + 'list_entry_groups', + 'get_entry_group', + 'create_entry', + 'update_entry', + 'delete_entry', + 'list_entries', + 'get_entry', + 'lookup_entry', + 'search_entries', + 'create_metadata_job', + 'get_metadata_job', + 'list_metadata_jobs', + 'cancel_metadata_job', + 'get_location', + 'list_locations', + 'get_operation', + 'cancel_operation', + 'delete_operation', + 'list_operations', + ) + for method in methods: + with pytest.raises(NotImplementedError): + getattr(transport, method)(request=object()) + + with pytest.raises(NotImplementedError): + transport.close() + + # Additionally, the LRO client (a property) should + # also raise NotImplementedError + with pytest.raises(NotImplementedError): + transport.operations_client + + # Catch all for all remaining methods and properties + remainder = [ + 'kind', + ] + for r in remainder: + with pytest.raises(NotImplementedError): + getattr(transport, r)() + + +def test_catalog_service_base_transport_with_credentials_file(): + # Instantiate the base transport with a credentials file + with mock.patch.object(google.auth, 'load_credentials_from_file', autospec=True) as load_creds, mock.patch('google.cloud.dataplex_v1.services.catalog_service.transports.CatalogServiceTransport._prep_wrapped_messages') as Transport: + Transport.return_value = None + load_creds.return_value = (ga_credentials.AnonymousCredentials(), None) + transport = transports.CatalogServiceTransport( + credentials_file="credentials.json", + quota_project_id="octopus", + ) + load_creds.assert_called_once_with("credentials.json", + scopes=None, + default_scopes=( + 'https://www.googleapis.com/auth/cloud-platform', +), + quota_project_id="octopus", + ) + + +def test_catalog_service_base_transport_with_adc(): + # Test the default credentials are used if credentials and credentials_file are None. + with mock.patch.object(google.auth, 'default', autospec=True) as adc, mock.patch('google.cloud.dataplex_v1.services.catalog_service.transports.CatalogServiceTransport._prep_wrapped_messages') as Transport: + Transport.return_value = None + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport = transports.CatalogServiceTransport() + adc.assert_called_once() + + +def test_catalog_service_auth_adc(): + # If no credentials are provided, we should use ADC credentials. + with mock.patch.object(google.auth, 'default', autospec=True) as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + CatalogServiceClient() + adc.assert_called_once_with( + scopes=None, + default_scopes=( + 'https://www.googleapis.com/auth/cloud-platform', +), + quota_project_id=None, + ) + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.CatalogServiceGrpcTransport, + transports.CatalogServiceGrpcAsyncIOTransport, + ], +) +def test_catalog_service_transport_auth_adc(transport_class): + # If credentials and host are not provided, the transport class should use + # ADC credentials. 
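+ # (google.auth.default is patched so the test never performs a real
+ # Application Default Credentials lookup.)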
+ with mock.patch.object(google.auth, 'default', autospec=True) as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport_class(quota_project_id="octopus", scopes=["1", "2"]) + adc.assert_called_once_with( + scopes=["1", "2"], + default_scopes=( 'https://www.googleapis.com/auth/cloud-platform',), + quota_project_id="octopus", + ) + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.CatalogServiceGrpcTransport, + transports.CatalogServiceGrpcAsyncIOTransport, + ], +) +def test_catalog_service_transport_auth_gdch_credentials(transport_class): + host = 'https://language.com' + api_audience_tests = [None, 'https://language2.com'] + api_audience_expect = [host, 'https://language2.com'] + for t, e in zip(api_audience_tests, api_audience_expect): + with mock.patch.object(google.auth, 'default', autospec=True) as adc: + gdch_mock = mock.MagicMock() + type(gdch_mock).with_gdch_audience = mock.PropertyMock(return_value=gdch_mock) + adc.return_value = (gdch_mock, None) + transport_class(host=host, api_audience=t) + gdch_mock.with_gdch_audience.assert_called_once_with( + e + ) + + +@pytest.mark.parametrize( + "transport_class,grpc_helpers", + [ + (transports.CatalogServiceGrpcTransport, grpc_helpers), + (transports.CatalogServiceGrpcAsyncIOTransport, grpc_helpers_async) + ], +) +def test_catalog_service_transport_create_channel(transport_class, grpc_helpers): + # If credentials and host are not provided, the transport class should use + # ADC credentials. + with mock.patch.object(google.auth, "default", autospec=True) as adc, mock.patch.object( + grpc_helpers, "create_channel", autospec=True + ) as create_channel: + creds = ga_credentials.AnonymousCredentials() + adc.return_value = (creds, None) + transport_class( + quota_project_id="octopus", + scopes=["1", "2"] + ) + + create_channel.assert_called_with( + "dataplex.googleapis.com:443", + credentials=creds, + credentials_file=None, + quota_project_id="octopus", + default_scopes=( + 'https://www.googleapis.com/auth/cloud-platform', +), + scopes=["1", "2"], + default_host="dataplex.googleapis.com", + ssl_credentials=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + +@pytest.mark.parametrize("transport_class", [transports.CatalogServiceGrpcTransport, transports.CatalogServiceGrpcAsyncIOTransport]) +def test_catalog_service_grpc_transport_client_cert_source_for_mtls( + transport_class +): + cred = ga_credentials.AnonymousCredentials() + + # Check ssl_channel_credentials is used if provided. + with mock.patch.object(transport_class, "create_channel") as mock_create_channel: + mock_ssl_channel_creds = mock.Mock() + transport_class( + host="squid.clam.whelk", + credentials=cred, + ssl_channel_credentials=mock_ssl_channel_creds + ) + mock_create_channel.assert_called_once_with( + "squid.clam.whelk:443", + credentials=cred, + credentials_file=None, + scopes=None, + ssl_credentials=mock_ssl_channel_creds, + quota_project_id=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + # Check if ssl_channel_credentials is not provided, then client_cert_source_for_mtls + # is used. 
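+ # (The callback supplies the certificate and key bytes that the transport
+ # feeds to grpc.ssl_channel_credentials to build the mTLS credentials.)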
+ with mock.patch.object(transport_class, "create_channel", return_value=mock.Mock()):
+ with mock.patch("grpc.ssl_channel_credentials") as mock_ssl_cred:
+ transport_class(
+ credentials=cred,
+ client_cert_source_for_mtls=client_cert_source_callback
+ )
+ expected_cert, expected_key = client_cert_source_callback()
+ mock_ssl_cred.assert_called_once_with(
+ certificate_chain=expected_cert,
+ private_key=expected_key
+ )
+
+
+@pytest.mark.parametrize("transport_name", [
+ "grpc",
+ "grpc_asyncio",
+])
+def test_catalog_service_host_no_port(transport_name):
+ client = CatalogServiceClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ client_options=client_options.ClientOptions(api_endpoint='dataplex.googleapis.com'),
+ transport=transport_name,
+ )
+ assert client.transport._host == (
+ 'dataplex.googleapis.com:443'
+ )
+
+@pytest.mark.parametrize("transport_name", [
+ "grpc",
+ "grpc_asyncio",
+])
+def test_catalog_service_host_with_port(transport_name):
+ client = CatalogServiceClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ client_options=client_options.ClientOptions(api_endpoint='dataplex.googleapis.com:8000'),
+ transport=transport_name,
+ )
+ assert client.transport._host == (
+ 'dataplex.googleapis.com:8000'
+ )
+
+def test_catalog_service_grpc_transport_channel():
+ channel = grpc.secure_channel('http://localhost/', grpc.local_channel_credentials())
+
+ # Check that channel is used if provided.
+ transport = transports.CatalogServiceGrpcTransport(
+ host="squid.clam.whelk",
+ channel=channel,
+ )
+ assert transport.grpc_channel == channel
+ assert transport._host == "squid.clam.whelk:443"
+ assert transport._ssl_channel_credentials is None
+
+
+def test_catalog_service_grpc_asyncio_transport_channel():
+ channel = aio.secure_channel('http://localhost/', grpc.local_channel_credentials())
+
+ # Check that channel is used if provided.
+ transport = transports.CatalogServiceGrpcAsyncIOTransport(
+ host="squid.clam.whelk",
+ channel=channel,
+ )
+ assert transport.grpc_channel == channel
+ assert transport._host == "squid.clam.whelk:443"
+ assert transport._ssl_channel_credentials is None
+
+
+# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are
+# removed from grpc/grpc_asyncio transport constructor.
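+# (The deprecated arguments are superseded by ssl_channel_credentials and
+# client_cert_source_for_mtls, hence the DeprecationWarning asserted below.)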
+@pytest.mark.parametrize("transport_class", [transports.CatalogServiceGrpcTransport, transports.CatalogServiceGrpcAsyncIOTransport])
+def test_catalog_service_transport_channel_mtls_with_client_cert_source(
+ transport_class
+):
+ with mock.patch("grpc.ssl_channel_credentials", autospec=True) as grpc_ssl_channel_cred:
+ with mock.patch.object(transport_class, "create_channel") as grpc_create_channel:
+ mock_ssl_cred = mock.Mock()
+ grpc_ssl_channel_cred.return_value = mock_ssl_cred
+
+ mock_grpc_channel = mock.Mock()
+ grpc_create_channel.return_value = mock_grpc_channel
+
+ cred = ga_credentials.AnonymousCredentials()
+ with pytest.warns(DeprecationWarning):
+ with mock.patch.object(google.auth, 'default') as adc:
+ adc.return_value = (cred, None)
+ transport = transport_class(
+ host="squid.clam.whelk",
+ api_mtls_endpoint="mtls.squid.clam.whelk",
+ client_cert_source=client_cert_source_callback,
+ )
+ adc.assert_called_once()
+
+ grpc_ssl_channel_cred.assert_called_once_with(
+ certificate_chain=b"cert bytes", private_key=b"key bytes"
+ )
+ grpc_create_channel.assert_called_once_with(
+ "mtls.squid.clam.whelk:443",
+ credentials=cred,
+ credentials_file=None,
+ scopes=None,
+ ssl_credentials=mock_ssl_cred,
+ quota_project_id=None,
+ options=[
+ ("grpc.max_send_message_length", -1),
+ ("grpc.max_receive_message_length", -1),
+ ],
+ )
+ assert transport.grpc_channel == mock_grpc_channel
+ assert transport._ssl_channel_credentials == mock_ssl_cred
+
+
+# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are
+# removed from grpc/grpc_asyncio transport constructor.
+@pytest.mark.parametrize("transport_class", [transports.CatalogServiceGrpcTransport, transports.CatalogServiceGrpcAsyncIOTransport])
+def test_catalog_service_transport_channel_mtls_with_adc(
+ transport_class
+):
+ mock_ssl_cred = mock.Mock()
+ with mock.patch.multiple(
+ "google.auth.transport.grpc.SslCredentials",
+ __init__=mock.Mock(return_value=None),
+ ssl_credentials=mock.PropertyMock(return_value=mock_ssl_cred),
+ ):
+ with mock.patch.object(transport_class, "create_channel") as grpc_create_channel:
+ mock_grpc_channel = mock.Mock()
+ grpc_create_channel.return_value = mock_grpc_channel
+ mock_cred = mock.Mock()
+
+ with pytest.warns(DeprecationWarning):
+ transport = transport_class(
+ host="squid.clam.whelk",
+ credentials=mock_cred,
+ api_mtls_endpoint="mtls.squid.clam.whelk",
+ client_cert_source=None,
+ )
+
+ grpc_create_channel.assert_called_once_with(
+ "mtls.squid.clam.whelk:443",
+ credentials=mock_cred,
+ credentials_file=None,
+ scopes=None,
+ ssl_credentials=mock_ssl_cred,
+ quota_project_id=None,
+ options=[
+ ("grpc.max_send_message_length", -1),
+ ("grpc.max_receive_message_length", -1),
+ ],
+ )
+ assert transport.grpc_channel == mock_grpc_channel
+
+
+def test_catalog_service_grpc_lro_client():
+ client = CatalogServiceClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ transport='grpc',
+ )
+ transport = client.transport
+
+ # Ensure that we have an api-core operations client.
+ assert isinstance(
+ transport.operations_client,
+ operations_v1.OperationsClient,
+ )
+
+ # Ensure that subsequent calls to the property return the exact same object.
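+ # (The operations client is cached on first access, so repeated reads of the
+ # property are identity-equal.)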
+ assert transport.operations_client is transport.operations_client
+
+
+def test_catalog_service_grpc_lro_async_client():
+ client = CatalogServiceAsyncClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ transport='grpc_asyncio',
+ )
+ transport = client.transport
+
+ # Ensure that we have an api-core operations client.
+ assert isinstance(
+ transport.operations_client,
+ operations_v1.OperationsAsyncClient,
+ )
+
+ # Ensure that subsequent calls to the property return the exact same object.
+ assert transport.operations_client is transport.operations_client
+
+
+def test_aspect_type_path():
+ project = "squid"
+ location = "clam"
+ aspect_type = "whelk"
+ expected = "projects/{project}/locations/{location}/aspectTypes/{aspect_type}".format(project=project, location=location, aspect_type=aspect_type, )
+ actual = CatalogServiceClient.aspect_type_path(project, location, aspect_type)
+ assert expected == actual
+
+
+def test_parse_aspect_type_path():
+ expected = {
+ "project": "octopus",
+ "location": "oyster",
+ "aspect_type": "nudibranch",
+ }
+ path = CatalogServiceClient.aspect_type_path(**expected)
+
+ # Check that the path construction is reversible.
+ actual = CatalogServiceClient.parse_aspect_type_path(path)
+ assert expected == actual
+
+def test_entry_path():
+ project = "cuttlefish"
+ location = "mussel"
+ entry_group = "winkle"
+ entry = "nautilus"
+ expected = "projects/{project}/locations/{location}/entryGroups/{entry_group}/entries/{entry}".format(project=project, location=location, entry_group=entry_group, entry=entry, )
+ actual = CatalogServiceClient.entry_path(project, location, entry_group, entry)
+ assert expected == actual
+
+
+def test_parse_entry_path():
+ expected = {
+ "project": "scallop",
+ "location": "abalone",
+ "entry_group": "squid",
+ "entry": "clam",
+ }
+ path = CatalogServiceClient.entry_path(**expected)
+
+ # Check that the path construction is reversible.
+ actual = CatalogServiceClient.parse_entry_path(path)
+ assert expected == actual
+
+def test_entry_group_path():
+ project = "whelk"
+ location = "octopus"
+ entry_group = "oyster"
+ expected = "projects/{project}/locations/{location}/entryGroups/{entry_group}".format(project=project, location=location, entry_group=entry_group, )
+ actual = CatalogServiceClient.entry_group_path(project, location, entry_group)
+ assert expected == actual
+
+
+def test_parse_entry_group_path():
+ expected = {
+ "project": "nudibranch",
+ "location": "cuttlefish",
+ "entry_group": "mussel",
+ }
+ path = CatalogServiceClient.entry_group_path(**expected)
+
+ # Check that the path construction is reversible.
+ actual = CatalogServiceClient.parse_entry_group_path(path)
+ assert expected == actual
+
+def test_entry_type_path():
+ project = "winkle"
+ location = "nautilus"
+ entry_type = "scallop"
+ expected = "projects/{project}/locations/{location}/entryTypes/{entry_type}".format(project=project, location=location, entry_type=entry_type, )
+ actual = CatalogServiceClient.entry_type_path(project, location, entry_type)
+ assert expected == actual
+
+
+def test_parse_entry_type_path():
+ expected = {
+ "project": "abalone",
+ "location": "squid",
+ "entry_type": "clam",
+ }
+ path = CatalogServiceClient.entry_type_path(**expected)
+
+ # Check that the path construction is reversible.
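+ # (Each *_path helper formats the resource name template; the matching
+ # parse_*_path helper inverts it back into its component IDs.)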
+ actual = CatalogServiceClient.parse_entry_type_path(path) + assert expected == actual + +def test_metadata_job_path(): + project = "whelk" + location = "octopus" + metadataJob = "oyster" + expected = "projects/{project}/locations/{location}/metadataJobs/{metadataJob}".format(project=project, location=location, metadataJob=metadataJob, ) + actual = CatalogServiceClient.metadata_job_path(project, location, metadataJob) + assert expected == actual + + +def test_parse_metadata_job_path(): + expected = { + "project": "nudibranch", + "location": "cuttlefish", + "metadataJob": "mussel", + } + path = CatalogServiceClient.metadata_job_path(**expected) + + # Check that the path construction is reversible. + actual = CatalogServiceClient.parse_metadata_job_path(path) + assert expected == actual + +def test_common_billing_account_path(): + billing_account = "winkle" + expected = "billingAccounts/{billing_account}".format(billing_account=billing_account, ) + actual = CatalogServiceClient.common_billing_account_path(billing_account) + assert expected == actual + + +def test_parse_common_billing_account_path(): + expected = { + "billing_account": "nautilus", + } + path = CatalogServiceClient.common_billing_account_path(**expected) + + # Check that the path construction is reversible. + actual = CatalogServiceClient.parse_common_billing_account_path(path) + assert expected == actual + +def test_common_folder_path(): + folder = "scallop" + expected = "folders/{folder}".format(folder=folder, ) + actual = CatalogServiceClient.common_folder_path(folder) + assert expected == actual + + +def test_parse_common_folder_path(): + expected = { + "folder": "abalone", + } + path = CatalogServiceClient.common_folder_path(**expected) + + # Check that the path construction is reversible. + actual = CatalogServiceClient.parse_common_folder_path(path) + assert expected == actual + +def test_common_organization_path(): + organization = "squid" + expected = "organizations/{organization}".format(organization=organization, ) + actual = CatalogServiceClient.common_organization_path(organization) + assert expected == actual + + +def test_parse_common_organization_path(): + expected = { + "organization": "clam", + } + path = CatalogServiceClient.common_organization_path(**expected) + + # Check that the path construction is reversible. + actual = CatalogServiceClient.parse_common_organization_path(path) + assert expected == actual + +def test_common_project_path(): + project = "whelk" + expected = "projects/{project}".format(project=project, ) + actual = CatalogServiceClient.common_project_path(project) + assert expected == actual + + +def test_parse_common_project_path(): + expected = { + "project": "octopus", + } + path = CatalogServiceClient.common_project_path(**expected) + + # Check that the path construction is reversible. + actual = CatalogServiceClient.parse_common_project_path(path) + assert expected == actual + +def test_common_location_path(): + project = "oyster" + location = "nudibranch" + expected = "projects/{project}/locations/{location}".format(project=project, location=location, ) + actual = CatalogServiceClient.common_location_path(project, location) + assert expected == actual + + +def test_parse_common_location_path(): + expected = { + "project": "cuttlefish", + "location": "mussel", + } + path = CatalogServiceClient.common_location_path(**expected) + + # Check that the path construction is reversible. 
+ actual = CatalogServiceClient.parse_common_location_path(path) + assert expected == actual + + +def test_client_with_default_client_info(): + client_info = gapic_v1.client_info.ClientInfo() + + with mock.patch.object(transports.CatalogServiceTransport, '_prep_wrapped_messages') as prep: + client = CatalogServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + client_info=client_info, + ) + prep.assert_called_once_with(client_info) + + with mock.patch.object(transports.CatalogServiceTransport, '_prep_wrapped_messages') as prep: + transport_class = CatalogServiceClient.get_transport_class() + transport = transport_class( + credentials=ga_credentials.AnonymousCredentials(), + client_info=client_info, + ) + prep.assert_called_once_with(client_info) + + +def test_delete_operation(transport: str = "grpc"): + client = CatalogServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.DeleteOperationRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = None + response = client.delete_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert response is None +@pytest.mark.asyncio +async def test_delete_operation_async(transport: str = "grpc_asyncio"): + client = CatalogServiceAsyncClient( + credentials=async_anonymous_credentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.DeleteOperationRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + None + ) + response = await client.delete_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert response is None + +def test_delete_operation_field_headers(): + client = CatalogServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.DeleteOperationRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_operation), "__call__") as call: + call.return_value = None + + client.delete_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. 
+ _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "name=locations",) in kw["metadata"] +@pytest.mark.asyncio +async def test_delete_operation_field_headers_async(): + client = CatalogServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.DeleteOperationRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_operation), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + None + ) + await client.delete_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "name=locations",) in kw["metadata"] + +def test_delete_operation_from_dict(): + client = CatalogServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = None + + response = client.delete_operation( + request={ + "name": "locations", + } + ) + call.assert_called() +@pytest.mark.asyncio +async def test_delete_operation_from_dict_async(): + client = CatalogServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + None + ) + response = await client.delete_operation( + request={ + "name": "locations", + } + ) + call.assert_called() + + +def test_cancel_operation(transport: str = "grpc"): + client = CatalogServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.CancelOperationRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = None + response = client.cancel_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert response is None +@pytest.mark.asyncio +async def test_cancel_operation_async(transport: str = "grpc_asyncio"): + client = CatalogServiceAsyncClient( + credentials=async_anonymous_credentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.CancelOperationRequest() + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + None + ) + response = await client.cancel_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert response is None + +def test_cancel_operation_field_headers(): + client = CatalogServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.CancelOperationRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: + call.return_value = None + + client.cancel_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "name=locations",) in kw["metadata"] +@pytest.mark.asyncio +async def test_cancel_operation_field_headers_async(): + client = CatalogServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.CancelOperationRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + None + ) + await client.cancel_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "name=locations",) in kw["metadata"] + +def test_cancel_operation_from_dict(): + client = CatalogServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = None + + response = client.cancel_operation( + request={ + "name": "locations", + } + ) + call.assert_called() +@pytest.mark.asyncio +async def test_cancel_operation_from_dict_async(): + client = CatalogServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: + # Designate an appropriate return value for the call. 
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + None + ) + response = await client.cancel_operation( + request={ + "name": "locations", + } + ) + call.assert_called() + + +def test_get_operation(transport: str = "grpc"): + client = CatalogServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.GetOperationRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation() + response = client.get_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, operations_pb2.Operation) +@pytest.mark.asyncio +async def test_get_operation_async(transport: str = "grpc_asyncio"): + client = CatalogServiceAsyncClient( + credentials=async_anonymous_credentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.GetOperationRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation() + ) + response = await client.get_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, operations_pb2.Operation) + +def test_get_operation_field_headers(): + client = CatalogServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.GetOperationRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_operation), "__call__") as call: + call.return_value = operations_pb2.Operation() + + client.get_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "name=locations",) in kw["metadata"] +@pytest.mark.asyncio +async def test_get_operation_field_headers_async(): + client = CatalogServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.GetOperationRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object(type(client.transport.get_operation), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation() + ) + await client.get_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "name=locations",) in kw["metadata"] + +def test_get_operation_from_dict(): + client = CatalogServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation() + + response = client.get_operation( + request={ + "name": "locations", + } + ) + call.assert_called() +@pytest.mark.asyncio +async def test_get_operation_from_dict_async(): + client = CatalogServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation() + ) + response = await client.get_operation( + request={ + "name": "locations", + } + ) + call.assert_called() + + +def test_list_operations(transport: str = "grpc"): + client = CatalogServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.ListOperationsRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_operations), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.ListOperationsResponse() + response = client.list_operations(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, operations_pb2.ListOperationsResponse) +@pytest.mark.asyncio +async def test_list_operations_async(transport: str = "grpc_asyncio"): + client = CatalogServiceAsyncClient( + credentials=async_anonymous_credentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.ListOperationsRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_operations), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.ListOperationsResponse() + ) + response = await client.list_operations(request) + # Establish that the underlying gRPC stub method was called. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, operations_pb2.ListOperationsResponse) + +def test_list_operations_field_headers(): + client = CatalogServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.ListOperationsRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_operations), "__call__") as call: + call.return_value = operations_pb2.ListOperationsResponse() + + client.list_operations(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "name=locations",) in kw["metadata"] +@pytest.mark.asyncio +async def test_list_operations_field_headers_async(): + client = CatalogServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.ListOperationsRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_operations), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.ListOperationsResponse() + ) + await client.list_operations(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "name=locations",) in kw["metadata"] + +def test_list_operations_from_dict(): + client = CatalogServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_operations), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.ListOperationsResponse() + + response = client.list_operations( + request={ + "name": "locations", + } + ) + call.assert_called() +@pytest.mark.asyncio +async def test_list_operations_from_dict_async(): + client = CatalogServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_operations), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.ListOperationsResponse() + ) + response = await client.list_operations( + request={ + "name": "locations", + } + ) + call.assert_called() + + +def test_list_locations(transport: str = "grpc"): + client = CatalogServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. 
+ request = locations_pb2.ListLocationsRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_locations), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = locations_pb2.ListLocationsResponse() + response = client.list_locations(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, locations_pb2.ListLocationsResponse) +@pytest.mark.asyncio +async def test_list_locations_async(transport: str = "grpc_asyncio"): + client = CatalogServiceAsyncClient( + credentials=async_anonymous_credentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = locations_pb2.ListLocationsRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_locations), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + locations_pb2.ListLocationsResponse() + ) + response = await client.list_locations(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, locations_pb2.ListLocationsResponse) + +def test_list_locations_field_headers(): + client = CatalogServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = locations_pb2.ListLocationsRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_locations), "__call__") as call: + call.return_value = locations_pb2.ListLocationsResponse() + + client.list_locations(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "name=locations",) in kw["metadata"] +@pytest.mark.asyncio +async def test_list_locations_field_headers_async(): + client = CatalogServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = locations_pb2.ListLocationsRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_locations), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + locations_pb2.ListLocationsResponse() + ) + await client.list_locations(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. 
+ _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "name=locations",) in kw["metadata"] + +def test_list_locations_from_dict(): + client = CatalogServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_locations), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = locations_pb2.ListLocationsResponse() + + response = client.list_locations( + request={ + "name": "locations", + } + ) + call.assert_called() +@pytest.mark.asyncio +async def test_list_locations_from_dict_async(): + client = CatalogServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_locations), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + locations_pb2.ListLocationsResponse() + ) + response = await client.list_locations( + request={ + "name": "locations", + } + ) + call.assert_called() + + +def test_get_location(transport: str = "grpc"): + client = CatalogServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = locations_pb2.GetLocationRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_location), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = locations_pb2.Location() + response = client.get_location(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, locations_pb2.Location) +@pytest.mark.asyncio +async def test_get_location_async(transport: str = "grpc_asyncio"): + client = CatalogServiceAsyncClient( + credentials=async_anonymous_credentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = locations_pb2.GetLocationRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_location), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + locations_pb2.Location() + ) + response = await client.get_location(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, locations_pb2.Location) + +def test_get_location_field_headers(): + client = CatalogServiceClient( + credentials=ga_credentials.AnonymousCredentials()) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = locations_pb2.GetLocationRequest() + request.name = "locations/abc" + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object(type(client.transport.get_location), "__call__") as call:
+ call.return_value = locations_pb2.Location()
+
+ client.get_location(request)
+ # Establish that the underlying gRPC stub method was called.
+ assert len(call.mock_calls) == 1
+ _, args, _ = call.mock_calls[0]
+ assert args[0] == request
+
+ # Establish that the field header was sent.
+ _, _, kw = call.mock_calls[0]
+ assert ("x-goog-request-params", "name=locations/abc",) in kw["metadata"]
+@pytest.mark.asyncio
+async def test_get_location_field_headers_async():
+ client = CatalogServiceAsyncClient(
+ credentials=async_anonymous_credentials()
+ )
+
+ # Any value that is part of the HTTP/1.1 URI should be sent as
+ # a field header. Set these to a non-empty value.
+ request = locations_pb2.GetLocationRequest()
+ request.name = "locations/abc"
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(type(client.transport.get_location), "__call__") as call:
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
+ locations_pb2.Location()
+ )
+ await client.get_location(request)
+ # Establish that the underlying gRPC stub method was called.
+ assert len(call.mock_calls) == 1
+ _, args, _ = call.mock_calls[0]
+ assert args[0] == request
+
+ # Establish that the field header was sent.
+ _, _, kw = call.mock_calls[0]
+ assert ("x-goog-request-params", "name=locations/abc",) in kw["metadata"]
+
+def test_get_location_from_dict():
+ client = CatalogServiceClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ )
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(type(client.transport.get_location), "__call__") as call:
+ # Designate an appropriate return value for the call.
+ call.return_value = locations_pb2.Location()
+
+ response = client.get_location(
+ request={
+ "name": "locations/abc",
+ }
+ )
+ call.assert_called()
+@pytest.mark.asyncio
+async def test_get_location_from_dict_async():
+ client = CatalogServiceAsyncClient(
+ credentials=async_anonymous_credentials(),
+ )
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(type(client.transport.get_location), "__call__") as call:
+ # Designate an appropriate return value for the call.
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
+ locations_pb2.Location()
+ )
+ response = await client.get_location(
+ request={
+ "name": "locations/abc",
+ }
+ )
+ call.assert_called()
+
+
+def test_transport_close_grpc():
+ client = CatalogServiceClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ transport="grpc"
+ )
+ with mock.patch.object(type(getattr(client.transport, "_grpc_channel")), "close") as close:
+ with client:
+ close.assert_not_called()
+ close.assert_called_once()
+
+
+@pytest.mark.asyncio
+async def test_transport_close_grpc_asyncio():
+ client = CatalogServiceAsyncClient(
+ credentials=async_anonymous_credentials(),
+ transport="grpc_asyncio"
+ )
+ with mock.patch.object(type(getattr(client.transport, "_grpc_channel")), "close") as close:
+ async with client:
+ close.assert_not_called()
+ close.assert_called_once()
+
+
+def test_client_ctx():
+ transports = [
+ 'grpc',
+ ]
+ for transport in transports:
+ client = CatalogServiceClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ transport=transport
+ )
+ # Test client calls underlying transport. 
+ with mock.patch.object(type(client.transport), "close") as close: + close.assert_not_called() + with client: + pass + close.assert_called() + +@pytest.mark.parametrize("client_class,transport_class", [ + (CatalogServiceClient, transports.CatalogServiceGrpcTransport), + (CatalogServiceAsyncClient, transports.CatalogServiceGrpcAsyncIOTransport), +]) +def test_api_key_credentials(client_class, transport_class): + with mock.patch.object( + google.auth._default, "get_api_key_credentials", create=True + ) as get_api_key_credentials: + mock_cred = mock.Mock() + get_api_key_credentials.return_value = mock_cred + options = client_options.ClientOptions() + options.api_key = "api_key" + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options) + patched.assert_called_once_with( + credentials=mock_cred, + credentials_file=None, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) diff --git a/owl-bot-staging/google-cloud-dataplex/v1/tests/unit/gapic/dataplex_v1/test_content_service.py b/owl-bot-staging/google-cloud-dataplex/v1/tests/unit/gapic/dataplex_v1/test_content_service.py new file mode 100644 index 000000000000..5d28ebbdf192 --- /dev/null +++ b/owl-bot-staging/google-cloud-dataplex/v1/tests/unit/gapic/dataplex_v1/test_content_service.py @@ -0,0 +1,5202 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +import os +# try/except added for compatibility with python < 3.8 +try: + from unittest import mock + from unittest.mock import AsyncMock # pragma: NO COVER +except ImportError: # pragma: NO COVER + import mock + +import grpc +from grpc.experimental import aio +import math +import pytest +from google.api_core import api_core_version +from proto.marshal.rules.dates import DurationRule, TimestampRule +from proto.marshal.rules import wrappers + +try: + from google.auth.aio import credentials as ga_credentials_async + HAS_GOOGLE_AUTH_AIO = True +except ImportError: # pragma: NO COVER + HAS_GOOGLE_AUTH_AIO = False + +from google.api_core import client_options +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import grpc_helpers +from google.api_core import grpc_helpers_async +from google.api_core import path_template +from google.api_core import retry as retries +from google.auth import credentials as ga_credentials +from google.auth.exceptions import MutualTLSChannelError +from google.cloud.dataplex_v1.services.content_service import ContentServiceAsyncClient +from google.cloud.dataplex_v1.services.content_service import ContentServiceClient +from google.cloud.dataplex_v1.services.content_service import pagers +from google.cloud.dataplex_v1.services.content_service import transports +from google.cloud.dataplex_v1.types import analyze +from google.cloud.dataplex_v1.types import content +from google.cloud.dataplex_v1.types import content as gcd_content +from google.cloud.location import locations_pb2 +from google.iam.v1 import iam_policy_pb2 # type: ignore +from google.iam.v1 import options_pb2 # type: ignore +from google.iam.v1 import policy_pb2 # type: ignore +from google.longrunning import operations_pb2 # type: ignore +from google.oauth2 import service_account +from google.protobuf import field_mask_pb2 # type: ignore +from google.protobuf import timestamp_pb2 # type: ignore +from google.type import expr_pb2 # type: ignore +import google.auth + + +async def mock_async_gen(data, chunk_size=1): + for i in range(0, len(data)): # pragma: NO COVER + chunk = data[i : i + chunk_size] + yield chunk.encode("utf-8") + +def client_cert_source_callback(): + return b"cert bytes", b"key bytes" + +# TODO: use async auth anon credentials by default once the minimum version of google-auth is upgraded. +# See related issue: https://github.com/googleapis/gapic-generator-python/issues/2107. +def async_anonymous_credentials(): + if HAS_GOOGLE_AUTH_AIO: + return ga_credentials_async.AnonymousCredentials() + return ga_credentials.AnonymousCredentials() + +# If default endpoint is localhost, then default mtls endpoint will be the same. +# This method modifies the default endpoint so the client can produce a different +# mtls endpoint for endpoint testing purposes. +def modify_default_endpoint(client): + return "foo.googleapis.com" if ("localhost" in client.DEFAULT_ENDPOINT) else client.DEFAULT_ENDPOINT + +# If default endpoint template is localhost, then default mtls endpoint will be the same. +# This method modifies the default endpoint template so the client can produce a different +# mtls endpoint for endpoint testing purposes. 
+def modify_default_endpoint_template(client):
+ return "test.{UNIVERSE_DOMAIN}" if ("localhost" in client._DEFAULT_ENDPOINT_TEMPLATE) else client._DEFAULT_ENDPOINT_TEMPLATE
+
+
+def test__get_default_mtls_endpoint():
+ api_endpoint = "example.googleapis.com"
+ api_mtls_endpoint = "example.mtls.googleapis.com"
+ sandbox_endpoint = "example.sandbox.googleapis.com"
+ sandbox_mtls_endpoint = "example.mtls.sandbox.googleapis.com"
+ non_googleapi = "api.example.com"
+
+ assert ContentServiceClient._get_default_mtls_endpoint(None) is None
+ assert ContentServiceClient._get_default_mtls_endpoint(api_endpoint) == api_mtls_endpoint
+ assert ContentServiceClient._get_default_mtls_endpoint(api_mtls_endpoint) == api_mtls_endpoint
+ assert ContentServiceClient._get_default_mtls_endpoint(sandbox_endpoint) == sandbox_mtls_endpoint
+ assert ContentServiceClient._get_default_mtls_endpoint(sandbox_mtls_endpoint) == sandbox_mtls_endpoint
+ assert ContentServiceClient._get_default_mtls_endpoint(non_googleapi) == non_googleapi
+
+def test__read_environment_variables():
+ assert ContentServiceClient._read_environment_variables() == (False, "auto", None)
+
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}):
+ assert ContentServiceClient._read_environment_variables() == (True, "auto", None)
+
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "false"}):
+ assert ContentServiceClient._read_environment_variables() == (False, "auto", None)
+
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"}):
+ with pytest.raises(ValueError) as excinfo:
+ ContentServiceClient._read_environment_variables()
+ assert str(excinfo.value) == "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`"
+
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}):
+ assert ContentServiceClient._read_environment_variables() == (False, "never", None)
+
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}):
+ assert ContentServiceClient._read_environment_variables() == (False, "always", None)
+
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"}):
+ assert ContentServiceClient._read_environment_variables() == (False, "auto", None)
+
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}):
+ with pytest.raises(MutualTLSChannelError) as excinfo:
+ ContentServiceClient._read_environment_variables()
+ assert str(excinfo.value) == "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`"
+
+ with mock.patch.dict(os.environ, {"GOOGLE_CLOUD_UNIVERSE_DOMAIN": "foo.com"}):
+ assert ContentServiceClient._read_environment_variables() == (False, "auto", "foo.com")
+
+def test__get_client_cert_source():
+ mock_provided_cert_source = mock.Mock()
+ mock_default_cert_source = mock.Mock()
+
+ assert ContentServiceClient._get_client_cert_source(None, False) is None
+ assert ContentServiceClient._get_client_cert_source(mock_provided_cert_source, False) is None
+ assert ContentServiceClient._get_client_cert_source(mock_provided_cert_source, True) == mock_provided_cert_source
+
+ with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=True):
+ with mock.patch('google.auth.transport.mtls.default_client_cert_source', return_value=mock_default_cert_source):
+ assert ContentServiceClient._get_client_cert_source(None, True) is mock_default_cert_source
+ assert ContentServiceClient._get_client_cert_source(mock_provided_cert_source, "true") is mock_provided_cert_source
+
+@mock.patch.object(ContentServiceClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(ContentServiceClient))
+@mock.patch.object(ContentServiceAsyncClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(ContentServiceAsyncClient))
+def test__get_api_endpoint():
+ api_override = "foo.com"
+ mock_client_cert_source = mock.Mock()
+ default_universe = ContentServiceClient._DEFAULT_UNIVERSE
+ default_endpoint = ContentServiceClient._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=default_universe)
+ mock_universe = "bar.com"
+ mock_endpoint = ContentServiceClient._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=mock_universe)
+
+ assert ContentServiceClient._get_api_endpoint(api_override, mock_client_cert_source, default_universe, "always") == api_override
+ assert ContentServiceClient._get_api_endpoint(None, mock_client_cert_source, default_universe, "auto") == ContentServiceClient.DEFAULT_MTLS_ENDPOINT
+ assert ContentServiceClient._get_api_endpoint(None, None, default_universe, "auto") == default_endpoint
+ assert ContentServiceClient._get_api_endpoint(None, None, default_universe, "always") == ContentServiceClient.DEFAULT_MTLS_ENDPOINT
+ assert ContentServiceClient._get_api_endpoint(None, mock_client_cert_source, default_universe, "always") == ContentServiceClient.DEFAULT_MTLS_ENDPOINT
+ assert ContentServiceClient._get_api_endpoint(None, None, mock_universe, "never") == mock_endpoint
+ assert ContentServiceClient._get_api_endpoint(None, None, default_universe, "never") == default_endpoint
+
+ with pytest.raises(MutualTLSChannelError) as excinfo:
+ ContentServiceClient._get_api_endpoint(None, mock_client_cert_source, mock_universe, "auto")
+ assert str(excinfo.value) == "mTLS is not supported in any universe other than googleapis.com."
+
+
+def test__get_universe_domain():
+ client_universe_domain = "foo.com"
+ universe_domain_env = "bar.com"
+
+ assert ContentServiceClient._get_universe_domain(client_universe_domain, universe_domain_env) == client_universe_domain
+ assert ContentServiceClient._get_universe_domain(None, universe_domain_env) == universe_domain_env
+ assert ContentServiceClient._get_universe_domain(None, None) == ContentServiceClient._DEFAULT_UNIVERSE
+
+ with pytest.raises(ValueError) as excinfo:
+ ContentServiceClient._get_universe_domain("", None)
+ assert str(excinfo.value) == "Universe Domain cannot be an empty string." 
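+
+
+# An illustrative sketch, not part of the generated test surface: it restates
+# the resolution order exercised above from a caller's point of view. The
+# helper name `_universe_domain_resolution_example` and the placeholder
+# domains "client.example.com" / "env.example.com" are invented for this
+# example; the private attributes it touches are the ones the tests above
+# already rely on.
+def _universe_domain_resolution_example(): # pragma: NO COVER
+ # An explicit client setting wins over the environment variable.
+ assert ContentServiceClient._get_universe_domain("client.example.com", "env.example.com") == "client.example.com"
+ # The environment variable wins over the built-in default universe.
+ assert ContentServiceClient._get_universe_domain(None, "env.example.com") == "env.example.com"
+ # The resolved universe is then substituted into the endpoint template, so
+ # the default universe yields the public Dataplex endpoint.
+ assert ContentServiceClient._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN="googleapis.com") == "dataplex.googleapis.com"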
+ + +@pytest.mark.parametrize("client_class,transport_name", [ + (ContentServiceClient, "grpc"), + (ContentServiceAsyncClient, "grpc_asyncio"), +]) +def test_content_service_client_from_service_account_info(client_class, transport_name): + creds = ga_credentials.AnonymousCredentials() + with mock.patch.object(service_account.Credentials, 'from_service_account_info') as factory: + factory.return_value = creds + info = {"valid": True} + client = client_class.from_service_account_info(info, transport=transport_name) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + assert client.transport._host == ( + 'dataplex.googleapis.com:443' + ) + + +@pytest.mark.parametrize("transport_class,transport_name", [ + (transports.ContentServiceGrpcTransport, "grpc"), + (transports.ContentServiceGrpcAsyncIOTransport, "grpc_asyncio"), +]) +def test_content_service_client_service_account_always_use_jwt(transport_class, transport_name): + with mock.patch.object(service_account.Credentials, 'with_always_use_jwt_access', create=True) as use_jwt: + creds = service_account.Credentials(None, None, None) + transport = transport_class(credentials=creds, always_use_jwt_access=True) + use_jwt.assert_called_once_with(True) + + with mock.patch.object(service_account.Credentials, 'with_always_use_jwt_access', create=True) as use_jwt: + creds = service_account.Credentials(None, None, None) + transport = transport_class(credentials=creds, always_use_jwt_access=False) + use_jwt.assert_not_called() + + +@pytest.mark.parametrize("client_class,transport_name", [ + (ContentServiceClient, "grpc"), + (ContentServiceAsyncClient, "grpc_asyncio"), +]) +def test_content_service_client_from_service_account_file(client_class, transport_name): + creds = ga_credentials.AnonymousCredentials() + with mock.patch.object(service_account.Credentials, 'from_service_account_file') as factory: + factory.return_value = creds + client = client_class.from_service_account_file("dummy/file/path.json", transport=transport_name) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + client = client_class.from_service_account_json("dummy/file/path.json", transport=transport_name) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + assert client.transport._host == ( + 'dataplex.googleapis.com:443' + ) + + +def test_content_service_client_get_transport_class(): + transport = ContentServiceClient.get_transport_class() + available_transports = [ + transports.ContentServiceGrpcTransport, + ] + assert transport in available_transports + + transport = ContentServiceClient.get_transport_class("grpc") + assert transport == transports.ContentServiceGrpcTransport + + +@pytest.mark.parametrize("client_class,transport_class,transport_name", [ + (ContentServiceClient, transports.ContentServiceGrpcTransport, "grpc"), + (ContentServiceAsyncClient, transports.ContentServiceGrpcAsyncIOTransport, "grpc_asyncio"), +]) +@mock.patch.object(ContentServiceClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(ContentServiceClient)) +@mock.patch.object(ContentServiceAsyncClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(ContentServiceAsyncClient)) +def test_content_service_client_client_options(client_class, transport_class, transport_name): + # Check that if channel is provided we won't create a new one. 
+ with mock.patch.object(ContentServiceClient, 'get_transport_class') as gtc: + transport = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ) + client = client_class(transport=transport) + gtc.assert_not_called() + + # Check that if channel is provided via str we will create a new one. + with mock.patch.object(ContentServiceClient, 'get_transport_class') as gtc: + client = client_class(transport=transport_name) + gtc.assert_called() + + # Check the case api_endpoint is provided. + options = client_options.ClientOptions(api_endpoint="squid.clam.whelk") + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(transport=transport_name, client_options=options) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host="squid.clam.whelk", + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is + # "never". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is + # "always". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_MTLS_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has + # unsupported value. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): + with pytest.raises(MutualTLSChannelError) as excinfo: + client = client_class(transport=transport_name) + assert str(excinfo.value) == "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + + # Check the case GOOGLE_API_USE_CLIENT_CERTIFICATE has unsupported value. 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"}):
+ with pytest.raises(ValueError) as excinfo:
+ client = client_class(transport=transport_name)
+ assert str(excinfo.value) == "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`"
+
+ # Check the case quota_project_id is provided
+ options = client_options.ClientOptions(quota_project_id="octopus")
+ with mock.patch.object(transport_class, '__init__') as patched:
+ patched.return_value = None
+ client = client_class(client_options=options, transport=transport_name)
+ patched.assert_called_once_with(
+ credentials=None,
+ credentials_file=None,
+ host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE),
+ scopes=None,
+ client_cert_source_for_mtls=None,
+ quota_project_id="octopus",
+ client_info=transports.base.DEFAULT_CLIENT_INFO,
+ always_use_jwt_access=True,
+ api_audience=None,
+ )
+ # Check the case api_audience is provided
+ options = client_options.ClientOptions(api_audience="https://language.googleapis.com")
+ with mock.patch.object(transport_class, '__init__') as patched:
+ patched.return_value = None
+ client = client_class(client_options=options, transport=transport_name)
+ patched.assert_called_once_with(
+ credentials=None,
+ credentials_file=None,
+ host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE),
+ scopes=None,
+ client_cert_source_for_mtls=None,
+ quota_project_id=None,
+ client_info=transports.base.DEFAULT_CLIENT_INFO,
+ always_use_jwt_access=True,
+ api_audience="https://language.googleapis.com"
+ )
+
+@pytest.mark.parametrize("client_class,transport_class,transport_name,use_client_cert_env", [
+ (ContentServiceClient, transports.ContentServiceGrpcTransport, "grpc", "true"),
+ (ContentServiceAsyncClient, transports.ContentServiceGrpcAsyncIOTransport, "grpc_asyncio", "true"),
+ (ContentServiceClient, transports.ContentServiceGrpcTransport, "grpc", "false"),
+ (ContentServiceAsyncClient, transports.ContentServiceGrpcAsyncIOTransport, "grpc_asyncio", "false"),
+])
+@mock.patch.object(ContentServiceClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(ContentServiceClient))
+@mock.patch.object(ContentServiceAsyncClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(ContentServiceAsyncClient))
+@mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"})
+def test_content_service_client_mtls_env_auto(client_class, transport_class, transport_name, use_client_cert_env):
+ # This tests the endpoint autoswitch behavior. Endpoint is autoswitched to the default
+ # mtls endpoint, if GOOGLE_API_USE_CLIENT_CERTIFICATE is "true" and client cert exists.
+
+ # Check the case client_cert_source is provided. Whether client cert is used depends on
+ # GOOGLE_API_USE_CLIENT_CERTIFICATE value. 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}): + options = client_options.ClientOptions(client_cert_source=client_cert_source_callback) + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + + if use_client_cert_env == "false": + expected_client_cert_source = None + expected_host = client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE) + else: + expected_client_cert_source = client_cert_source_callback + expected_host = client.DEFAULT_MTLS_ENDPOINT + + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=expected_host, + scopes=None, + client_cert_source_for_mtls=expected_client_cert_source, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case ADC client cert is provided. Whether client cert is used depends on + # GOOGLE_API_USE_CLIENT_CERTIFICATE value. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}): + with mock.patch.object(transport_class, '__init__') as patched: + with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=True): + with mock.patch('google.auth.transport.mtls.default_client_cert_source', return_value=client_cert_source_callback): + if use_client_cert_env == "false": + expected_host = client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE) + expected_client_cert_source = None + else: + expected_host = client.DEFAULT_MTLS_ENDPOINT + expected_client_cert_source = client_cert_source_callback + + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=expected_host, + scopes=None, + client_cert_source_for_mtls=expected_client_cert_source, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case client_cert_source and ADC client cert are not provided. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}): + with mock.patch.object(transport_class, '__init__') as patched: + with mock.patch("google.auth.transport.mtls.has_default_client_cert_source", return_value=False): + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +@pytest.mark.parametrize("client_class", [ + ContentServiceClient, ContentServiceAsyncClient +]) +@mock.patch.object(ContentServiceClient, "DEFAULT_ENDPOINT", modify_default_endpoint(ContentServiceClient)) +@mock.patch.object(ContentServiceAsyncClient, "DEFAULT_ENDPOINT", modify_default_endpoint(ContentServiceAsyncClient)) +def test_content_service_client_get_mtls_endpoint_and_cert_source(client_class): + mock_client_cert_source = mock.Mock() + + # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "true". 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + mock_api_endpoint = "foo" + options = client_options.ClientOptions(client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint) + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source(options) + assert api_endpoint == mock_api_endpoint + assert cert_source == mock_client_cert_source + + # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "false". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "false"}): + mock_client_cert_source = mock.Mock() + mock_api_endpoint = "foo" + options = client_options.ClientOptions(client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint) + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source(options) + assert api_endpoint == mock_api_endpoint + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "never". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "always". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert doesn't exist. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=False): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert exists. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=True): + with mock.patch('google.auth.transport.mtls.default_client_cert_source', return_value=mock_client_cert_source): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT + assert cert_source == mock_client_cert_source + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has + # unsupported value. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): + with pytest.raises(MutualTLSChannelError) as excinfo: + client_class.get_mtls_endpoint_and_cert_source() + + assert str(excinfo.value) == "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + + # Check the case GOOGLE_API_USE_CLIENT_CERTIFICATE has unsupported value. 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"}): + with pytest.raises(ValueError) as excinfo: + client_class.get_mtls_endpoint_and_cert_source() + + assert str(excinfo.value) == "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" + +@pytest.mark.parametrize("client_class", [ + ContentServiceClient, ContentServiceAsyncClient +]) +@mock.patch.object(ContentServiceClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(ContentServiceClient)) +@mock.patch.object(ContentServiceAsyncClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(ContentServiceAsyncClient)) +def test_content_service_client_client_api_endpoint(client_class): + mock_client_cert_source = client_cert_source_callback + api_override = "foo.com" + default_universe = ContentServiceClient._DEFAULT_UNIVERSE + default_endpoint = ContentServiceClient._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=default_universe) + mock_universe = "bar.com" + mock_endpoint = ContentServiceClient._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=mock_universe) + + # If ClientOptions.api_endpoint is set and GOOGLE_API_USE_CLIENT_CERTIFICATE="true", + # use ClientOptions.api_endpoint as the api endpoint regardless. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + with mock.patch("google.auth.transport.requests.AuthorizedSession.configure_mtls_channel"): + options = client_options.ClientOptions(client_cert_source=mock_client_cert_source, api_endpoint=api_override) + client = client_class(client_options=options, credentials=ga_credentials.AnonymousCredentials()) + assert client.api_endpoint == api_override + + # If ClientOptions.api_endpoint is not set and GOOGLE_API_USE_MTLS_ENDPOINT="never", + # use the _DEFAULT_ENDPOINT_TEMPLATE populated with GDU as the api endpoint. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + client = client_class(credentials=ga_credentials.AnonymousCredentials()) + assert client.api_endpoint == default_endpoint + + # If ClientOptions.api_endpoint is not set and GOOGLE_API_USE_MTLS_ENDPOINT="always", + # use the DEFAULT_MTLS_ENDPOINT as the api endpoint. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + client = client_class(credentials=ga_credentials.AnonymousCredentials()) + assert client.api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT + + # If ClientOptions.api_endpoint is not set, GOOGLE_API_USE_MTLS_ENDPOINT="auto" (default), + # GOOGLE_API_USE_CLIENT_CERTIFICATE="false" (default), default cert source doesn't exist, + # and ClientOptions.universe_domain="bar.com", + # use the _DEFAULT_ENDPOINT_TEMPLATE populated with universe domain as the api endpoint. + options = client_options.ClientOptions() + universe_exists = hasattr(options, "universe_domain") + if universe_exists: + options = client_options.ClientOptions(universe_domain=mock_universe) + client = client_class(client_options=options, credentials=ga_credentials.AnonymousCredentials()) + else: + client = client_class(client_options=options, credentials=ga_credentials.AnonymousCredentials()) + assert client.api_endpoint == (mock_endpoint if universe_exists else default_endpoint) + assert client.universe_domain == (mock_universe if universe_exists else default_universe) + + # If ClientOptions does not have a universe domain attribute and GOOGLE_API_USE_MTLS_ENDPOINT="never", + # use the _DEFAULT_ENDPOINT_TEMPLATE populated with GDU as the api endpoint. 
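+ # (Deleting the attribute below simulates a ClientOptions without
+ # universe_domain support, e.g. one from an older google-api-core.)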
+ options = client_options.ClientOptions() + if hasattr(options, "universe_domain"): + delattr(options, "universe_domain") + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + client = client_class(client_options=options, credentials=ga_credentials.AnonymousCredentials()) + assert client.api_endpoint == default_endpoint + + +@pytest.mark.parametrize("client_class,transport_class,transport_name", [ + (ContentServiceClient, transports.ContentServiceGrpcTransport, "grpc"), + (ContentServiceAsyncClient, transports.ContentServiceGrpcAsyncIOTransport, "grpc_asyncio"), +]) +def test_content_service_client_client_options_scopes(client_class, transport_class, transport_name): + # Check the case scopes are provided. + options = client_options.ClientOptions( + scopes=["1", "2"], + ) + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE), + scopes=["1", "2"], + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + +@pytest.mark.parametrize("client_class,transport_class,transport_name,grpc_helpers", [ + (ContentServiceClient, transports.ContentServiceGrpcTransport, "grpc", grpc_helpers), + (ContentServiceAsyncClient, transports.ContentServiceGrpcAsyncIOTransport, "grpc_asyncio", grpc_helpers_async), +]) +def test_content_service_client_client_options_credentials_file(client_class, transport_class, transport_name, grpc_helpers): + # Check the case credentials file is provided. + options = client_options.ClientOptions( + credentials_file="credentials.json" + ) + + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file="credentials.json", + host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + +def test_content_service_client_client_options_from_dict(): + with mock.patch('google.cloud.dataplex_v1.services.content_service.transports.ContentServiceGrpcTransport.__init__') as grpc_transport: + grpc_transport.return_value = None + client = ContentServiceClient( + client_options={'api_endpoint': 'squid.clam.whelk'} + ) + grpc_transport.assert_called_once_with( + credentials=None, + credentials_file=None, + host="squid.clam.whelk", + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +@pytest.mark.parametrize("client_class,transport_class,transport_name,grpc_helpers", [ + (ContentServiceClient, transports.ContentServiceGrpcTransport, "grpc", grpc_helpers), + (ContentServiceAsyncClient, transports.ContentServiceGrpcAsyncIOTransport, "grpc_asyncio", grpc_helpers_async), +]) +def test_content_service_client_create_channel_credentials_file(client_class, transport_class, transport_name, grpc_helpers): + # Check the case credentials file is provided. 
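+ # The client should forward credentials_file to the transport untouched
+ # (credentials=None); loading the file is the transport's job, as the
+ # channel-creation check further down verifies.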
+ options = client_options.ClientOptions(
+ credentials_file="credentials.json"
+ )
+
+ with mock.patch.object(transport_class, '__init__') as patched:
+ patched.return_value = None
+ client = client_class(client_options=options, transport=transport_name)
+ patched.assert_called_once_with(
+ credentials=None,
+ credentials_file="credentials.json",
+ host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE),
+ scopes=None,
+ client_cert_source_for_mtls=None,
+ quota_project_id=None,
+ client_info=transports.base.DEFAULT_CLIENT_INFO,
+ always_use_jwt_access=True,
+ api_audience=None,
+ )
+
+ # test that the credentials from file are saved and used as the credentials.
+ with mock.patch.object(
+ google.auth, "load_credentials_from_file", autospec=True
+ ) as load_creds, mock.patch.object(
+ google.auth, "default", autospec=True
+ ) as adc, mock.patch.object(
+ grpc_helpers, "create_channel"
+ ) as create_channel:
+ creds = ga_credentials.AnonymousCredentials()
+ file_creds = ga_credentials.AnonymousCredentials()
+ load_creds.return_value = (file_creds, None)
+ adc.return_value = (creds, None)
+ client = client_class(client_options=options, transport=transport_name)
+ create_channel.assert_called_with(
+ "dataplex.googleapis.com:443",
+ credentials=file_creds,
+ credentials_file=None,
+ quota_project_id=None,
+ default_scopes=(
+ 'https://www.googleapis.com/auth/cloud-platform',
+ ),
+ scopes=None,
+ default_host="dataplex.googleapis.com",
+ ssl_credentials=None,
+ options=[
+ ("grpc.max_send_message_length", -1),
+ ("grpc.max_receive_message_length", -1),
+ ],
+ )
+
+
+@pytest.mark.parametrize("request_type", [
+ gcd_content.CreateContentRequest,
+ dict,
+])
+def test_create_content(request_type, transport: str = 'grpc'):
+ client = ContentServiceClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ transport=transport,
+ )
+
+ # Everything is optional in proto3 as far as the runtime is concerned,
+ # and we are mocking out the actual API, so just send an empty request.
+ request = request_type()
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(
+ type(client.transport.create_content),
+ '__call__') as call:
+ # Designate an appropriate return value for the call.
+ call.return_value = analyze.Content(
+ name='name_value',
+ uid='uid_value',
+ path='path_value',
+ description='description_value',
+ data_text='data_text_value',
+ )
+ response = client.create_content(request)
+
+ # Establish that the underlying gRPC stub method was called.
+ assert len(call.mock_calls) == 1
+ _, args, _ = call.mock_calls[0]
+ request = gcd_content.CreateContentRequest()
+ assert args[0] == request
+
+ # Establish that the response is the type that we expect.
+ assert isinstance(response, analyze.Content)
+ assert response.name == 'name_value'
+ assert response.uid == 'uid_value'
+ assert response.path == 'path_value'
+ assert response.description == 'description_value'
+
+
+def test_create_content_non_empty_request_with_auto_populated_field():
+ # This test is a coverage failsafe to make sure that UUID4 fields are
+ # automatically populated, according to AIP-4235, with non-empty requests.
+ client = ContentServiceClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ transport='grpc',
+ )
+
+ # Populate all string fields in the request which are not UUID4
+ # since we want to check that UUID4 are populated automatically
+ # if they meet the requirements of AIP 4235.
+ request = gcd_content.CreateContentRequest( + parent='parent_value', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_content), + '__call__') as call: + call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client.create_content(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == gcd_content.CreateContentRequest( + parent='parent_value', + ) + +def test_create_content_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = ContentServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.create_content in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client._transport._wrapped_methods[client._transport.create_content] = mock_rpc + request = {} + client.create_content(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.create_content(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + +@pytest.mark.asyncio +async def test_create_content_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = ContentServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._client._transport.create_content in client._client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[client._client._transport.create_content] = mock_rpc + + request = {} + await client.create_content(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + await client.create_content(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + +@pytest.mark.asyncio +async def test_create_content_async(transport: str = 'grpc_asyncio', request_type=gcd_content.CreateContentRequest): + client = ContentServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_content), + '__call__') as call: + # Designate an appropriate return value for the call. 
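+ # grpc_helpers_async.FakeUnaryUnaryCall wraps the response so the mocked
+ # stub can be awaited like a real unary-unary gRPC call.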
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(analyze.Content(
+ name='name_value',
+ uid='uid_value',
+ path='path_value',
+ description='description_value',
+ ))
+ response = await client.create_content(request)
+
+ # Establish that the underlying gRPC stub method was called.
+ assert len(call.mock_calls)
+ _, args, _ = call.mock_calls[0]
+ request = gcd_content.CreateContentRequest()
+ assert args[0] == request
+
+ # Establish that the response is the type that we expect.
+ assert isinstance(response, analyze.Content)
+ assert response.name == 'name_value'
+ assert response.uid == 'uid_value'
+ assert response.path == 'path_value'
+ assert response.description == 'description_value'
+
+
+@pytest.mark.asyncio
+async def test_create_content_async_from_dict():
+ await test_create_content_async(request_type=dict)
+
+def test_create_content_field_headers():
+ client = ContentServiceClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ )
+
+ # Any value that is part of the HTTP/1.1 URI should be sent as
+ # a field header. Set these to a non-empty value.
+ request = gcd_content.CreateContentRequest()
+
+ request.parent = 'parent_value'
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(
+ type(client.transport.create_content),
+ '__call__') as call:
+ call.return_value = analyze.Content()
+ client.create_content(request)
+
+ # Establish that the underlying gRPC stub method was called.
+ assert len(call.mock_calls) == 1
+ _, args, _ = call.mock_calls[0]
+ assert args[0] == request
+
+ # Establish that the field header was sent.
+ _, _, kw = call.mock_calls[0]
+ assert (
+ 'x-goog-request-params',
+ 'parent=parent_value',
+ ) in kw['metadata']
+
+
+@pytest.mark.asyncio
+async def test_create_content_field_headers_async():
+ client = ContentServiceAsyncClient(
+ credentials=async_anonymous_credentials(),
+ )
+
+ # Any value that is part of the HTTP/1.1 URI should be sent as
+ # a field header. Set these to a non-empty value.
+ request = gcd_content.CreateContentRequest()
+
+ request.parent = 'parent_value'
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(
+ type(client.transport.create_content),
+ '__call__') as call:
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(analyze.Content())
+ await client.create_content(request)
+
+ # Establish that the underlying gRPC stub method was called.
+ assert len(call.mock_calls)
+ _, args, _ = call.mock_calls[0]
+ assert args[0] == request
+
+ # Establish that the field header was sent.
+ _, _, kw = call.mock_calls[0]
+ assert (
+ 'x-goog-request-params',
+ 'parent=parent_value',
+ ) in kw['metadata']
+
+
+def test_create_content_flattened():
+ client = ContentServiceClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ )
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(
+ type(client.transport.create_content),
+ '__call__') as call:
+ # Designate an appropriate return value for the call.
+ call.return_value = analyze.Content()
+ # Call the method with a truthy value for each flattened field,
+ # using the keyword arguments to the method.
+ client.create_content(
+ parent='parent_value',
+ content=analyze.Content(name='name_value'),
+ )
+
+ # Establish that the underlying call was made with the expected
+ # request object values.
+ assert len(call.mock_calls) == 1
+ _, args, _ = call.mock_calls[0]
+ arg = args[0].parent
+ mock_val = 'parent_value'
+ assert arg == mock_val
+ arg = args[0].content
+ mock_val = analyze.Content(name='name_value')
+ assert arg == mock_val
+
+
+def test_create_content_flattened_error():
+ client = ContentServiceClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ )
+
+ # Attempting to call a method with both a request object and flattened
+ # fields is an error.
+ with pytest.raises(ValueError):
+ client.create_content(
+ gcd_content.CreateContentRequest(),
+ parent='parent_value',
+ content=analyze.Content(name='name_value'),
+ )
+
+@pytest.mark.asyncio
+async def test_create_content_flattened_async():
+ client = ContentServiceAsyncClient(
+ credentials=async_anonymous_credentials(),
+ )
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(
+ type(client.transport.create_content),
+ '__call__') as call:
+ # Designate an appropriate return value for the call.
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(analyze.Content())
+ # Call the method with a truthy value for each flattened field,
+ # using the keyword arguments to the method.
+ response = await client.create_content(
+ parent='parent_value',
+ content=analyze.Content(name='name_value'),
+ )
+
+ # Establish that the underlying call was made with the expected
+ # request object values.
+ assert len(call.mock_calls)
+ _, args, _ = call.mock_calls[0]
+ arg = args[0].parent
+ mock_val = 'parent_value'
+ assert arg == mock_val
+ arg = args[0].content
+ mock_val = analyze.Content(name='name_value')
+ assert arg == mock_val
+
+@pytest.mark.asyncio
+async def test_create_content_flattened_error_async():
+ client = ContentServiceAsyncClient(
+ credentials=async_anonymous_credentials(),
+ )
+
+ # Attempting to call a method with both a request object and flattened
+ # fields is an error.
+ with pytest.raises(ValueError):
+ await client.create_content(
+ gcd_content.CreateContentRequest(),
+ parent='parent_value',
+ content=analyze.Content(name='name_value'),
+ )
+
+
+@pytest.mark.parametrize("request_type", [
+ gcd_content.UpdateContentRequest,
+ dict,
+])
+def test_update_content(request_type, transport: str = 'grpc'):
+ client = ContentServiceClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ transport=transport,
+ )
+
+ # Everything is optional in proto3 as far as the runtime is concerned,
+ # and we are mocking out the actual API, so just send an empty request.
+ request = request_type()
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(
+ type(client.transport.update_content),
+ '__call__') as call:
+ # Designate an appropriate return value for the call.
+ call.return_value = analyze.Content(
+ name='name_value',
+ uid='uid_value',
+ path='path_value',
+ description='description_value',
+ data_text='data_text_value',
+ )
+ response = client.update_content(request)
+
+ # Establish that the underlying gRPC stub method was called.
+ assert len(call.mock_calls) == 1
+ _, args, _ = call.mock_calls[0]
+ request = gcd_content.UpdateContentRequest()
+ assert args[0] == request
+
+ # Establish that the response is the type that we expect.
+ assert isinstance(response, analyze.Content) + assert response.name == 'name_value' + assert response.uid == 'uid_value' + assert response.path == 'path_value' + assert response.description == 'description_value' + + +def test_update_content_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = ContentServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = gcd_content.UpdateContentRequest( + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_content), + '__call__') as call: + call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client.update_content(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == gcd_content.UpdateContentRequest( + ) + +def test_update_content_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = ContentServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.update_content in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client._transport._wrapped_methods[client._transport.update_content] = mock_rpc + request = {} + client.update_content(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.update_content(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + +@pytest.mark.asyncio +async def test_update_content_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = ContentServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._client._transport.update_content in client._client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[client._client._transport.update_content] = mock_rpc + + request = {} + await client.update_content(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1
+
+ await client.update_content(request)
+
+ # Establish that a new wrapper was not created for this call
+ assert wrapper_fn.call_count == 0
+ assert mock_rpc.call_count == 2
+
+@pytest.mark.asyncio
+async def test_update_content_async(transport: str = 'grpc_asyncio', request_type=gcd_content.UpdateContentRequest):
+ client = ContentServiceAsyncClient(
+ credentials=async_anonymous_credentials(),
+ transport=transport,
+ )
+
+ # Everything is optional in proto3 as far as the runtime is concerned,
+ # and we are mocking out the actual API, so just send an empty request.
+ request = request_type()
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(
+ type(client.transport.update_content),
+ '__call__') as call:
+ # Designate an appropriate return value for the call.
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(analyze.Content(
+ name='name_value',
+ uid='uid_value',
+ path='path_value',
+ description='description_value',
+ ))
+ response = await client.update_content(request)
+
+ # Establish that the underlying gRPC stub method was called.
+ assert len(call.mock_calls)
+ _, args, _ = call.mock_calls[0]
+ request = gcd_content.UpdateContentRequest()
+ assert args[0] == request
+
+ # Establish that the response is the type that we expect.
+ assert isinstance(response, analyze.Content)
+ assert response.name == 'name_value'
+ assert response.uid == 'uid_value'
+ assert response.path == 'path_value'
+ assert response.description == 'description_value'
+
+
+@pytest.mark.asyncio
+async def test_update_content_async_from_dict():
+ await test_update_content_async(request_type=dict)
+
+def test_update_content_field_headers():
+ client = ContentServiceClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ )
+
+ # Any value that is part of the HTTP/1.1 URI should be sent as
+ # a field header. Set these to a non-empty value.
+ request = gcd_content.UpdateContentRequest()
+
+ request.content.name = 'name_value'
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(
+ type(client.transport.update_content),
+ '__call__') as call:
+ call.return_value = analyze.Content()
+ client.update_content(request)
+
+ # Establish that the underlying gRPC stub method was called.
+ assert len(call.mock_calls) == 1
+ _, args, _ = call.mock_calls[0]
+ assert args[0] == request
+
+ # Establish that the field header was sent.
+ _, _, kw = call.mock_calls[0]
+ assert (
+ 'x-goog-request-params',
+ 'content.name=name_value',
+ ) in kw['metadata']
+
+
+@pytest.mark.asyncio
+async def test_update_content_field_headers_async():
+ client = ContentServiceAsyncClient(
+ credentials=async_anonymous_credentials(),
+ )
+
+ # Any value that is part of the HTTP/1.1 URI should be sent as
+ # a field header. Set these to a non-empty value.
+ request = gcd_content.UpdateContentRequest()
+
+ request.content.name = 'name_value'
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(
+ type(client.transport.update_content),
+ '__call__') as call:
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(analyze.Content())
+ await client.update_content(request)
+
+ # Establish that the underlying gRPC stub method was called.
+ assert len(call.mock_calls)
+ _, args, _ = call.mock_calls[0]
+ assert args[0] == request
+
+ # Establish that the field header was sent.
+ _, _, kw = call.mock_calls[0]
+ assert (
+ 'x-goog-request-params',
+ 'content.name=name_value',
+ ) in kw['metadata']
+
+
+def test_update_content_flattened():
+ client = ContentServiceClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ )
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(
+ type(client.transport.update_content),
+ '__call__') as call:
+ # Designate an appropriate return value for the call.
+ call.return_value = analyze.Content()
+ # Call the method with a truthy value for each flattened field,
+ # using the keyword arguments to the method.
+ client.update_content(
+ content=analyze.Content(name='name_value'),
+ update_mask=field_mask_pb2.FieldMask(paths=['paths_value']),
+ )
+
+ # Establish that the underlying call was made with the expected
+ # request object values.
+ assert len(call.mock_calls) == 1
+ _, args, _ = call.mock_calls[0]
+ arg = args[0].content
+ mock_val = analyze.Content(name='name_value')
+ assert arg == mock_val
+ arg = args[0].update_mask
+ mock_val = field_mask_pb2.FieldMask(paths=['paths_value'])
+ assert arg == mock_val
+
+
+def test_update_content_flattened_error():
+ client = ContentServiceClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ )
+
+ # Attempting to call a method with both a request object and flattened
+ # fields is an error.
+ with pytest.raises(ValueError):
+ client.update_content(
+ gcd_content.UpdateContentRequest(),
+ content=analyze.Content(name='name_value'),
+ update_mask=field_mask_pb2.FieldMask(paths=['paths_value']),
+ )
+
+@pytest.mark.asyncio
+async def test_update_content_flattened_async():
+ client = ContentServiceAsyncClient(
+ credentials=async_anonymous_credentials(),
+ )
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(
+ type(client.transport.update_content),
+ '__call__') as call:
+ # Designate an appropriate return value for the call.
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(analyze.Content())
+ # Call the method with a truthy value for each flattened field,
+ # using the keyword arguments to the method.
+ response = await client.update_content(
+ content=analyze.Content(name='name_value'),
+ update_mask=field_mask_pb2.FieldMask(paths=['paths_value']),
+ )
+
+ # Establish that the underlying call was made with the expected
+ # request object values.
+ assert len(call.mock_calls)
+ _, args, _ = call.mock_calls[0]
+ arg = args[0].content
+ mock_val = analyze.Content(name='name_value')
+ assert arg == mock_val
+ arg = args[0].update_mask
+ mock_val = field_mask_pb2.FieldMask(paths=['paths_value'])
+ assert arg == mock_val
+
+@pytest.mark.asyncio
+async def test_update_content_flattened_error_async():
+ client = ContentServiceAsyncClient(
+ credentials=async_anonymous_credentials(),
+ )
+
+ # Attempting to call a method with both a request object and flattened
+ # fields is an error.
+ with pytest.raises(ValueError): + await client.update_content( + gcd_content.UpdateContentRequest(), + content=analyze.Content(name='name_value'), + update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), + ) + + +@pytest.mark.parametrize("request_type", [ + content.DeleteContentRequest, + dict, +]) +def test_delete_content(request_type, transport: str = 'grpc'): + client = ContentServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_content), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = None + response = client.delete_content(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = content.DeleteContentRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert response is None + + +def test_delete_content_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = ContentServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = content.DeleteContentRequest( + name='name_value', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_content), + '__call__') as call: + call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client.delete_content(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == content.DeleteContentRequest( + name='name_value', + ) + +def test_delete_content_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = ContentServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.delete_content in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client._transport._wrapped_methods[client._transport.delete_content] = mock_rpc + request = {} + client.delete_content(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.delete_content(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + +@pytest.mark.asyncio +async def test_delete_content_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = ContentServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._client._transport.delete_content in client._client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[client._client._transport.delete_content] = mock_rpc + + request = {} + await client.delete_content(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + await client.delete_content(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + +@pytest.mark.asyncio +async def test_delete_content_async(transport: str = 'grpc_asyncio', request_type=content.DeleteContentRequest): + client = ContentServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_content), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + response = await client.delete_content(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = content.DeleteContentRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert response is None + + +@pytest.mark.asyncio +async def test_delete_content_async_from_dict(): + await test_delete_content_async(request_type=dict) + +def test_delete_content_field_headers(): + client = ContentServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = content.DeleteContentRequest() + + request.name = 'name_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_content), + '__call__') as call: + call.return_value = None + client.delete_content(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. 
+ _, _, kw = call.mock_calls[0]
+ assert (
+ 'x-goog-request-params',
+ 'name=name_value',
+ ) in kw['metadata']
+
+
+@pytest.mark.asyncio
+async def test_delete_content_field_headers_async():
+ client = ContentServiceAsyncClient(
+ credentials=async_anonymous_credentials(),
+ )
+
+ # Any value that is part of the HTTP/1.1 URI should be sent as
+ # a field header. Set these to a non-empty value.
+ request = content.DeleteContentRequest()
+
+ request.name = 'name_value'
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(
+ type(client.transport.delete_content),
+ '__call__') as call:
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None)
+ await client.delete_content(request)
+
+ # Establish that the underlying gRPC stub method was called.
+ assert len(call.mock_calls)
+ _, args, _ = call.mock_calls[0]
+ assert args[0] == request
+
+ # Establish that the field header was sent.
+ _, _, kw = call.mock_calls[0]
+ assert (
+ 'x-goog-request-params',
+ 'name=name_value',
+ ) in kw['metadata']
+
+
+def test_delete_content_flattened():
+ client = ContentServiceClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ )
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(
+ type(client.transport.delete_content),
+ '__call__') as call:
+ # Designate an appropriate return value for the call.
+ call.return_value = None
+ # Call the method with a truthy value for each flattened field,
+ # using the keyword arguments to the method.
+ client.delete_content(
+ name='name_value',
+ )
+
+ # Establish that the underlying call was made with the expected
+ # request object values.
+ assert len(call.mock_calls) == 1
+ _, args, _ = call.mock_calls[0]
+ arg = args[0].name
+ mock_val = 'name_value'
+ assert arg == mock_val
+
+
+def test_delete_content_flattened_error():
+ client = ContentServiceClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ )
+
+ # Attempting to call a method with both a request object and flattened
+ # fields is an error.
+ with pytest.raises(ValueError):
+ client.delete_content(
+ content.DeleteContentRequest(),
+ name='name_value',
+ )
+
+@pytest.mark.asyncio
+async def test_delete_content_flattened_async():
+ client = ContentServiceAsyncClient(
+ credentials=async_anonymous_credentials(),
+ )
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(
+ type(client.transport.delete_content),
+ '__call__') as call:
+ # Designate an appropriate return value for the call.
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None)
+ # Call the method with a truthy value for each flattened field,
+ # using the keyword arguments to the method.
+ response = await client.delete_content(
+ name='name_value',
+ )
+
+ # Establish that the underlying call was made with the expected
+ # request object values.
+ assert len(call.mock_calls)
+ _, args, _ = call.mock_calls[0]
+ arg = args[0].name
+ mock_val = 'name_value'
+ assert arg == mock_val
+
+@pytest.mark.asyncio
+async def test_delete_content_flattened_error_async():
+ client = ContentServiceAsyncClient(
+ credentials=async_anonymous_credentials(),
+ )
+
+ # Attempting to call a method with both a request object and flattened
+ # fields is an error.
+ with pytest.raises(ValueError): + await client.delete_content( + content.DeleteContentRequest(), + name='name_value', + ) + + +@pytest.mark.parametrize("request_type", [ + content.GetContentRequest, + dict, +]) +def test_get_content(request_type, transport: str = 'grpc'): + client = ContentServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_content), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = analyze.Content( + name='name_value', + uid='uid_value', + path='path_value', + description='description_value', + data_text='data_text_value', + ) + response = client.get_content(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = content.GetContentRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, analyze.Content) + assert response.name == 'name_value' + assert response.uid == 'uid_value' + assert response.path == 'path_value' + assert response.description == 'description_value' + + +def test_get_content_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = ContentServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = content.GetContentRequest( + name='name_value', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_content), + '__call__') as call: + call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client.get_content(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == content.GetContentRequest( + name='name_value', + ) + +def test_get_content_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = ContentServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.get_content in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client._transport._wrapped_methods[client._transport.get_content] = mock_rpc + request = {} + client.get_content(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1
+
+ client.get_content(request)
+
+ # Establish that a new wrapper was not created for this call
+ assert wrapper_fn.call_count == 0
+ assert mock_rpc.call_count == 2
+
+@pytest.mark.asyncio
+async def test_get_content_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"):
+ # Clients should use _prep_wrapped_messages to create cached wrapped rpcs,
+ # instead of constructing them on each call
+ with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn:
+ client = ContentServiceAsyncClient(
+ credentials=async_anonymous_credentials(),
+ transport=transport,
+ )
+
+ # Should wrap all calls on client creation
+ assert wrapper_fn.call_count > 0
+ wrapper_fn.reset_mock()
+
+ # Ensure method has been cached
+ assert client._client._transport.get_content in client._client._transport._wrapped_methods
+
+ # Replace cached wrapped function with mock
+ mock_rpc = mock.AsyncMock()
+ mock_rpc.return_value = mock.Mock()
+ client._client._transport._wrapped_methods[client._client._transport.get_content] = mock_rpc
+
+ request = {}
+ await client.get_content(request)
+
+ # Establish that the underlying gRPC stub method was called.
+ assert mock_rpc.call_count == 1
+
+ await client.get_content(request)
+
+ # Establish that a new wrapper was not created for this call
+ assert wrapper_fn.call_count == 0
+ assert mock_rpc.call_count == 2
+
+@pytest.mark.asyncio
+async def test_get_content_async(transport: str = 'grpc_asyncio', request_type=content.GetContentRequest):
+ client = ContentServiceAsyncClient(
+ credentials=async_anonymous_credentials(),
+ transport=transport,
+ )
+
+ # Everything is optional in proto3 as far as the runtime is concerned,
+ # and we are mocking out the actual API, so just send an empty request.
+ request = request_type()
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(
+ type(client.transport.get_content),
+ '__call__') as call:
+ # Designate an appropriate return value for the call.
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(analyze.Content(
+ name='name_value',
+ uid='uid_value',
+ path='path_value',
+ description='description_value',
+ ))
+ response = await client.get_content(request)
+
+ # Establish that the underlying gRPC stub method was called.
+ assert len(call.mock_calls)
+ _, args, _ = call.mock_calls[0]
+ request = content.GetContentRequest()
+ assert args[0] == request
+
+ # Establish that the response is the type that we expect.
+ assert isinstance(response, analyze.Content)
+ assert response.name == 'name_value'
+ assert response.uid == 'uid_value'
+ assert response.path == 'path_value'
+ assert response.description == 'description_value'
+
+
+@pytest.mark.asyncio
+async def test_get_content_async_from_dict():
+ await test_get_content_async(request_type=dict)
+
+def test_get_content_field_headers():
+ client = ContentServiceClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ )
+
+ # Any value that is part of the HTTP/1.1 URI should be sent as
+ # a field header. Set these to a non-empty value.
+ request = content.GetContentRequest()
+
+ request.name = 'name_value'
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(
+ type(client.transport.get_content),
+ '__call__') as call:
+ call.return_value = analyze.Content()
+ client.get_content(request)
+
+ # Establish that the underlying gRPC stub method was called.
+ assert len(call.mock_calls) == 1
+ _, args, _ = call.mock_calls[0]
+ assert args[0] == request
+
+ # Establish that the field header was sent.
+ _, _, kw = call.mock_calls[0]
+ assert (
+ 'x-goog-request-params',
+ 'name=name_value',
+ ) in kw['metadata']
+
+
+@pytest.mark.asyncio
+async def test_get_content_field_headers_async():
+ client = ContentServiceAsyncClient(
+ credentials=async_anonymous_credentials(),
+ )
+
+ # Any value that is part of the HTTP/1.1 URI should be sent as
+ # a field header. Set these to a non-empty value.
+ request = content.GetContentRequest()
+
+ request.name = 'name_value'
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(
+ type(client.transport.get_content),
+ '__call__') as call:
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(analyze.Content())
+ await client.get_content(request)
+
+ # Establish that the underlying gRPC stub method was called.
+ assert len(call.mock_calls)
+ _, args, _ = call.mock_calls[0]
+ assert args[0] == request
+
+ # Establish that the field header was sent.
+ _, _, kw = call.mock_calls[0]
+ assert (
+ 'x-goog-request-params',
+ 'name=name_value',
+ ) in kw['metadata']
+
+
+def test_get_content_flattened():
+ client = ContentServiceClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ )
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(
+ type(client.transport.get_content),
+ '__call__') as call:
+ # Designate an appropriate return value for the call.
+ call.return_value = analyze.Content()
+ # Call the method with a truthy value for each flattened field,
+ # using the keyword arguments to the method.
+ client.get_content(
+ name='name_value',
+ )
+
+ # Establish that the underlying call was made with the expected
+ # request object values.
+ assert len(call.mock_calls) == 1
+ _, args, _ = call.mock_calls[0]
+ arg = args[0].name
+ mock_val = 'name_value'
+ assert arg == mock_val
+
+
+def test_get_content_flattened_error():
+ client = ContentServiceClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ )
+
+ # Attempting to call a method with both a request object and flattened
+ # fields is an error.
+ with pytest.raises(ValueError):
+ client.get_content(
+ content.GetContentRequest(),
+ name='name_value',
+ )
+
+@pytest.mark.asyncio
+async def test_get_content_flattened_async():
+ client = ContentServiceAsyncClient(
+ credentials=async_anonymous_credentials(),
+ )
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(
+ type(client.transport.get_content),
+ '__call__') as call:
+ # Designate an appropriate return value for the call.
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(analyze.Content())
+ # Call the method with a truthy value for each flattened field,
+ # using the keyword arguments to the method.
+ response = await client.get_content(
+ name='name_value',
+ )
+
+ # Establish that the underlying call was made with the expected
+ # request object values.
+ assert len(call.mock_calls)
+ _, args, _ = call.mock_calls[0]
+ arg = args[0].name
+ mock_val = 'name_value'
+ assert arg == mock_val
+
+@pytest.mark.asyncio
+async def test_get_content_flattened_error_async():
+ client = ContentServiceAsyncClient(
+ credentials=async_anonymous_credentials(),
+ )
+
+ # Attempting to call a method with both a request object and flattened
+ # fields is an error.
+ with pytest.raises(ValueError): + await client.get_content( + content.GetContentRequest(), + name='name_value', + ) + + +@pytest.mark.parametrize("request_type", [ + iam_policy_pb2.GetIamPolicyRequest, + dict, +]) +def test_get_iam_policy(request_type, transport: str = 'grpc'): + client = ContentServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_iam_policy), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = policy_pb2.Policy( + version=774, + etag=b'etag_blob', + ) + response = client.get_iam_policy(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = iam_policy_pb2.GetIamPolicyRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, policy_pb2.Policy) + assert response.version == 774 + assert response.etag == b'etag_blob' + + +def test_get_iam_policy_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = ContentServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = iam_policy_pb2.GetIamPolicyRequest( + resource='resource_value', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_iam_policy), + '__call__') as call: + call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client.get_iam_policy(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == iam_policy_pb2.GetIamPolicyRequest( + resource='resource_value', + ) + +def test_get_iam_policy_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = ContentServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.get_iam_policy in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client._transport._wrapped_methods[client._transport.get_iam_policy] = mock_rpc + request = {} + client.get_iam_policy(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1
+
+ client.get_iam_policy(request)
+
+ # Establish that a new wrapper was not created for this call
+ assert wrapper_fn.call_count == 0
+ assert mock_rpc.call_count == 2
+
+@pytest.mark.asyncio
+async def test_get_iam_policy_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"):
+ # Clients should use _prep_wrapped_messages to create cached wrapped rpcs,
+ # instead of constructing them on each call
+ with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn:
+ client = ContentServiceAsyncClient(
+ credentials=async_anonymous_credentials(),
+ transport=transport,
+ )
+
+ # Should wrap all calls on client creation
+ assert wrapper_fn.call_count > 0
+ wrapper_fn.reset_mock()
+
+ # Ensure method has been cached
+ assert client._client._transport.get_iam_policy in client._client._transport._wrapped_methods
+
+ # Replace cached wrapped function with mock
+ mock_rpc = mock.AsyncMock()
+ mock_rpc.return_value = mock.Mock()
+ client._client._transport._wrapped_methods[client._client._transport.get_iam_policy] = mock_rpc
+
+ request = {}
+ await client.get_iam_policy(request)
+
+ # Establish that the underlying gRPC stub method was called.
+ assert mock_rpc.call_count == 1
+
+ await client.get_iam_policy(request)
+
+ # Establish that a new wrapper was not created for this call
+ assert wrapper_fn.call_count == 0
+ assert mock_rpc.call_count == 2
+
+@pytest.mark.asyncio
+async def test_get_iam_policy_async(transport: str = 'grpc_asyncio', request_type=iam_policy_pb2.GetIamPolicyRequest):
+ client = ContentServiceAsyncClient(
+ credentials=async_anonymous_credentials(),
+ transport=transport,
+ )
+
+ # Everything is optional in proto3 as far as the runtime is concerned,
+ # and we are mocking out the actual API, so just send an empty request.
+ request = request_type()
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(
+ type(client.transport.get_iam_policy),
+ '__call__') as call:
+ # Designate an appropriate return value for the call.
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(policy_pb2.Policy(
+ version=774,
+ etag=b'etag_blob',
+ ))
+ response = await client.get_iam_policy(request)
+
+ # Establish that the underlying gRPC stub method was called.
+ assert len(call.mock_calls)
+ _, args, _ = call.mock_calls[0]
+ request = iam_policy_pb2.GetIamPolicyRequest()
+ assert args[0] == request
+
+ # Establish that the response is the type that we expect.
+ assert isinstance(response, policy_pb2.Policy)
+ assert response.version == 774
+ assert response.etag == b'etag_blob'
+
+
+@pytest.mark.asyncio
+async def test_get_iam_policy_async_from_dict():
+ await test_get_iam_policy_async(request_type=dict)
+
+def test_get_iam_policy_field_headers():
+ client = ContentServiceClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ )
+
+ # Any value that is part of the HTTP/1.1 URI should be sent as
+ # a field header. Set these to a non-empty value.
+ request = iam_policy_pb2.GetIamPolicyRequest()
+
+ request.resource = 'resource_value'
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(
+ type(client.transport.get_iam_policy),
+ '__call__') as call:
+ call.return_value = policy_pb2.Policy()
+ client.get_iam_policy(request)
+
+ # Establish that the underlying gRPC stub method was called.
+ assert len(call.mock_calls) == 1
+ _, args, _ = call.mock_calls[0]
+ assert args[0] == request
+
+ # Establish that the field header was sent.
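+ # Routing headers travel in the call's `metadata` kwarg as (key, value)
+ # tuples, so membership can be asserted directly.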
+    _, _, kw = call.mock_calls[0]
+    assert (
+        'x-goog-request-params',
+        'resource=resource_value',
+    ) in kw['metadata']
+
+
+@pytest.mark.asyncio
+async def test_get_iam_policy_field_headers_async():
+    client = ContentServiceAsyncClient(
+        credentials=async_anonymous_credentials(),
+    )
+
+    # Any value that is part of the HTTP/1.1 URI should be sent as
+    # a field header. Set these to a non-empty value.
+    request = iam_policy_pb2.GetIamPolicyRequest()
+
+    request.resource = 'resource_value'
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.get_iam_policy),
+            '__call__') as call:
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(policy_pb2.Policy())
+        await client.get_iam_policy(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == request
+
+    # Establish that the field header was sent.
+    _, _, kw = call.mock_calls[0]
+    assert (
+        'x-goog-request-params',
+        'resource=resource_value',
+    ) in kw['metadata']
+
+def test_get_iam_policy_from_dict_foreign():
+    client = ContentServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.get_iam_policy),
+            '__call__') as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = policy_pb2.Policy()
+        response = client.get_iam_policy(request={
+            'resource': 'resource_value',
+            'options': options_pb2.GetPolicyOptions(requested_policy_version=2598),
+            }
+        )
+        call.assert_called()
+
+
+def test_get_iam_policy_flattened():
+    client = ContentServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.get_iam_policy),
+            '__call__') as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = policy_pb2.Policy()
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        client.get_iam_policy(
+            resource='resource_value',
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        arg = args[0].resource
+        mock_val = 'resource_value'
+        assert arg == mock_val
+
+
+def test_get_iam_policy_flattened_error():
+    client = ContentServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        client.get_iam_policy(
+            iam_policy_pb2.GetIamPolicyRequest(),
+            resource='resource_value',
+        )
+
+@pytest.mark.asyncio
+async def test_get_iam_policy_flattened_async():
+    client = ContentServiceAsyncClient(
+        credentials=async_anonymous_credentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.get_iam_policy),
+            '__call__') as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(policy_pb2.Policy())
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+ response = await client.get_iam_policy( + resource='resource_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].resource + mock_val = 'resource_value' + assert arg == mock_val + +@pytest.mark.asyncio +async def test_get_iam_policy_flattened_error_async(): + client = ContentServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.get_iam_policy( + iam_policy_pb2.GetIamPolicyRequest(), + resource='resource_value', + ) + + +@pytest.mark.parametrize("request_type", [ + iam_policy_pb2.SetIamPolicyRequest, + dict, +]) +def test_set_iam_policy(request_type, transport: str = 'grpc'): + client = ContentServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.set_iam_policy), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = policy_pb2.Policy( + version=774, + etag=b'etag_blob', + ) + response = client.set_iam_policy(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = iam_policy_pb2.SetIamPolicyRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, policy_pb2.Policy) + assert response.version == 774 + assert response.etag == b'etag_blob' + + +def test_set_iam_policy_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = ContentServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = iam_policy_pb2.SetIamPolicyRequest( + resource='resource_value', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.set_iam_policy), + '__call__') as call: + call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. 
+        client.set_iam_policy(request=request)
+        call.assert_called()
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == iam_policy_pb2.SetIamPolicyRequest(
+            resource='resource_value',
+        )
+
+def test_set_iam_policy_use_cached_wrapped_rpc():
+    # Clients should use _prep_wrapped_messages to create cached wrapped rpcs,
+    # instead of constructing them on each call
+    with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn:
+        client = ContentServiceClient(
+            credentials=ga_credentials.AnonymousCredentials(),
+            transport="grpc",
+        )
+
+        # Should wrap all calls on client creation
+        assert wrapper_fn.call_count > 0
+        wrapper_fn.reset_mock()
+
+        # Ensure method has been cached
+        assert client._transport.set_iam_policy in client._transport._wrapped_methods
+
+        # Replace cached wrapped function with mock
+        mock_rpc = mock.Mock()
+        mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string.
+        client._transport._wrapped_methods[client._transport.set_iam_policy] = mock_rpc
+        request = {}
+        client.set_iam_policy(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert mock_rpc.call_count == 1
+
+        client.set_iam_policy(request)
+
+        # Establish that a new wrapper was not created for this call
+        assert wrapper_fn.call_count == 0
+        assert mock_rpc.call_count == 2
+
+@pytest.mark.asyncio
+async def test_set_iam_policy_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"):
+    # Clients should use _prep_wrapped_messages to create cached wrapped rpcs,
+    # instead of constructing them on each call
+    with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn:
+        client = ContentServiceAsyncClient(
+            credentials=async_anonymous_credentials(),
+            transport=transport,
+        )
+
+        # Should wrap all calls on client creation
+        assert wrapper_fn.call_count > 0
+        wrapper_fn.reset_mock()
+
+        # Ensure method has been cached
+        assert client._client._transport.set_iam_policy in client._client._transport._wrapped_methods
+
+        # Replace cached wrapped function with mock
+        mock_rpc = mock.AsyncMock()
+        mock_rpc.return_value = mock.Mock()
+        client._client._transport._wrapped_methods[client._client._transport.set_iam_policy] = mock_rpc
+
+        request = {}
+        await client.set_iam_policy(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert mock_rpc.call_count == 1
+
+        await client.set_iam_policy(request)
+
+        # Establish that a new wrapper was not created for this call
+        assert wrapper_fn.call_count == 0
+        assert mock_rpc.call_count == 2
+
+@pytest.mark.asyncio
+async def test_set_iam_policy_async(transport: str = 'grpc_asyncio', request_type=iam_policy_pb2.SetIamPolicyRequest):
+    client = ContentServiceAsyncClient(
+        credentials=async_anonymous_credentials(),
+        transport=transport,
+    )
+
+    # Everything is optional in proto3 as far as the runtime is concerned,
+    # and we are mocking out the actual API, so just send an empty request.
+    request = request_type()
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.set_iam_policy),
+            '__call__') as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(policy_pb2.Policy(
+            version=774,
+            etag=b'etag_blob',
+        ))
+        response = await client.set_iam_policy(request)
+
+        # Establish that the underlying gRPC stub method was called.
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = iam_policy_pb2.SetIamPolicyRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, policy_pb2.Policy) + assert response.version == 774 + assert response.etag == b'etag_blob' + + +@pytest.mark.asyncio +async def test_set_iam_policy_async_from_dict(): + await test_set_iam_policy_async(request_type=dict) + +def test_set_iam_policy_field_headers(): + client = ContentServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = iam_policy_pb2.SetIamPolicyRequest() + + request.resource = 'resource_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.set_iam_policy), + '__call__') as call: + call.return_value = policy_pb2.Policy() + client.set_iam_policy(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'resource=resource_value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_set_iam_policy_field_headers_async(): + client = ContentServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = iam_policy_pb2.SetIamPolicyRequest() + + request.resource = 'resource_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.set_iam_policy), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(policy_pb2.Policy()) + await client.set_iam_policy(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'resource=resource_value', + ) in kw['metadata'] + +def test_set_iam_policy_from_dict_foreign(): + client = ContentServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.set_iam_policy), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = policy_pb2.Policy() + response = client.set_iam_policy(request={ + 'resource': 'resource_value', + 'policy': policy_pb2.Policy(version=774), + 'update_mask': field_mask_pb2.FieldMask(paths=['paths_value']), + } + ) + call.assert_called() + + +@pytest.mark.parametrize("request_type", [ + iam_policy_pb2.TestIamPermissionsRequest, + dict, +]) +def test_test_iam_permissions(request_type, transport: str = 'grpc'): + client = ContentServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.test_iam_permissions), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = iam_policy_pb2.TestIamPermissionsResponse( + permissions=['permissions_value'], + ) + response = client.test_iam_permissions(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = iam_policy_pb2.TestIamPermissionsRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, iam_policy_pb2.TestIamPermissionsResponse) + assert response.permissions == ['permissions_value'] + + +def test_test_iam_permissions_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = ContentServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = iam_policy_pb2.TestIamPermissionsRequest( + resource='resource_value', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.test_iam_permissions), + '__call__') as call: + call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client.test_iam_permissions(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == iam_policy_pb2.TestIamPermissionsRequest( + resource='resource_value', + ) + +def test_test_iam_permissions_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = ContentServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.test_iam_permissions in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client._transport._wrapped_methods[client._transport.test_iam_permissions] = mock_rpc + request = {} + client.test_iam_permissions(request) + + # Establish that the underlying gRPC stub method was called. 
+        assert mock_rpc.call_count == 1
+
+        client.test_iam_permissions(request)
+
+        # Establish that a new wrapper was not created for this call
+        assert wrapper_fn.call_count == 0
+        assert mock_rpc.call_count == 2
+
+@pytest.mark.asyncio
+async def test_test_iam_permissions_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"):
+    # Clients should use _prep_wrapped_messages to create cached wrapped rpcs,
+    # instead of constructing them on each call
+    with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn:
+        client = ContentServiceAsyncClient(
+            credentials=async_anonymous_credentials(),
+            transport=transport,
+        )
+
+        # Should wrap all calls on client creation
+        assert wrapper_fn.call_count > 0
+        wrapper_fn.reset_mock()
+
+        # Ensure method has been cached
+        assert client._client._transport.test_iam_permissions in client._client._transport._wrapped_methods
+
+        # Replace cached wrapped function with mock
+        mock_rpc = mock.AsyncMock()
+        mock_rpc.return_value = mock.Mock()
+        client._client._transport._wrapped_methods[client._client._transport.test_iam_permissions] = mock_rpc
+
+        request = {}
+        await client.test_iam_permissions(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert mock_rpc.call_count == 1
+
+        await client.test_iam_permissions(request)
+
+        # Establish that a new wrapper was not created for this call
+        assert wrapper_fn.call_count == 0
+        assert mock_rpc.call_count == 2
+
+@pytest.mark.asyncio
+async def test_test_iam_permissions_async(transport: str = 'grpc_asyncio', request_type=iam_policy_pb2.TestIamPermissionsRequest):
+    client = ContentServiceAsyncClient(
+        credentials=async_anonymous_credentials(),
+        transport=transport,
+    )
+
+    # Everything is optional in proto3 as far as the runtime is concerned,
+    # and we are mocking out the actual API, so just send an empty request.
+    request = request_type()
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.test_iam_permissions),
+            '__call__') as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(iam_policy_pb2.TestIamPermissionsResponse(
+            permissions=['permissions_value'],
+        ))
+        response = await client.test_iam_permissions(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        request = iam_policy_pb2.TestIamPermissionsRequest()
+        assert args[0] == request
+
+    # Establish that the response is the type that we expect.
+    assert isinstance(response, iam_policy_pb2.TestIamPermissionsResponse)
+    assert response.permissions == ['permissions_value']
+
+
+@pytest.mark.asyncio
+async def test_test_iam_permissions_async_from_dict():
+    await test_test_iam_permissions_async(request_type=dict)
+
+def test_test_iam_permissions_field_headers():
+    client = ContentServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Any value that is part of the HTTP/1.1 URI should be sent as
+    # a field header. Set these to a non-empty value.
+    request = iam_policy_pb2.TestIamPermissionsRequest()
+
+    request.resource = 'resource_value'
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object( + type(client.transport.test_iam_permissions), + '__call__') as call: + call.return_value = iam_policy_pb2.TestIamPermissionsResponse() + client.test_iam_permissions(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'resource=resource_value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_test_iam_permissions_field_headers_async(): + client = ContentServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = iam_policy_pb2.TestIamPermissionsRequest() + + request.resource = 'resource_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.test_iam_permissions), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(iam_policy_pb2.TestIamPermissionsResponse()) + await client.test_iam_permissions(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'resource=resource_value', + ) in kw['metadata'] + +def test_test_iam_permissions_from_dict_foreign(): + client = ContentServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.test_iam_permissions), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = iam_policy_pb2.TestIamPermissionsResponse() + response = client.test_iam_permissions(request={ + 'resource': 'resource_value', + 'permissions': ['permissions_value'], + } + ) + call.assert_called() + + +@pytest.mark.parametrize("request_type", [ + content.ListContentRequest, + dict, +]) +def test_list_content(request_type, transport: str = 'grpc'): + client = ContentServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_content), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = content.ListContentResponse( + next_page_token='next_page_token_value', + ) + response = client.list_content(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = content.ListContentRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListContentPager) + assert response.next_page_token == 'next_page_token_value' + + +def test_list_content_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. 
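+    # Note (editorial, grounded in the final assertion of this test):
+    # ListContentRequest declares no auto-populated UUID4 fields, so the
+    # request is expected to reach the stub exactly as constructed below.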
+ client = ContentServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = content.ListContentRequest( + parent='parent_value', + page_token='page_token_value', + filter='filter_value', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_content), + '__call__') as call: + call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client.list_content(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == content.ListContentRequest( + parent='parent_value', + page_token='page_token_value', + filter='filter_value', + ) + +def test_list_content_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = ContentServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.list_content in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client._transport._wrapped_methods[client._transport.list_content] = mock_rpc + request = {} + client.list_content(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.list_content(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + +@pytest.mark.asyncio +async def test_list_content_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = ContentServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._client._transport.list_content in client._client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[client._client._transport.list_content] = mock_rpc + + request = {} + await client.list_content(request) + + # Establish that the underlying gRPC stub method was called. 
+        assert mock_rpc.call_count == 1
+
+        await client.list_content(request)
+
+        # Establish that a new wrapper was not created for this call
+        assert wrapper_fn.call_count == 0
+        assert mock_rpc.call_count == 2
+
+@pytest.mark.asyncio
+async def test_list_content_async(transport: str = 'grpc_asyncio', request_type=content.ListContentRequest):
+    client = ContentServiceAsyncClient(
+        credentials=async_anonymous_credentials(),
+        transport=transport,
+    )
+
+    # Everything is optional in proto3 as far as the runtime is concerned,
+    # and we are mocking out the actual API, so just send an empty request.
+    request = request_type()
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.list_content),
+            '__call__') as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(content.ListContentResponse(
+            next_page_token='next_page_token_value',
+        ))
+        response = await client.list_content(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        request = content.ListContentRequest()
+        assert args[0] == request
+
+    # Establish that the response is the type that we expect.
+    assert isinstance(response, pagers.ListContentAsyncPager)
+    assert response.next_page_token == 'next_page_token_value'
+
+
+@pytest.mark.asyncio
+async def test_list_content_async_from_dict():
+    await test_list_content_async(request_type=dict)
+
+def test_list_content_field_headers():
+    client = ContentServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Any value that is part of the HTTP/1.1 URI should be sent as
+    # a field header. Set these to a non-empty value.
+    request = content.ListContentRequest()
+
+    request.parent = 'parent_value'
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.list_content),
+            '__call__') as call:
+        call.return_value = content.ListContentResponse()
+        client.list_content(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == request
+
+    # Establish that the field header was sent.
+    _, _, kw = call.mock_calls[0]
+    assert (
+        'x-goog-request-params',
+        'parent=parent_value',
+    ) in kw['metadata']
+
+
+@pytest.mark.asyncio
+async def test_list_content_field_headers_async():
+    client = ContentServiceAsyncClient(
+        credentials=async_anonymous_credentials(),
+    )
+
+    # Any value that is part of the HTTP/1.1 URI should be sent as
+    # a field header. Set these to a non-empty value.
+    request = content.ListContentRequest()
+
+    request.parent = 'parent_value'
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.list_content),
+            '__call__') as call:
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(content.ListContentResponse())
+        await client.list_content(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == request
+
+    # Establish that the field header was sent.
+    _, _, kw = call.mock_calls[0]
+    assert (
+        'x-goog-request-params',
+        'parent=parent_value',
+    ) in kw['metadata']
+
+
+def test_list_content_flattened():
+    client = ContentServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.list_content),
+            '__call__') as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = content.ListContentResponse()
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        client.list_content(
+            parent='parent_value',
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        arg = args[0].parent
+        mock_val = 'parent_value'
+        assert arg == mock_val
+
+
+def test_list_content_flattened_error():
+    client = ContentServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        client.list_content(
+            content.ListContentRequest(),
+            parent='parent_value',
+        )
+
+@pytest.mark.asyncio
+async def test_list_content_flattened_async():
+    client = ContentServiceAsyncClient(
+        credentials=async_anonymous_credentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.list_content),
+            '__call__') as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(content.ListContentResponse())
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        response = await client.list_content(
+            parent='parent_value',
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        arg = args[0].parent
+        mock_val = 'parent_value'
+        assert arg == mock_val
+
+@pytest.mark.asyncio
+async def test_list_content_flattened_error_async():
+    client = ContentServiceAsyncClient(
+        credentials=async_anonymous_credentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        await client.list_content(
+            content.ListContentRequest(),
+            parent='parent_value',
+        )
+
+
+def test_list_content_pager(transport_name: str = "grpc"):
+    client = ContentServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport_name,
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.list_content),
+            '__call__') as call:
+        # Set the response to a series of pages.
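+        # Note (editorial, grounded in this test's setup): the pager issues
+        # one RPC per page and stops at the first response without a
+        # next_page_token, so the trailing RuntimeError in side_effect would
+        # surface any unexpected extra call.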
+        call.side_effect = (
+            content.ListContentResponse(
+                content=[
+                    analyze.Content(),
+                    analyze.Content(),
+                    analyze.Content(),
+                ],
+                next_page_token='abc',
+            ),
+            content.ListContentResponse(
+                content=[],
+                next_page_token='def',
+            ),
+            content.ListContentResponse(
+                content=[
+                    analyze.Content(),
+                ],
+                next_page_token='ghi',
+            ),
+            content.ListContentResponse(
+                content=[
+                    analyze.Content(),
+                    analyze.Content(),
+                ],
+            ),
+            RuntimeError,
+        )
+
+        expected_metadata = ()
+        retry = retries.Retry()
+        timeout = 5
+        expected_metadata = tuple(expected_metadata) + (
+            gapic_v1.routing_header.to_grpc_metadata((
+                ('parent', ''),
+            )),
+        )
+        pager = client.list_content(request={}, retry=retry, timeout=timeout)
+
+        assert pager._metadata == expected_metadata
+        assert pager._retry == retry
+        assert pager._timeout == timeout
+
+        results = list(pager)
+        assert len(results) == 6
+        assert all(isinstance(i, analyze.Content)
+                   for i in results)
+
+
+def test_list_content_pages(transport_name: str = "grpc"):
+    client = ContentServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport_name,
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.list_content),
+            '__call__') as call:
+        # Set the response to a series of pages.
+        call.side_effect = (
+            content.ListContentResponse(
+                content=[
+                    analyze.Content(),
+                    analyze.Content(),
+                    analyze.Content(),
+                ],
+                next_page_token='abc',
+            ),
+            content.ListContentResponse(
+                content=[],
+                next_page_token='def',
+            ),
+            content.ListContentResponse(
+                content=[
+                    analyze.Content(),
+                ],
+                next_page_token='ghi',
+            ),
+            content.ListContentResponse(
+                content=[
+                    analyze.Content(),
+                    analyze.Content(),
+                ],
+            ),
+            RuntimeError,
+        )
+        pages = list(client.list_content(request={}).pages)
+        for page_, token in zip(pages, ['abc', 'def', 'ghi', '']):
+            assert page_.raw_page.next_page_token == token
+
+@pytest.mark.asyncio
+async def test_list_content_async_pager():
+    client = ContentServiceAsyncClient(
+        credentials=async_anonymous_credentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.list_content),
+            '__call__', new_callable=mock.AsyncMock) as call:
+        # Set the response to a series of pages.
+        call.side_effect = (
+            content.ListContentResponse(
+                content=[
+                    analyze.Content(),
+                    analyze.Content(),
+                    analyze.Content(),
+                ],
+                next_page_token='abc',
+            ),
+            content.ListContentResponse(
+                content=[],
+                next_page_token='def',
+            ),
+            content.ListContentResponse(
+                content=[
+                    analyze.Content(),
+                ],
+                next_page_token='ghi',
+            ),
+            content.ListContentResponse(
+                content=[
+                    analyze.Content(),
+                    analyze.Content(),
+                ],
+            ),
+            RuntimeError,
+        )
+        async_pager = await client.list_content(request={},)
+        assert async_pager.next_page_token == 'abc'
+        responses = []
+        async for response in async_pager:  # pragma: no branch
+            responses.append(response)
+
+        assert len(responses) == 6
+        assert all(isinstance(i, analyze.Content)
+                   for i in responses)
+
+
+@pytest.mark.asyncio
+async def test_list_content_async_pages():
+    client = ContentServiceAsyncClient(
+        credentials=async_anonymous_credentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.list_content),
+            '__call__', new_callable=mock.AsyncMock) as call:
+        # Set the response to a series of pages.
+ call.side_effect = ( + content.ListContentResponse( + content=[ + analyze.Content(), + analyze.Content(), + analyze.Content(), + ], + next_page_token='abc', + ), + content.ListContentResponse( + content=[], + next_page_token='def', + ), + content.ListContentResponse( + content=[ + analyze.Content(), + ], + next_page_token='ghi', + ), + content.ListContentResponse( + content=[ + analyze.Content(), + analyze.Content(), + ], + ), + RuntimeError, + ) + pages = [] + # Workaround issue in python 3.9 related to code coverage by adding `# pragma: no branch` + # See https://github.com/googleapis/gapic-generator-python/pull/1174#issuecomment-1025132372 + async for page_ in ( # pragma: no branch + await client.list_content(request={}) + ).pages: + pages.append(page_) + for page_, token in zip(pages, ['abc','def','ghi', '']): + assert page_.raw_page.next_page_token == token + + +def test_credentials_transport_error(): + # It is an error to provide credentials and a transport instance. + transport = transports.ContentServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = ContentServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # It is an error to provide a credentials file and a transport instance. + transport = transports.ContentServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = ContentServiceClient( + client_options={"credentials_file": "credentials.json"}, + transport=transport, + ) + + # It is an error to provide an api_key and a transport instance. + transport = transports.ContentServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + options = client_options.ClientOptions() + options.api_key = "api_key" + with pytest.raises(ValueError): + client = ContentServiceClient( + client_options=options, + transport=transport, + ) + + # It is an error to provide an api_key and a credential. + options = client_options.ClientOptions() + options.api_key = "api_key" + with pytest.raises(ValueError): + client = ContentServiceClient( + client_options=options, + credentials=ga_credentials.AnonymousCredentials() + ) + + # It is an error to provide scopes and a transport instance. + transport = transports.ContentServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = ContentServiceClient( + client_options={"scopes": ["1", "2"]}, + transport=transport, + ) + + +def test_transport_instance(): + # A client may be instantiated with a custom transport instance. + transport = transports.ContentServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + client = ContentServiceClient(transport=transport) + assert client.transport is transport + +def test_transport_get_channel(): + # A client may be instantiated with a custom transport instance. + transport = transports.ContentServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + channel = transport.grpc_channel + assert channel + + transport = transports.ContentServiceGrpcAsyncIOTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + channel = transport.grpc_channel + assert channel + +@pytest.mark.parametrize("transport_class", [ + transports.ContentServiceGrpcTransport, + transports.ContentServiceGrpcAsyncIOTransport, +]) +def test_transport_adc(transport_class): + # Test default credentials are used if not provided. 
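+    # Note (editorial): patching google.auth.default keeps the test hermetic;
+    # no real ADC lookup (environment variables, gcloud config, or the
+    # metadata server) is performed.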
+ with mock.patch.object(google.auth, 'default') as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport_class() + adc.assert_called_once() + +def test_transport_kind_grpc(): + transport = ContentServiceClient.get_transport_class("grpc")( + credentials=ga_credentials.AnonymousCredentials() + ) + assert transport.kind == "grpc" + + +def test_initialize_client_w_grpc(): + client = ContentServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc" + ) + assert client is not None + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_create_content_empty_call_grpc(): + client = ContentServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.create_content), + '__call__') as call: + call.return_value = analyze.Content() + client.create_content(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = gcd_content.CreateContentRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_update_content_empty_call_grpc(): + client = ContentServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.update_content), + '__call__') as call: + call.return_value = analyze.Content() + client.update_content(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = gcd_content.UpdateContentRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_delete_content_empty_call_grpc(): + client = ContentServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.delete_content), + '__call__') as call: + call.return_value = None + client.delete_content(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = content.DeleteContentRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_get_content_empty_call_grpc(): + client = ContentServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.get_content), + '__call__') as call: + call.return_value = analyze.Content() + client.get_content(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = content.GetContentRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. 
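+# (With request=None the client synthesizes a default request message, which
+# is asserted below to equal an empty GetIamPolicyRequest.)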
+def test_get_iam_policy_empty_call_grpc(): + client = ContentServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.get_iam_policy), + '__call__') as call: + call.return_value = policy_pb2.Policy() + client.get_iam_policy(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = iam_policy_pb2.GetIamPolicyRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_set_iam_policy_empty_call_grpc(): + client = ContentServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.set_iam_policy), + '__call__') as call: + call.return_value = policy_pb2.Policy() + client.set_iam_policy(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = iam_policy_pb2.SetIamPolicyRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_test_iam_permissions_empty_call_grpc(): + client = ContentServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.test_iam_permissions), + '__call__') as call: + call.return_value = iam_policy_pb2.TestIamPermissionsResponse() + client.test_iam_permissions(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = iam_policy_pb2.TestIamPermissionsRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_list_content_empty_call_grpc(): + client = ContentServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.list_content), + '__call__') as call: + call.return_value = content.ListContentResponse() + client.list_content(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = content.ListContentRequest() + + assert args[0] == request_msg + + +def test_transport_kind_grpc_asyncio(): + transport = ContentServiceAsyncClient.get_transport_class("grpc_asyncio")( + credentials=async_anonymous_credentials() + ) + assert transport.kind == "grpc_asyncio" + + +def test_initialize_client_w_grpc_asyncio(): + client = ContentServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio" + ) + assert client is not None + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. 
+@pytest.mark.asyncio +async def test_create_content_empty_call_grpc_asyncio(): + client = ContentServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.create_content), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(analyze.Content( + name='name_value', + uid='uid_value', + path='path_value', + description='description_value', + )) + await client.create_content(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = gcd_content.CreateContentRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_update_content_empty_call_grpc_asyncio(): + client = ContentServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.update_content), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(analyze.Content( + name='name_value', + uid='uid_value', + path='path_value', + description='description_value', + )) + await client.update_content(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = gcd_content.UpdateContentRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_delete_content_empty_call_grpc_asyncio(): + client = ContentServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.delete_content), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + await client.delete_content(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = content.DeleteContentRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_get_content_empty_call_grpc_asyncio(): + client = ContentServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.get_content), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(analyze.Content( + name='name_value', + uid='uid_value', + path='path_value', + description='description_value', + )) + await client.get_content(request=None) + + # Establish that the underlying stub method was called. 
+ call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = content.GetContentRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_get_iam_policy_empty_call_grpc_asyncio(): + client = ContentServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.get_iam_policy), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(policy_pb2.Policy( + version=774, + etag=b'etag_blob', + )) + await client.get_iam_policy(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = iam_policy_pb2.GetIamPolicyRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_set_iam_policy_empty_call_grpc_asyncio(): + client = ContentServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.set_iam_policy), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(policy_pb2.Policy( + version=774, + etag=b'etag_blob', + )) + await client.set_iam_policy(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = iam_policy_pb2.SetIamPolicyRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_test_iam_permissions_empty_call_grpc_asyncio(): + client = ContentServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.test_iam_permissions), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(iam_policy_pb2.TestIamPermissionsResponse( + permissions=['permissions_value'], + )) + await client.test_iam_permissions(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = iam_policy_pb2.TestIamPermissionsRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_list_content_empty_call_grpc_asyncio(): + client = ContentServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.list_content), + '__call__') as call: + # Designate an appropriate return value for the call. 
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(content.ListContentResponse( + next_page_token='next_page_token_value', + )) + await client.list_content(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = content.ListContentRequest() + + assert args[0] == request_msg + + +def test_transport_grpc_default(): + # A client should use the gRPC transport by default. + client = ContentServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + assert isinstance( + client.transport, + transports.ContentServiceGrpcTransport, + ) + +def test_content_service_base_transport_error(): + # Passing both a credentials object and credentials_file should raise an error + with pytest.raises(core_exceptions.DuplicateCredentialArgs): + transport = transports.ContentServiceTransport( + credentials=ga_credentials.AnonymousCredentials(), + credentials_file="credentials.json" + ) + + +def test_content_service_base_transport(): + # Instantiate the base transport. + with mock.patch('google.cloud.dataplex_v1.services.content_service.transports.ContentServiceTransport.__init__') as Transport: + Transport.return_value = None + transport = transports.ContentServiceTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Every method on the transport should just blindly + # raise NotImplementedError. + methods = ( + 'create_content', + 'update_content', + 'delete_content', + 'get_content', + 'get_iam_policy', + 'set_iam_policy', + 'test_iam_permissions', + 'list_content', + 'get_location', + 'list_locations', + 'get_operation', + 'cancel_operation', + 'delete_operation', + 'list_operations', + ) + for method in methods: + with pytest.raises(NotImplementedError): + getattr(transport, method)(request=object()) + + with pytest.raises(NotImplementedError): + transport.close() + + # Catch all for all remaining methods and properties + remainder = [ + 'kind', + ] + for r in remainder: + with pytest.raises(NotImplementedError): + getattr(transport, r)() + + +def test_content_service_base_transport_with_credentials_file(): + # Instantiate the base transport with a credentials file + with mock.patch.object(google.auth, 'load_credentials_from_file', autospec=True) as load_creds, mock.patch('google.cloud.dataplex_v1.services.content_service.transports.ContentServiceTransport._prep_wrapped_messages') as Transport: + Transport.return_value = None + load_creds.return_value = (ga_credentials.AnonymousCredentials(), None) + transport = transports.ContentServiceTransport( + credentials_file="credentials.json", + quota_project_id="octopus", + ) + load_creds.assert_called_once_with("credentials.json", + scopes=None, + default_scopes=( + 'https://www.googleapis.com/auth/cloud-platform', +), + quota_project_id="octopus", + ) + + +def test_content_service_base_transport_with_adc(): + # Test the default credentials are used if credentials and credentials_file are None. + with mock.patch.object(google.auth, 'default', autospec=True) as adc, mock.patch('google.cloud.dataplex_v1.services.content_service.transports.ContentServiceTransport._prep_wrapped_messages') as Transport: + Transport.return_value = None + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport = transports.ContentServiceTransport() + adc.assert_called_once() + + +def test_content_service_auth_adc(): + # If no credentials are provided, we should use ADC credentials. 
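+    # Note (editorial, grounded in the assertion below): the cloud-platform
+    # scope is requested as a *default* scope (scopes=None), so user-supplied
+    # scopes can override it when provided.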
+ with mock.patch.object(google.auth, 'default', autospec=True) as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + ContentServiceClient() + adc.assert_called_once_with( + scopes=None, + default_scopes=( + 'https://www.googleapis.com/auth/cloud-platform', +), + quota_project_id=None, + ) + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.ContentServiceGrpcTransport, + transports.ContentServiceGrpcAsyncIOTransport, + ], +) +def test_content_service_transport_auth_adc(transport_class): + # If credentials and host are not provided, the transport class should use + # ADC credentials. + with mock.patch.object(google.auth, 'default', autospec=True) as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport_class(quota_project_id="octopus", scopes=["1", "2"]) + adc.assert_called_once_with( + scopes=["1", "2"], + default_scopes=( 'https://www.googleapis.com/auth/cloud-platform',), + quota_project_id="octopus", + ) + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.ContentServiceGrpcTransport, + transports.ContentServiceGrpcAsyncIOTransport, + ], +) +def test_content_service_transport_auth_gdch_credentials(transport_class): + host = 'https://language.com' + api_audience_tests = [None, 'https://language2.com'] + api_audience_expect = [host, 'https://language2.com'] + for t, e in zip(api_audience_tests, api_audience_expect): + with mock.patch.object(google.auth, 'default', autospec=True) as adc: + gdch_mock = mock.MagicMock() + type(gdch_mock).with_gdch_audience = mock.PropertyMock(return_value=gdch_mock) + adc.return_value = (gdch_mock, None) + transport_class(host=host, api_audience=t) + gdch_mock.with_gdch_audience.assert_called_once_with( + e + ) + + +@pytest.mark.parametrize( + "transport_class,grpc_helpers", + [ + (transports.ContentServiceGrpcTransport, grpc_helpers), + (transports.ContentServiceGrpcAsyncIOTransport, grpc_helpers_async) + ], +) +def test_content_service_transport_create_channel(transport_class, grpc_helpers): + # If credentials and host are not provided, the transport class should use + # ADC credentials. + with mock.patch.object(google.auth, "default", autospec=True) as adc, mock.patch.object( + grpc_helpers, "create_channel", autospec=True + ) as create_channel: + creds = ga_credentials.AnonymousCredentials() + adc.return_value = (creds, None) + transport_class( + quota_project_id="octopus", + scopes=["1", "2"] + ) + + create_channel.assert_called_with( + "dataplex.googleapis.com:443", + credentials=creds, + credentials_file=None, + quota_project_id="octopus", + default_scopes=( + 'https://www.googleapis.com/auth/cloud-platform', +), + scopes=["1", "2"], + default_host="dataplex.googleapis.com", + ssl_credentials=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + +@pytest.mark.parametrize("transport_class", [transports.ContentServiceGrpcTransport, transports.ContentServiceGrpcAsyncIOTransport]) +def test_content_service_grpc_transport_client_cert_source_for_mtls( + transport_class +): + cred = ga_credentials.AnonymousCredentials() + + # Check ssl_channel_credentials is used if provided. 
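+    # Precedence under test: explicit ssl_channel_credentials win, and
+    # client_cert_source_for_mtls is only consulted when they are absent.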
+    with mock.patch.object(transport_class, "create_channel") as mock_create_channel:
+        mock_ssl_channel_creds = mock.Mock()
+        transport_class(
+            host="squid.clam.whelk",
+            credentials=cred,
+            ssl_channel_credentials=mock_ssl_channel_creds
+        )
+        mock_create_channel.assert_called_once_with(
+            "squid.clam.whelk:443",
+            credentials=cred,
+            credentials_file=None,
+            scopes=None,
+            ssl_credentials=mock_ssl_channel_creds,
+            quota_project_id=None,
+            options=[
+                ("grpc.max_send_message_length", -1),
+                ("grpc.max_receive_message_length", -1),
+            ],
+        )
+
+    # Check that if ssl_channel_credentials is not provided, then
+    # client_cert_source_for_mtls is used.
+    with mock.patch.object(transport_class, "create_channel", return_value=mock.Mock()):
+        with mock.patch("grpc.ssl_channel_credentials") as mock_ssl_cred:
+            transport_class(
+                credentials=cred,
+                client_cert_source_for_mtls=client_cert_source_callback
+            )
+            expected_cert, expected_key = client_cert_source_callback()
+            mock_ssl_cred.assert_called_once_with(
+                certificate_chain=expected_cert,
+                private_key=expected_key
+            )
+
+
+@pytest.mark.parametrize("transport_name", [
+    "grpc",
+    "grpc_asyncio",
+])
+def test_content_service_host_no_port(transport_name):
+    client = ContentServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        client_options=client_options.ClientOptions(api_endpoint='dataplex.googleapis.com'),
+        transport=transport_name,
+    )
+    assert client.transport._host == (
+        'dataplex.googleapis.com:443'
+    )
+
+@pytest.mark.parametrize("transport_name", [
+    "grpc",
+    "grpc_asyncio",
+])
+def test_content_service_host_with_port(transport_name):
+    client = ContentServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        client_options=client_options.ClientOptions(api_endpoint='dataplex.googleapis.com:8000'),
+        transport=transport_name,
+    )
+    assert client.transport._host == (
+        'dataplex.googleapis.com:8000'
+    )
+
+def test_content_service_grpc_transport_channel():
+    channel = grpc.secure_channel('http://localhost/', grpc.local_channel_credentials())
+
+    # Check that channel is used if provided.
+    transport = transports.ContentServiceGrpcTransport(
+        host="squid.clam.whelk",
+        channel=channel,
+    )
+    assert transport.grpc_channel == channel
+    assert transport._host == "squid.clam.whelk:443"
+    assert transport._ssl_channel_credentials is None
+
+
+def test_content_service_grpc_asyncio_transport_channel():
+    channel = aio.secure_channel('http://localhost/', grpc.local_channel_credentials())
+
+    # Check that channel is used if provided.
+    transport = transports.ContentServiceGrpcAsyncIOTransport(
+        host="squid.clam.whelk",
+        channel=channel,
+    )
+    assert transport.grpc_channel == channel
+    assert transport._host == "squid.clam.whelk:443"
+    assert transport._ssl_channel_credentials is None
+
+
+# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are
+# removed from grpc/grpc_asyncio transport constructor.
+@pytest.mark.parametrize("transport_class", [transports.ContentServiceGrpcTransport, transports.ContentServiceGrpcAsyncIOTransport]) +def test_content_service_transport_channel_mtls_with_client_cert_source( + transport_class +): + with mock.patch("grpc.ssl_channel_credentials", autospec=True) as grpc_ssl_channel_cred: + with mock.patch.object(transport_class, "create_channel") as grpc_create_channel: + mock_ssl_cred = mock.Mock() + grpc_ssl_channel_cred.return_value = mock_ssl_cred + + mock_grpc_channel = mock.Mock() + grpc_create_channel.return_value = mock_grpc_channel + + cred = ga_credentials.AnonymousCredentials() + with pytest.warns(DeprecationWarning): + with mock.patch.object(google.auth, 'default') as adc: + adc.return_value = (cred, None) + transport = transport_class( + host="squid.clam.whelk", + api_mtls_endpoint="mtls.squid.clam.whelk", + client_cert_source=client_cert_source_callback, + ) + adc.assert_called_once() + + grpc_ssl_channel_cred.assert_called_once_with( + certificate_chain=b"cert bytes", private_key=b"key bytes" + ) + grpc_create_channel.assert_called_once_with( + "mtls.squid.clam.whelk:443", + credentials=cred, + credentials_file=None, + scopes=None, + ssl_credentials=mock_ssl_cred, + quota_project_id=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + assert transport.grpc_channel == mock_grpc_channel + assert transport._ssl_channel_credentials == mock_ssl_cred + + +# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are +# removed from grpc/grpc_asyncio transport constructor. +@pytest.mark.parametrize("transport_class", [transports.ContentServiceGrpcTransport, transports.ContentServiceGrpcAsyncIOTransport]) +def test_content_service_transport_channel_mtls_with_adc( + transport_class +): + mock_ssl_cred = mock.Mock() + with mock.patch.multiple( + "google.auth.transport.grpc.SslCredentials", + __init__=mock.Mock(return_value=None), + ssl_credentials=mock.PropertyMock(return_value=mock_ssl_cred), + ): + with mock.patch.object(transport_class, "create_channel") as grpc_create_channel: + mock_grpc_channel = mock.Mock() + grpc_create_channel.return_value = mock_grpc_channel + mock_cred = mock.Mock() + + with pytest.warns(DeprecationWarning): + transport = transport_class( + host="squid.clam.whelk", + credentials=mock_cred, + api_mtls_endpoint="mtls.squid.clam.whelk", + client_cert_source=None, + ) + + grpc_create_channel.assert_called_once_with( + "mtls.squid.clam.whelk:443", + credentials=mock_cred, + credentials_file=None, + scopes=None, + ssl_credentials=mock_ssl_cred, + quota_project_id=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + assert transport.grpc_channel == mock_grpc_channel + + +def test_content_path(): + project = "squid" + location = "clam" + lake = "whelk" + content = "octopus" + expected = "projects/{project}/locations/{location}/lakes/{lake}/content/{content}".format(project=project, location=location, lake=lake, content=content, ) + actual = ContentServiceClient.content_path(project, location, lake, content) + assert expected == actual + + +def test_parse_content_path(): + expected = { + "project": "oyster", + "location": "nudibranch", + "lake": "cuttlefish", + "content": "mussel", + } + path = ContentServiceClient.content_path(**expected) + + # Check that the path construction is reversible. 
+ actual = ContentServiceClient.parse_content_path(path) + assert expected == actual + +def test_lake_path(): + project = "winkle" + location = "nautilus" + lake = "scallop" + expected = "projects/{project}/locations/{location}/lakes/{lake}".format(project=project, location=location, lake=lake, ) + actual = ContentServiceClient.lake_path(project, location, lake) + assert expected == actual + + +def test_parse_lake_path(): + expected = { + "project": "abalone", + "location": "squid", + "lake": "clam", + } + path = ContentServiceClient.lake_path(**expected) + + # Check that the path construction is reversible. + actual = ContentServiceClient.parse_lake_path(path) + assert expected == actual + +def test_common_billing_account_path(): + billing_account = "whelk" + expected = "billingAccounts/{billing_account}".format(billing_account=billing_account, ) + actual = ContentServiceClient.common_billing_account_path(billing_account) + assert expected == actual + + +def test_parse_common_billing_account_path(): + expected = { + "billing_account": "octopus", + } + path = ContentServiceClient.common_billing_account_path(**expected) + + # Check that the path construction is reversible. + actual = ContentServiceClient.parse_common_billing_account_path(path) + assert expected == actual + +def test_common_folder_path(): + folder = "oyster" + expected = "folders/{folder}".format(folder=folder, ) + actual = ContentServiceClient.common_folder_path(folder) + assert expected == actual + + +def test_parse_common_folder_path(): + expected = { + "folder": "nudibranch", + } + path = ContentServiceClient.common_folder_path(**expected) + + # Check that the path construction is reversible. + actual = ContentServiceClient.parse_common_folder_path(path) + assert expected == actual + +def test_common_organization_path(): + organization = "cuttlefish" + expected = "organizations/{organization}".format(organization=organization, ) + actual = ContentServiceClient.common_organization_path(organization) + assert expected == actual + + +def test_parse_common_organization_path(): + expected = { + "organization": "mussel", + } + path = ContentServiceClient.common_organization_path(**expected) + + # Check that the path construction is reversible. + actual = ContentServiceClient.parse_common_organization_path(path) + assert expected == actual + +def test_common_project_path(): + project = "winkle" + expected = "projects/{project}".format(project=project, ) + actual = ContentServiceClient.common_project_path(project) + assert expected == actual + + +def test_parse_common_project_path(): + expected = { + "project": "nautilus", + } + path = ContentServiceClient.common_project_path(**expected) + + # Check that the path construction is reversible. + actual = ContentServiceClient.parse_common_project_path(path) + assert expected == actual + +def test_common_location_path(): + project = "scallop" + location = "abalone" + expected = "projects/{project}/locations/{location}".format(project=project, location=location, ) + actual = ContentServiceClient.common_location_path(project, location) + assert expected == actual + + +def test_parse_common_location_path(): + expected = { + "project": "squid", + "location": "clam", + } + path = ContentServiceClient.common_location_path(**expected) + + # Check that the path construction is reversible. 
+ actual = ContentServiceClient.parse_common_location_path(path) + assert expected == actual + + +def test_client_with_default_client_info(): + client_info = gapic_v1.client_info.ClientInfo() + + with mock.patch.object(transports.ContentServiceTransport, '_prep_wrapped_messages') as prep: + client = ContentServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + client_info=client_info, + ) + prep.assert_called_once_with(client_info) + + with mock.patch.object(transports.ContentServiceTransport, '_prep_wrapped_messages') as prep: + transport_class = ContentServiceClient.get_transport_class() + transport = transport_class( + credentials=ga_credentials.AnonymousCredentials(), + client_info=client_info, + ) + prep.assert_called_once_with(client_info) + + +def test_delete_operation(transport: str = "grpc"): + client = ContentServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.DeleteOperationRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = None + response = client.delete_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert response is None +@pytest.mark.asyncio +async def test_delete_operation_async(transport: str = "grpc_asyncio"): + client = ContentServiceAsyncClient( + credentials=async_anonymous_credentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.DeleteOperationRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + None + ) + response = await client.delete_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert response is None + +def test_delete_operation_field_headers(): + client = ContentServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.DeleteOperationRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_operation), "__call__") as call: + call.return_value = None + + client.delete_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. 
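+    # ``x-goog-request-params`` is the routing header GAPIC clients attach so
+    # the backend can route a request by resource name; it should appear in
+    # the metadata kwargs of the mocked call.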
+ _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "name=locations",) in kw["metadata"] +@pytest.mark.asyncio +async def test_delete_operation_field_headers_async(): + client = ContentServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.DeleteOperationRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_operation), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + None + ) + await client.delete_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "name=locations",) in kw["metadata"] + +def test_delete_operation_from_dict(): + client = ContentServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = None + + response = client.delete_operation( + request={ + "name": "locations", + } + ) + call.assert_called() +@pytest.mark.asyncio +async def test_delete_operation_from_dict_async(): + client = ContentServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + None + ) + response = await client.delete_operation( + request={ + "name": "locations", + } + ) + call.assert_called() + + +def test_cancel_operation(transport: str = "grpc"): + client = ContentServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.CancelOperationRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = None + response = client.cancel_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert response is None +@pytest.mark.asyncio +async def test_cancel_operation_async(transport: str = "grpc_asyncio"): + client = ContentServiceAsyncClient( + credentials=async_anonymous_credentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.CancelOperationRequest() + + # Mock the actual call within the gRPC stub, and fake the request. 
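+    # Patching ``__call__`` on type(...) swaps the method at the class level
+    # of the multicallable, so any invocation of this RPC on the channel is
+    # intercepted by the mock.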
+ with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + None + ) + response = await client.cancel_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert response is None + +def test_cancel_operation_field_headers(): + client = ContentServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.CancelOperationRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: + call.return_value = None + + client.cancel_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "name=locations",) in kw["metadata"] +@pytest.mark.asyncio +async def test_cancel_operation_field_headers_async(): + client = ContentServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.CancelOperationRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + None + ) + await client.cancel_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "name=locations",) in kw["metadata"] + +def test_cancel_operation_from_dict(): + client = ContentServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = None + + response = client.cancel_operation( + request={ + "name": "locations", + } + ) + call.assert_called() +@pytest.mark.asyncio +async def test_cancel_operation_from_dict_async(): + client = ContentServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: + # Designate an appropriate return value for the call. 
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + None + ) + response = await client.cancel_operation( + request={ + "name": "locations", + } + ) + call.assert_called() + + +def test_get_operation(transport: str = "grpc"): + client = ContentServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.GetOperationRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation() + response = client.get_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, operations_pb2.Operation) +@pytest.mark.asyncio +async def test_get_operation_async(transport: str = "grpc_asyncio"): + client = ContentServiceAsyncClient( + credentials=async_anonymous_credentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.GetOperationRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation() + ) + response = await client.get_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, operations_pb2.Operation) + +def test_get_operation_field_headers(): + client = ContentServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.GetOperationRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_operation), "__call__") as call: + call.return_value = operations_pb2.Operation() + + client.get_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "name=locations",) in kw["metadata"] +@pytest.mark.asyncio +async def test_get_operation_field_headers_async(): + client = ContentServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.GetOperationRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object(type(client.transport.get_operation), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation() + ) + await client.get_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "name=locations",) in kw["metadata"] + +def test_get_operation_from_dict(): + client = ContentServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation() + + response = client.get_operation( + request={ + "name": "locations", + } + ) + call.assert_called() +@pytest.mark.asyncio +async def test_get_operation_from_dict_async(): + client = ContentServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation() + ) + response = await client.get_operation( + request={ + "name": "locations", + } + ) + call.assert_called() + + +def test_list_operations(transport: str = "grpc"): + client = ContentServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.ListOperationsRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_operations), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.ListOperationsResponse() + response = client.list_operations(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, operations_pb2.ListOperationsResponse) +@pytest.mark.asyncio +async def test_list_operations_async(transport: str = "grpc_asyncio"): + client = ContentServiceAsyncClient( + credentials=async_anonymous_credentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.ListOperationsRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_operations), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.ListOperationsResponse() + ) + response = await client.list_operations(request) + # Establish that the underlying gRPC stub method was called. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, operations_pb2.ListOperationsResponse) + +def test_list_operations_field_headers(): + client = ContentServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.ListOperationsRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_operations), "__call__") as call: + call.return_value = operations_pb2.ListOperationsResponse() + + client.list_operations(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "name=locations",) in kw["metadata"] +@pytest.mark.asyncio +async def test_list_operations_field_headers_async(): + client = ContentServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.ListOperationsRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_operations), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.ListOperationsResponse() + ) + await client.list_operations(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "name=locations",) in kw["metadata"] + +def test_list_operations_from_dict(): + client = ContentServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_operations), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.ListOperationsResponse() + + response = client.list_operations( + request={ + "name": "locations", + } + ) + call.assert_called() +@pytest.mark.asyncio +async def test_list_operations_from_dict_async(): + client = ContentServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_operations), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.ListOperationsResponse() + ) + response = await client.list_operations( + request={ + "name": "locations", + } + ) + call.assert_called() + + +def test_list_locations(transport: str = "grpc"): + client = ContentServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. 
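+    # (Every proto3 field has a default value, so an all-defaults message is a
+    # valid request on the wire.)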
+ request = locations_pb2.ListLocationsRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_locations), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = locations_pb2.ListLocationsResponse() + response = client.list_locations(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, locations_pb2.ListLocationsResponse) +@pytest.mark.asyncio +async def test_list_locations_async(transport: str = "grpc_asyncio"): + client = ContentServiceAsyncClient( + credentials=async_anonymous_credentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = locations_pb2.ListLocationsRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_locations), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + locations_pb2.ListLocationsResponse() + ) + response = await client.list_locations(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, locations_pb2.ListLocationsResponse) + +def test_list_locations_field_headers(): + client = ContentServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = locations_pb2.ListLocationsRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_locations), "__call__") as call: + call.return_value = locations_pb2.ListLocationsResponse() + + client.list_locations(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "name=locations",) in kw["metadata"] +@pytest.mark.asyncio +async def test_list_locations_field_headers_async(): + client = ContentServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = locations_pb2.ListLocationsRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_locations), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + locations_pb2.ListLocationsResponse() + ) + await client.list_locations(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. 
+ _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "name=locations",) in kw["metadata"] + +def test_list_locations_from_dict(): + client = ContentServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_locations), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = locations_pb2.ListLocationsResponse() + + response = client.list_locations( + request={ + "name": "locations", + } + ) + call.assert_called() +@pytest.mark.asyncio +async def test_list_locations_from_dict_async(): + client = ContentServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_locations), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + locations_pb2.ListLocationsResponse() + ) + response = await client.list_locations( + request={ + "name": "locations", + } + ) + call.assert_called() + + +def test_get_location(transport: str = "grpc"): + client = ContentServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = locations_pb2.GetLocationRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_location), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = locations_pb2.Location() + response = client.get_location(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, locations_pb2.Location) +@pytest.mark.asyncio +async def test_get_location_async(transport: str = "grpc_asyncio"): + client = ContentServiceAsyncClient( + credentials=async_anonymous_credentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = locations_pb2.GetLocationRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_location), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + locations_pb2.Location() + ) + response = await client.get_location(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, locations_pb2.Location) + +def test_get_location_field_headers(): + client = ContentServiceClient( + credentials=ga_credentials.AnonymousCredentials()) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = locations_pb2.GetLocationRequest() + request.name = "locations/abc" + + # Mock the actual call within the gRPC stub, and fake the request. 
+    with mock.patch.object(type(client.transport.get_location), "__call__") as call:
+        call.return_value = locations_pb2.Location()
+
+        client.get_location(request)
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == request
+
+    # Establish that the field header was sent.
+    _, _, kw = call.mock_calls[0]
+    assert ("x-goog-request-params", "name=locations/abc",) in kw["metadata"]
+@pytest.mark.asyncio
+async def test_get_location_field_headers_async():
+    client = ContentServiceAsyncClient(
+        credentials=async_anonymous_credentials()
+    )
+
+    # Any value that is part of the HTTP/1.1 URI should be sent as
+    # a field header. Set these to a non-empty value.
+    request = locations_pb2.GetLocationRequest()
+    request.name = "locations/abc"
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client.transport.get_location), "__call__") as call:
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
+            locations_pb2.Location()
+        )
+        await client.get_location(request)
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == request
+
+    # Establish that the field header was sent.
+    _, _, kw = call.mock_calls[0]
+    assert ("x-goog-request-params", "name=locations/abc",) in kw["metadata"]
+
+def test_get_location_from_dict():
+    client = ContentServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client.transport.get_location), "__call__") as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = locations_pb2.Location()
+
+        response = client.get_location(
+            request={
+                "name": "locations/abc",
+            }
+        )
+        call.assert_called()
+@pytest.mark.asyncio
+async def test_get_location_from_dict_async():
+    client = ContentServiceAsyncClient(
+        credentials=async_anonymous_credentials(),
+    )
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client.transport.get_location), "__call__") as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
+            locations_pb2.Location()
+        )
+        response = await client.get_location(
+            request={
+                "name": "locations/abc",
+            }
+        )
+        call.assert_called()
+
+
+def test_transport_close_grpc():
+    client = ContentServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport="grpc"
+    )
+    with mock.patch.object(type(getattr(client.transport, "_grpc_channel")), "close") as close:
+        with client:
+            close.assert_not_called()
+        close.assert_called_once()
+
+
+@pytest.mark.asyncio
+async def test_transport_close_grpc_asyncio():
+    client = ContentServiceAsyncClient(
+        credentials=async_anonymous_credentials(),
+        transport="grpc_asyncio"
+    )
+    with mock.patch.object(type(getattr(client.transport, "_grpc_channel")), "close") as close:
+        async with client:
+            close.assert_not_called()
+        close.assert_called_once()
+
+
+def test_client_ctx():
+    transports = [
+        'grpc',
+    ]
+    for transport in transports:
+        client = ContentServiceClient(
+            credentials=ga_credentials.AnonymousCredentials(),
+            transport=transport
+        )
+        # Test client calls underlying transport.
+ with mock.patch.object(type(client.transport), "close") as close: + close.assert_not_called() + with client: + pass + close.assert_called() + +@pytest.mark.parametrize("client_class,transport_class", [ + (ContentServiceClient, transports.ContentServiceGrpcTransport), + (ContentServiceAsyncClient, transports.ContentServiceGrpcAsyncIOTransport), +]) +def test_api_key_credentials(client_class, transport_class): + with mock.patch.object( + google.auth._default, "get_api_key_credentials", create=True + ) as get_api_key_credentials: + mock_cred = mock.Mock() + get_api_key_credentials.return_value = mock_cred + options = client_options.ClientOptions() + options.api_key = "api_key" + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options) + patched.assert_called_once_with( + credentials=mock_cred, + credentials_file=None, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) diff --git a/owl-bot-staging/google-cloud-dataplex/v1/tests/unit/gapic/dataplex_v1/test_data_scan_service.py b/owl-bot-staging/google-cloud-dataplex/v1/tests/unit/gapic/dataplex_v1/test_data_scan_service.py new file mode 100644 index 000000000000..04fa20e609a4 --- /dev/null +++ b/owl-bot-staging/google-cloud-dataplex/v1/tests/unit/gapic/dataplex_v1/test_data_scan_service.py @@ -0,0 +1,6013 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +import os +# try/except added for compatibility with python < 3.8 +try: + from unittest import mock + from unittest.mock import AsyncMock # pragma: NO COVER +except ImportError: # pragma: NO COVER + import mock + +import grpc +from grpc.experimental import aio +import math +import pytest +from google.api_core import api_core_version +from proto.marshal.rules.dates import DurationRule, TimestampRule +from proto.marshal.rules import wrappers + +try: + from google.auth.aio import credentials as ga_credentials_async + HAS_GOOGLE_AUTH_AIO = True +except ImportError: # pragma: NO COVER + HAS_GOOGLE_AUTH_AIO = False + +from google.api_core import client_options +from google.api_core import exceptions as core_exceptions +from google.api_core import future +from google.api_core import gapic_v1 +from google.api_core import grpc_helpers +from google.api_core import grpc_helpers_async +from google.api_core import operation +from google.api_core import operation_async # type: ignore +from google.api_core import operations_v1 +from google.api_core import path_template +from google.api_core import retry as retries +from google.auth import credentials as ga_credentials +from google.auth.exceptions import MutualTLSChannelError +from google.cloud.dataplex_v1.services.data_scan_service import DataScanServiceAsyncClient +from google.cloud.dataplex_v1.services.data_scan_service import DataScanServiceClient +from google.cloud.dataplex_v1.services.data_scan_service import pagers +from google.cloud.dataplex_v1.services.data_scan_service import transports +from google.cloud.dataplex_v1.types import data_discovery +from google.cloud.dataplex_v1.types import data_profile +from google.cloud.dataplex_v1.types import data_quality +from google.cloud.dataplex_v1.types import datascans +from google.cloud.dataplex_v1.types import processing +from google.cloud.dataplex_v1.types import resources +from google.cloud.dataplex_v1.types import service +from google.cloud.location import locations_pb2 +from google.iam.v1 import iam_policy_pb2 # type: ignore +from google.iam.v1 import options_pb2 # type: ignore +from google.iam.v1 import policy_pb2 # type: ignore +from google.longrunning import operations_pb2 # type: ignore +from google.oauth2 import service_account +from google.protobuf import empty_pb2 # type: ignore +from google.protobuf import field_mask_pb2 # type: ignore +from google.protobuf import timestamp_pb2 # type: ignore +import google.auth + + +async def mock_async_gen(data, chunk_size=1): + for i in range(0, len(data)): # pragma: NO COVER + chunk = data[i : i + chunk_size] + yield chunk.encode("utf-8") + +def client_cert_source_callback(): + return b"cert bytes", b"key bytes" + +# TODO: use async auth anon credentials by default once the minimum version of google-auth is upgraded. +# See related issue: https://github.com/googleapis/gapic-generator-python/issues/2107. +def async_anonymous_credentials(): + if HAS_GOOGLE_AUTH_AIO: + return ga_credentials_async.AnonymousCredentials() + return ga_credentials.AnonymousCredentials() + +# If default endpoint is localhost, then default mtls endpoint will be the same. +# This method modifies the default endpoint so the client can produce a different +# mtls endpoint for endpoint testing purposes. +def modify_default_endpoint(client): + return "foo.googleapis.com" if ("localhost" in client.DEFAULT_ENDPOINT) else client.DEFAULT_ENDPOINT + +# If default endpoint template is localhost, then default mtls endpoint will be the same. 
+# This method modifies the default endpoint template so the client can produce a different +# mtls endpoint for endpoint testing purposes. +def modify_default_endpoint_template(client): + return "test.{UNIVERSE_DOMAIN}" if ("localhost" in client._DEFAULT_ENDPOINT_TEMPLATE) else client._DEFAULT_ENDPOINT_TEMPLATE + + +def test__get_default_mtls_endpoint(): + api_endpoint = "example.googleapis.com" + api_mtls_endpoint = "example.mtls.googleapis.com" + sandbox_endpoint = "example.sandbox.googleapis.com" + sandbox_mtls_endpoint = "example.mtls.sandbox.googleapis.com" + non_googleapi = "api.example.com" + + assert DataScanServiceClient._get_default_mtls_endpoint(None) is None + assert DataScanServiceClient._get_default_mtls_endpoint(api_endpoint) == api_mtls_endpoint + assert DataScanServiceClient._get_default_mtls_endpoint(api_mtls_endpoint) == api_mtls_endpoint + assert DataScanServiceClient._get_default_mtls_endpoint(sandbox_endpoint) == sandbox_mtls_endpoint + assert DataScanServiceClient._get_default_mtls_endpoint(sandbox_mtls_endpoint) == sandbox_mtls_endpoint + assert DataScanServiceClient._get_default_mtls_endpoint(non_googleapi) == non_googleapi + +def test__read_environment_variables(): + assert DataScanServiceClient._read_environment_variables() == (False, "auto", None) + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + assert DataScanServiceClient._read_environment_variables() == (True, "auto", None) + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "false"}): + assert DataScanServiceClient._read_environment_variables() == (False, "auto", None) + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"}): + with pytest.raises(ValueError) as excinfo: + DataScanServiceClient._read_environment_variables() + assert str(excinfo.value) == "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + assert DataScanServiceClient._read_environment_variables() == (False, "never", None) + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + assert DataScanServiceClient._read_environment_variables() == (False, "always", None) + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"}): + assert DataScanServiceClient._read_environment_variables() == (False, "auto", None) + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): + with pytest.raises(MutualTLSChannelError) as excinfo: + DataScanServiceClient._read_environment_variables() + assert str(excinfo.value) == "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + + with mock.patch.dict(os.environ, {"GOOGLE_CLOUD_UNIVERSE_DOMAIN": "foo.com"}): + assert DataScanServiceClient._read_environment_variables() == (False, "auto", "foo.com") + +def test__get_client_cert_source(): + mock_provided_cert_source = mock.Mock() + mock_default_cert_source = mock.Mock() + + assert DataScanServiceClient._get_client_cert_source(None, False) is None + assert DataScanServiceClient._get_client_cert_source(mock_provided_cert_source, False) is None + assert DataScanServiceClient._get_client_cert_source(mock_provided_cert_source, True) == mock_provided_cert_source + + with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=True): + with mock.patch('google.auth.transport.mtls.default_client_cert_source', 
return_value=mock_default_cert_source): + assert DataScanServiceClient._get_client_cert_source(None, True) is mock_default_cert_source + assert DataScanServiceClient._get_client_cert_source(mock_provided_cert_source, "true") is mock_provided_cert_source + +@mock.patch.object(DataScanServiceClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(DataScanServiceClient)) +@mock.patch.object(DataScanServiceAsyncClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(DataScanServiceAsyncClient)) +def test__get_api_endpoint(): + api_override = "foo.com" + mock_client_cert_source = mock.Mock() + default_universe = DataScanServiceClient._DEFAULT_UNIVERSE + default_endpoint = DataScanServiceClient._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=default_universe) + mock_universe = "bar.com" + mock_endpoint = DataScanServiceClient._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=mock_universe) + + assert DataScanServiceClient._get_api_endpoint(api_override, mock_client_cert_source, default_universe, "always") == api_override + assert DataScanServiceClient._get_api_endpoint(None, mock_client_cert_source, default_universe, "auto") == DataScanServiceClient.DEFAULT_MTLS_ENDPOINT + assert DataScanServiceClient._get_api_endpoint(None, None, default_universe, "auto") == default_endpoint + assert DataScanServiceClient._get_api_endpoint(None, None, default_universe, "always") == DataScanServiceClient.DEFAULT_MTLS_ENDPOINT + assert DataScanServiceClient._get_api_endpoint(None, mock_client_cert_source, default_universe, "always") == DataScanServiceClient.DEFAULT_MTLS_ENDPOINT + assert DataScanServiceClient._get_api_endpoint(None, None, mock_universe, "never") == mock_endpoint + assert DataScanServiceClient._get_api_endpoint(None, None, default_universe, "never") == default_endpoint + + with pytest.raises(MutualTLSChannelError) as excinfo: + DataScanServiceClient._get_api_endpoint(None, mock_client_cert_source, mock_universe, "auto") + assert str(excinfo.value) == "mTLS is not supported in any universe other than googleapis.com." + + +def test__get_universe_domain(): + client_universe_domain = "foo.com" + universe_domain_env = "bar.com" + + assert DataScanServiceClient._get_universe_domain(client_universe_domain, universe_domain_env) == client_universe_domain + assert DataScanServiceClient._get_universe_domain(None, universe_domain_env) == universe_domain_env + assert DataScanServiceClient._get_universe_domain(None, None) == DataScanServiceClient._DEFAULT_UNIVERSE + + with pytest.raises(ValueError) as excinfo: + DataScanServiceClient._get_universe_domain("", None) + assert str(excinfo.value) == "Universe Domain cannot be an empty string." 
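+
+
+# Illustrative sketch, not part of the generated surface: the endpoint helpers
+# exercised above resolve the host by formatting _DEFAULT_ENDPOINT_TEMPLATE
+# with a universe domain, so the default universe should show up in the
+# default endpoint.
+def test__default_endpoint_template_uses_default_universe():
+    default_universe = DataScanServiceClient._DEFAULT_UNIVERSE
+    endpoint = DataScanServiceClient._DEFAULT_ENDPOINT_TEMPLATE.format(
+        UNIVERSE_DOMAIN=default_universe
+    )
+    # With the stock client the default universe is "googleapis.com", so the
+    # formatted endpoint should end with it.
+    assert endpoint.endswith(default_universe)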
+ + +@pytest.mark.parametrize("client_class,transport_name", [ + (DataScanServiceClient, "grpc"), + (DataScanServiceAsyncClient, "grpc_asyncio"), +]) +def test_data_scan_service_client_from_service_account_info(client_class, transport_name): + creds = ga_credentials.AnonymousCredentials() + with mock.patch.object(service_account.Credentials, 'from_service_account_info') as factory: + factory.return_value = creds + info = {"valid": True} + client = client_class.from_service_account_info(info, transport=transport_name) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + assert client.transport._host == ( + 'dataplex.googleapis.com:443' + ) + + +@pytest.mark.parametrize("transport_class,transport_name", [ + (transports.DataScanServiceGrpcTransport, "grpc"), + (transports.DataScanServiceGrpcAsyncIOTransport, "grpc_asyncio"), +]) +def test_data_scan_service_client_service_account_always_use_jwt(transport_class, transport_name): + with mock.patch.object(service_account.Credentials, 'with_always_use_jwt_access', create=True) as use_jwt: + creds = service_account.Credentials(None, None, None) + transport = transport_class(credentials=creds, always_use_jwt_access=True) + use_jwt.assert_called_once_with(True) + + with mock.patch.object(service_account.Credentials, 'with_always_use_jwt_access', create=True) as use_jwt: + creds = service_account.Credentials(None, None, None) + transport = transport_class(credentials=creds, always_use_jwt_access=False) + use_jwt.assert_not_called() + + +@pytest.mark.parametrize("client_class,transport_name", [ + (DataScanServiceClient, "grpc"), + (DataScanServiceAsyncClient, "grpc_asyncio"), +]) +def test_data_scan_service_client_from_service_account_file(client_class, transport_name): + creds = ga_credentials.AnonymousCredentials() + with mock.patch.object(service_account.Credentials, 'from_service_account_file') as factory: + factory.return_value = creds + client = client_class.from_service_account_file("dummy/file/path.json", transport=transport_name) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + client = client_class.from_service_account_json("dummy/file/path.json", transport=transport_name) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + assert client.transport._host == ( + 'dataplex.googleapis.com:443' + ) + + +def test_data_scan_service_client_get_transport_class(): + transport = DataScanServiceClient.get_transport_class() + available_transports = [ + transports.DataScanServiceGrpcTransport, + ] + assert transport in available_transports + + transport = DataScanServiceClient.get_transport_class("grpc") + assert transport == transports.DataScanServiceGrpcTransport + + +@pytest.mark.parametrize("client_class,transport_class,transport_name", [ + (DataScanServiceClient, transports.DataScanServiceGrpcTransport, "grpc"), + (DataScanServiceAsyncClient, transports.DataScanServiceGrpcAsyncIOTransport, "grpc_asyncio"), +]) +@mock.patch.object(DataScanServiceClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(DataScanServiceClient)) +@mock.patch.object(DataScanServiceAsyncClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(DataScanServiceAsyncClient)) +def test_data_scan_service_client_client_options(client_class, transport_class, transport_name): + # Check that if channel is provided we won't create a new one. 
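+    # Handing the client a ready-made transport instance means its channel is
+    # reused as-is, so get_transport_class should never be consulted.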
+ with mock.patch.object(DataScanServiceClient, 'get_transport_class') as gtc: + transport = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ) + client = client_class(transport=transport) + gtc.assert_not_called() + + # Check that if channel is provided via str we will create a new one. + with mock.patch.object(DataScanServiceClient, 'get_transport_class') as gtc: + client = client_class(transport=transport_name) + gtc.assert_called() + + # Check the case api_endpoint is provided. + options = client_options.ClientOptions(api_endpoint="squid.clam.whelk") + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(transport=transport_name, client_options=options) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host="squid.clam.whelk", + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is + # "never". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is + # "always". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_MTLS_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has + # unsupported value. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): + with pytest.raises(MutualTLSChannelError) as excinfo: + client = client_class(transport=transport_name) + assert str(excinfo.value) == "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + + # Check the case GOOGLE_API_USE_CLIENT_CERTIFICATE has unsupported value. 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"}): + with pytest.raises(ValueError) as excinfo: + client = client_class(transport=transport_name) + assert str(excinfo.value) == "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" + + # Check the case quota_project_id is provided + options = client_options.ClientOptions(quota_project_id="octopus") + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id="octopus", + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + # Check the case api_endpoint is provided + options = client_options.ClientOptions(api_audience="https://language.googleapis.com") + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience="https://language.googleapis.com" + ) + +@pytest.mark.parametrize("client_class,transport_class,transport_name,use_client_cert_env", [ + (DataScanServiceClient, transports.DataScanServiceGrpcTransport, "grpc", "true"), + (DataScanServiceAsyncClient, transports.DataScanServiceGrpcAsyncIOTransport, "grpc_asyncio", "true"), + (DataScanServiceClient, transports.DataScanServiceGrpcTransport, "grpc", "false"), + (DataScanServiceAsyncClient, transports.DataScanServiceGrpcAsyncIOTransport, "grpc_asyncio", "false"), +]) +@mock.patch.object(DataScanServiceClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(DataScanServiceClient)) +@mock.patch.object(DataScanServiceAsyncClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(DataScanServiceAsyncClient)) +@mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"}) +def test_data_scan_service_client_mtls_env_auto(client_class, transport_class, transport_name, use_client_cert_env): + # This tests the endpoint autoswitch behavior. Endpoint is autoswitched to the default + # mtls endpoint, if GOOGLE_API_USE_CLIENT_CERTIFICATE is "true" and client cert exists. + + # Check the case client_cert_source is provided. Whether client cert is used depends on + # GOOGLE_API_USE_CLIENT_CERTIFICATE value. 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}): + options = client_options.ClientOptions(client_cert_source=client_cert_source_callback) + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + + if use_client_cert_env == "false": + expected_client_cert_source = None + expected_host = client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE) + else: + expected_client_cert_source = client_cert_source_callback + expected_host = client.DEFAULT_MTLS_ENDPOINT + + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=expected_host, + scopes=None, + client_cert_source_for_mtls=expected_client_cert_source, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case ADC client cert is provided. Whether client cert is used depends on + # GOOGLE_API_USE_CLIENT_CERTIFICATE value. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}): + with mock.patch.object(transport_class, '__init__') as patched: + with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=True): + with mock.patch('google.auth.transport.mtls.default_client_cert_source', return_value=client_cert_source_callback): + if use_client_cert_env == "false": + expected_host = client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE) + expected_client_cert_source = None + else: + expected_host = client.DEFAULT_MTLS_ENDPOINT + expected_client_cert_source = client_cert_source_callback + + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=expected_host, + scopes=None, + client_cert_source_for_mtls=expected_client_cert_source, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case client_cert_source and ADC client cert are not provided. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}): + with mock.patch.object(transport_class, '__init__') as patched: + with mock.patch("google.auth.transport.mtls.has_default_client_cert_source", return_value=False): + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +@pytest.mark.parametrize("client_class", [ + DataScanServiceClient, DataScanServiceAsyncClient +]) +@mock.patch.object(DataScanServiceClient, "DEFAULT_ENDPOINT", modify_default_endpoint(DataScanServiceClient)) +@mock.patch.object(DataScanServiceAsyncClient, "DEFAULT_ENDPOINT", modify_default_endpoint(DataScanServiceAsyncClient)) +def test_data_scan_service_client_get_mtls_endpoint_and_cert_source(client_class): + mock_client_cert_source = mock.Mock() + + # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "true". 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + mock_api_endpoint = "foo" + options = client_options.ClientOptions(client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint) + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source(options) + assert api_endpoint == mock_api_endpoint + assert cert_source == mock_client_cert_source + + # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "false". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "false"}): + mock_client_cert_source = mock.Mock() + mock_api_endpoint = "foo" + options = client_options.ClientOptions(client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint) + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source(options) + assert api_endpoint == mock_api_endpoint + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "never". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "always". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert doesn't exist. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=False): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert exists. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=True): + with mock.patch('google.auth.transport.mtls.default_client_cert_source', return_value=mock_client_cert_source): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT + assert cert_source == mock_client_cert_source + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has + # unsupported value. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): + with pytest.raises(MutualTLSChannelError) as excinfo: + client_class.get_mtls_endpoint_and_cert_source() + + assert str(excinfo.value) == "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + + # Check the case GOOGLE_API_USE_CLIENT_CERTIFICATE has unsupported value. 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"}): + with pytest.raises(ValueError) as excinfo: + client_class.get_mtls_endpoint_and_cert_source() + + assert str(excinfo.value) == "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" + +@pytest.mark.parametrize("client_class", [ + DataScanServiceClient, DataScanServiceAsyncClient +]) +@mock.patch.object(DataScanServiceClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(DataScanServiceClient)) +@mock.patch.object(DataScanServiceAsyncClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(DataScanServiceAsyncClient)) +def test_data_scan_service_client_client_api_endpoint(client_class): + mock_client_cert_source = client_cert_source_callback + api_override = "foo.com" + default_universe = DataScanServiceClient._DEFAULT_UNIVERSE + default_endpoint = DataScanServiceClient._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=default_universe) + mock_universe = "bar.com" + mock_endpoint = DataScanServiceClient._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=mock_universe) + + # If ClientOptions.api_endpoint is set and GOOGLE_API_USE_CLIENT_CERTIFICATE="true", + # use ClientOptions.api_endpoint as the api endpoint regardless. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + with mock.patch("google.auth.transport.requests.AuthorizedSession.configure_mtls_channel"): + options = client_options.ClientOptions(client_cert_source=mock_client_cert_source, api_endpoint=api_override) + client = client_class(client_options=options, credentials=ga_credentials.AnonymousCredentials()) + assert client.api_endpoint == api_override + + # If ClientOptions.api_endpoint is not set and GOOGLE_API_USE_MTLS_ENDPOINT="never", + # use the _DEFAULT_ENDPOINT_TEMPLATE populated with GDU as the api endpoint. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + client = client_class(credentials=ga_credentials.AnonymousCredentials()) + assert client.api_endpoint == default_endpoint + + # If ClientOptions.api_endpoint is not set and GOOGLE_API_USE_MTLS_ENDPOINT="always", + # use the DEFAULT_MTLS_ENDPOINT as the api endpoint. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + client = client_class(credentials=ga_credentials.AnonymousCredentials()) + assert client.api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT + + # If ClientOptions.api_endpoint is not set, GOOGLE_API_USE_MTLS_ENDPOINT="auto" (default), + # GOOGLE_API_USE_CLIENT_CERTIFICATE="false" (default), default cert source doesn't exist, + # and ClientOptions.universe_domain="bar.com", + # use the _DEFAULT_ENDPOINT_TEMPLATE populated with universe domain as the api endpoint. 
+ options = client_options.ClientOptions() + universe_exists = hasattr(options, "universe_domain") + if universe_exists: + options = client_options.ClientOptions(universe_domain=mock_universe) + client = client_class(client_options=options, credentials=ga_credentials.AnonymousCredentials()) + else: + client = client_class(client_options=options, credentials=ga_credentials.AnonymousCredentials()) + assert client.api_endpoint == (mock_endpoint if universe_exists else default_endpoint) + assert client.universe_domain == (mock_universe if universe_exists else default_universe) + + # If ClientOptions does not have a universe domain attribute and GOOGLE_API_USE_MTLS_ENDPOINT="never", + # use the _DEFAULT_ENDPOINT_TEMPLATE populated with GDU as the api endpoint. + options = client_options.ClientOptions() + if hasattr(options, "universe_domain"): + delattr(options, "universe_domain") + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + client = client_class(client_options=options, credentials=ga_credentials.AnonymousCredentials()) + assert client.api_endpoint == default_endpoint + + +@pytest.mark.parametrize("client_class,transport_class,transport_name", [ + (DataScanServiceClient, transports.DataScanServiceGrpcTransport, "grpc"), + (DataScanServiceAsyncClient, transports.DataScanServiceGrpcAsyncIOTransport, "grpc_asyncio"), +]) +def test_data_scan_service_client_client_options_scopes(client_class, transport_class, transport_name): + # Check the case scopes are provided. + options = client_options.ClientOptions( + scopes=["1", "2"], + ) + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE), + scopes=["1", "2"], + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + +@pytest.mark.parametrize("client_class,transport_class,transport_name,grpc_helpers", [ + (DataScanServiceClient, transports.DataScanServiceGrpcTransport, "grpc", grpc_helpers), + (DataScanServiceAsyncClient, transports.DataScanServiceGrpcAsyncIOTransport, "grpc_asyncio", grpc_helpers_async), +]) +def test_data_scan_service_client_client_options_credentials_file(client_class, transport_class, transport_name, grpc_helpers): + # Check the case credentials file is provided. 
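+ # "credentials.json" is only a placeholder path; the transport __init__ is + # mocked below, so the file is never actually read.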
+ options = client_options.ClientOptions( + credentials_file="credentials.json" + ) + + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file="credentials.json", + host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + +def test_data_scan_service_client_client_options_from_dict(): + with mock.patch('google.cloud.dataplex_v1.services.data_scan_service.transports.DataScanServiceGrpcTransport.__init__') as grpc_transport: + grpc_transport.return_value = None + client = DataScanServiceClient( + client_options={'api_endpoint': 'squid.clam.whelk'} + ) + grpc_transport.assert_called_once_with( + credentials=None, + credentials_file=None, + host="squid.clam.whelk", + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +@pytest.mark.parametrize("client_class,transport_class,transport_name,grpc_helpers", [ + (DataScanServiceClient, transports.DataScanServiceGrpcTransport, "grpc", grpc_helpers), + (DataScanServiceAsyncClient, transports.DataScanServiceGrpcAsyncIOTransport, "grpc_asyncio", grpc_helpers_async), +]) +def test_data_scan_service_client_create_channel_credentials_file(client_class, transport_class, transport_name, grpc_helpers): + # Check the case credentials file is provided. + options = client_options.ClientOptions( + credentials_file="credentials.json" + ) + + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file="credentials.json", + host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # test that the credentials from file are saved and used as the credentials. 
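+ # Both google.auth entry points are stubbed so no real file I/O or ADC lookup + # happens; the channel must still be built with the credentials from the file.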
+ with mock.patch.object( + google.auth, "load_credentials_from_file", autospec=True + ) as load_creds, mock.patch.object( + google.auth, "default", autospec=True + ) as adc, mock.patch.object( + grpc_helpers, "create_channel" + ) as create_channel: + creds = ga_credentials.AnonymousCredentials() + file_creds = ga_credentials.AnonymousCredentials() + load_creds.return_value = (file_creds, None) + adc.return_value = (creds, None) + client = client_class(client_options=options, transport=transport_name) + create_channel.assert_called_with( + "dataplex.googleapis.com:443", + credentials=file_creds, + credentials_file=None, + quota_project_id=None, + default_scopes=( + 'https://www.googleapis.com/auth/cloud-platform', + ), + scopes=None, + default_host="dataplex.googleapis.com", + ssl_credentials=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + +@pytest.mark.parametrize("request_type", [ + datascans.CreateDataScanRequest, + dict, +]) +def test_create_data_scan(request_type, transport: str = 'grpc'): + client = DataScanServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_data_scan), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name='operations/spam') + response = client.create_data_scan(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = datascans.CreateDataScanRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +def test_create_data_scan_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = DataScanServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = datascans.CreateDataScanRequest( + parent='parent_value', + data_scan_id='data_scan_id_value', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_data_scan), + '__call__') as call: + call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string.
+ client.create_data_scan(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == datascans.CreateDataScanRequest( + parent='parent_value', + data_scan_id='data_scan_id_value', + ) + +def test_create_data_scan_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = DataScanServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.create_data_scan in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client._transport._wrapped_methods[client._transport.create_data_scan] = mock_rpc + request = {} + client.create_data_scan(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.create_data_scan(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + +@pytest.mark.asyncio +async def test_create_data_scan_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = DataScanServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._client._transport.create_data_scan in client._client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[client._client._transport.create_data_scan] = mock_rpc + + request = {} + await client.create_data_scan(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + await client.create_data_scan(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + +@pytest.mark.asyncio +async def test_create_data_scan_async(transport: str = 'grpc_asyncio', request_type=datascans.CreateDataScanRequest): + client = DataScanServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. 
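+ # The async stub has to return an awaitable, hence the FakeUnaryUnaryCall + # wrapper around the operation below.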
+ with mock.patch.object( + type(client.transport.create_data_scan), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name='operations/spam') + ) + response = await client.create_data_scan(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = datascans.CreateDataScanRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +@pytest.mark.asyncio +async def test_create_data_scan_async_from_dict(): + await test_create_data_scan_async(request_type=dict) + +def test_create_data_scan_field_headers(): + client = DataScanServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = datascans.CreateDataScanRequest() + + request.parent = 'parent_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_data_scan), + '__call__') as call: + call.return_value = operations_pb2.Operation(name='operations/op') + client.create_data_scan(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'parent=parent_value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_create_data_scan_field_headers_async(): + client = DataScanServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = datascans.CreateDataScanRequest() + + request.parent = 'parent_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_data_scan), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(operations_pb2.Operation(name='operations/op')) + await client.create_data_scan(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'parent=parent_value', + ) in kw['metadata'] + + +def test_create_data_scan_flattened(): + client = DataScanServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_data_scan), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name='operations/op') + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.create_data_scan( + parent='parent_value', + data_scan=datascans.DataScan(name='name_value'), + data_scan_id='data_scan_id_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. 
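+ # Each flattened kwarg must have been folded into the single request message + # that the client handed to the stub.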
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = 'parent_value' + assert arg == mock_val + arg = args[0].data_scan + mock_val = datascans.DataScan(name='name_value') + assert arg == mock_val + arg = args[0].data_scan_id + mock_val = 'data_scan_id_value' + assert arg == mock_val + + +def test_create_data_scan_flattened_error(): + client = DataScanServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.create_data_scan( + datascans.CreateDataScanRequest(), + parent='parent_value', + data_scan=datascans.DataScan(name='name_value'), + data_scan_id='data_scan_id_value', + ) + +@pytest.mark.asyncio +async def test_create_data_scan_flattened_async(): + client = DataScanServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_data_scan), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name='operations/spam') + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.create_data_scan( + parent='parent_value', + data_scan=datascans.DataScan(name='name_value'), + data_scan_id='data_scan_id_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = 'parent_value' + assert arg == mock_val + arg = args[0].data_scan + mock_val = datascans.DataScan(name='name_value') + assert arg == mock_val + arg = args[0].data_scan_id + mock_val = 'data_scan_id_value' + assert arg == mock_val + +@pytest.mark.asyncio +async def test_create_data_scan_flattened_error_async(): + client = DataScanServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.create_data_scan( + datascans.CreateDataScanRequest(), + parent='parent_value', + data_scan=datascans.DataScan(name='name_value'), + data_scan_id='data_scan_id_value', + ) + + +@pytest.mark.parametrize("request_type", [ + datascans.UpdateDataScanRequest, + dict, +]) +def test_update_data_scan(request_type, transport: str = 'grpc'): + client = DataScanServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_data_scan), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name='operations/spam') + response = client.update_data_scan(request) + + # Establish that the underlying gRPC stub method was called.
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = datascans.UpdateDataScanRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +def test_update_data_scan_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = DataScanServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = datascans.UpdateDataScanRequest( + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_data_scan), + '__call__') as call: + call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client.update_data_scan(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == datascans.UpdateDataScanRequest( + ) + +def test_update_data_scan_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = DataScanServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.update_data_scan in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client._transport._wrapped_methods[client._transport.update_data_scan] = mock_rpc + request = {} + client.update_data_scan(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.update_data_scan(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + +@pytest.mark.asyncio +async def test_update_data_scan_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = DataScanServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._client._transport.update_data_scan in client._client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[client._client._transport.update_data_scan] = mock_rpc + + request = {} + await client.update_data_scan(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + await client.update_data_scan(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + +@pytest.mark.asyncio +async def test_update_data_scan_async(transport: str = 'grpc_asyncio', request_type=datascans.UpdateDataScanRequest): + client = DataScanServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_data_scan), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name='operations/spam') + ) + response = await client.update_data_scan(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = datascans.UpdateDataScanRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +@pytest.mark.asyncio +async def test_update_data_scan_async_from_dict(): + await test_update_data_scan_async(request_type=dict) + +def test_update_data_scan_field_headers(): + client = DataScanServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = datascans.UpdateDataScanRequest() + + request.data_scan.name = 'name_value' + + # Mock the actual call within the gRPC stub, and fake the request. 
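+ # The resource name lives on the nested data_scan message, so the routing + # header is derived from data_scan.name rather than a top-level field.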
+ with mock.patch.object( + type(client.transport.update_data_scan), + '__call__') as call: + call.return_value = operations_pb2.Operation(name='operations/op') + client.update_data_scan(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'data_scan.name=name_value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_update_data_scan_field_headers_async(): + client = DataScanServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = datascans.UpdateDataScanRequest() + + request.data_scan.name = 'name_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_data_scan), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(operations_pb2.Operation(name='operations/op')) + await client.update_data_scan(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'data_scan.name=name_value', + ) in kw['metadata'] + + +def test_update_data_scan_flattened(): + client = DataScanServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_data_scan), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name='operations/op') + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.update_data_scan( + data_scan=datascans.DataScan(name='name_value'), + update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].data_scan + mock_val = datascans.DataScan(name='name_value') + assert arg == mock_val + arg = args[0].update_mask + mock_val = field_mask_pb2.FieldMask(paths=['paths_value']) + assert arg == mock_val + + +def test_update_data_scan_flattened_error(): + client = DataScanServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.update_data_scan( + datascans.UpdateDataScanRequest(), + data_scan=datascans.DataScan(name='name_value'), + update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), + ) + +@pytest.mark.asyncio +async def test_update_data_scan_flattened_async(): + client = DataScanServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_data_scan), + '__call__') as call: + # Designate an appropriate return value for the call. 
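+ # Wrapped in FakeUnaryUnaryCall so the async client can await the result.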
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name='operations/spam') + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.update_data_scan( + data_scan=datascans.DataScan(name='name_value'), + update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].data_scan + mock_val = datascans.DataScan(name='name_value') + assert arg == mock_val + arg = args[0].update_mask + mock_val = field_mask_pb2.FieldMask(paths=['paths_value']) + assert arg == mock_val + +@pytest.mark.asyncio +async def test_update_data_scan_flattened_error_async(): + client = DataScanServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.update_data_scan( + datascans.UpdateDataScanRequest(), + data_scan=datascans.DataScan(name='name_value'), + update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), + ) + + +@pytest.mark.parametrize("request_type", [ + datascans.DeleteDataScanRequest, + dict, +]) +def test_delete_data_scan(request_type, transport: str = 'grpc'): + client = DataScanServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_data_scan), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name='operations/spam') + response = client.delete_data_scan(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = datascans.DeleteDataScanRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +def test_delete_data_scan_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = DataScanServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = datascans.DeleteDataScanRequest( + name='name_value', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_data_scan), + '__call__') as call: + call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string.
+ client.delete_data_scan(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == datascans.DeleteDataScanRequest( + name='name_value', + ) + +def test_delete_data_scan_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = DataScanServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.delete_data_scan in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client._transport._wrapped_methods[client._transport.delete_data_scan] = mock_rpc + request = {} + client.delete_data_scan(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.delete_data_scan(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + +@pytest.mark.asyncio +async def test_delete_data_scan_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = DataScanServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._client._transport.delete_data_scan in client._client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[client._client._transport.delete_data_scan] = mock_rpc + + request = {} + await client.delete_data_scan(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + await client.delete_data_scan(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + +@pytest.mark.asyncio +async def test_delete_data_scan_async(transport: str = 'grpc_asyncio', request_type=datascans.DeleteDataScanRequest): + client = DataScanServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.delete_data_scan), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name='operations/spam') + ) + response = await client.delete_data_scan(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = datascans.DeleteDataScanRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +@pytest.mark.asyncio +async def test_delete_data_scan_async_from_dict(): + await test_delete_data_scan_async(request_type=dict) + +def test_delete_data_scan_field_headers(): + client = DataScanServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = datascans.DeleteDataScanRequest() + + request.name = 'name_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_data_scan), + '__call__') as call: + call.return_value = operations_pb2.Operation(name='operations/op') + client.delete_data_scan(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'name=name_value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_delete_data_scan_field_headers_async(): + client = DataScanServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = datascans.DeleteDataScanRequest() + + request.name = 'name_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_data_scan), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(operations_pb2.Operation(name='operations/op')) + await client.delete_data_scan(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'name=name_value', + ) in kw['metadata'] + + +def test_delete_data_scan_flattened(): + client = DataScanServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_data_scan), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name='operations/op') + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.delete_data_scan( + name='name_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = 'name_value' + assert arg == mock_val + + +def test_delete_data_scan_flattened_error(): + client = DataScanServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.delete_data_scan( + datascans.DeleteDataScanRequest(), + name='name_value', + ) + +@pytest.mark.asyncio +async def test_delete_data_scan_flattened_async(): + client = DataScanServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_data_scan), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name='operations/spam') + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.delete_data_scan( + name='name_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = 'name_value' + assert arg == mock_val + +@pytest.mark.asyncio +async def test_delete_data_scan_flattened_error_async(): + client = DataScanServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.delete_data_scan( + datascans.DeleteDataScanRequest(), + name='name_value', + ) + + +@pytest.mark.parametrize("request_type", [ + datascans.GetDataScanRequest, + dict, +]) +def test_get_data_scan(request_type, transport: str = 'grpc'): + client = DataScanServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_data_scan), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = datascans.DataScan( + name='name_value', + uid='uid_value', + description='description_value', + display_name='display_name_value', + state=resources.State.ACTIVE, + type_=datascans.DataScanType.DATA_QUALITY, + ) + response = client.get_data_scan(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = datascans.GetDataScanRequest() + assert args[0] == request + + # Establish that the response is the type that we expect.
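+ # GetDataScan is a plain unary call, so the response is the resource itself + # rather than a long-running operation future.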
+ assert isinstance(response, datascans.DataScan) + assert response.name == 'name_value' + assert response.uid == 'uid_value' + assert response.description == 'description_value' + assert response.display_name == 'display_name_value' + assert response.state == resources.State.ACTIVE + assert response.type_ == datascans.DataScanType.DATA_QUALITY + + +def test_get_data_scan_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = DataScanServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = datascans.GetDataScanRequest( + name='name_value', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_data_scan), + '__call__') as call: + call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client.get_data_scan(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == datascans.GetDataScanRequest( + name='name_value', + ) + +def test_get_data_scan_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = DataScanServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.get_data_scan in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client._transport._wrapped_methods[client._transport.get_data_scan] = mock_rpc + request = {} + client.get_data_scan(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.get_data_scan(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + +@pytest.mark.asyncio +async def test_get_data_scan_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = DataScanServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._client._transport.get_data_scan in client._client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[client._client._transport.get_data_scan] = mock_rpc + + request = {} + await client.get_data_scan(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + await client.get_data_scan(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + +@pytest.mark.asyncio +async def test_get_data_scan_async(transport: str = 'grpc_asyncio', request_type=datascans.GetDataScanRequest): + client = DataScanServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_data_scan), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(datascans.DataScan( + name='name_value', + uid='uid_value', + description='description_value', + display_name='display_name_value', + state=resources.State.ACTIVE, + type_=datascans.DataScanType.DATA_QUALITY, + )) + response = await client.get_data_scan(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = datascans.GetDataScanRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, datascans.DataScan) + assert response.name == 'name_value' + assert response.uid == 'uid_value' + assert response.description == 'description_value' + assert response.display_name == 'display_name_value' + assert response.state == resources.State.ACTIVE + assert response.type_ == datascans.DataScanType.DATA_QUALITY + + +@pytest.mark.asyncio +async def test_get_data_scan_async_from_dict(): + await test_get_data_scan_async(request_type=dict) + +def test_get_data_scan_field_headers(): + client = DataScanServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = datascans.GetDataScanRequest() + + request.name = 'name_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_data_scan), + '__call__') as call: + call.return_value = datascans.DataScan() + client.get_data_scan(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'name=name_value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_get_data_scan_field_headers_async(): + client = DataScanServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = datascans.GetDataScanRequest() + + request.name = 'name_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_data_scan), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(datascans.DataScan()) + await client.get_data_scan(request) + + # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == request
+
+    # Establish that the field header was sent.
+    _, _, kw = call.mock_calls[0]
+    assert (
+        'x-goog-request-params',
+        'name=name_value',
+    ) in kw['metadata']
+
+
+def test_get_data_scan_flattened():
+    client = DataScanServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.get_data_scan),
+            '__call__') as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = datascans.DataScan()
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        client.get_data_scan(
+            name='name_value',
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        arg = args[0].name
+        mock_val = 'name_value'
+        assert arg == mock_val
+
+
+def test_get_data_scan_flattened_error():
+    client = DataScanServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        client.get_data_scan(
+            datascans.GetDataScanRequest(),
+            name='name_value',
+        )
+
+@pytest.mark.asyncio
+async def test_get_data_scan_flattened_async():
+    client = DataScanServiceAsyncClient(
+        credentials=async_anonymous_credentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.get_data_scan),
+            '__call__') as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(datascans.DataScan())
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        response = await client.get_data_scan(
+            name='name_value',
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        arg = args[0].name
+        mock_val = 'name_value'
+        assert arg == mock_val
+
+@pytest.mark.asyncio
+async def test_get_data_scan_flattened_error_async():
+    client = DataScanServiceAsyncClient(
+        credentials=async_anonymous_credentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        await client.get_data_scan(
+            datascans.GetDataScanRequest(),
+            name='name_value',
+        )
+
+
+@pytest.mark.parametrize("request_type", [
+    datascans.ListDataScansRequest,
+    dict,
+])
+def test_list_data_scans(request_type, transport: str = 'grpc'):
+    client = DataScanServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport,
+    )
+
+    # Everything is optional in proto3 as far as the runtime is concerned,
+    # and we are mocking out the actual API, so just send an empty request.
+    request = request_type()
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.list_data_scans),
+            '__call__') as call:
+        # Designate an appropriate return value for the call.
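+        # The client wraps this raw response in a ListDataScansPager, whose
+        # attribute lookups (next_page_token, unreachable) proxy through to
+        # the underlying response message. A minimal real-world sketch, with a
+        # hypothetical parent resource name:
+        #   for scan in client.list_data_scans(parent="projects/p/locations/l"):
+        #       print(scan.name)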
+ call.return_value = datascans.ListDataScansResponse( + next_page_token='next_page_token_value', + unreachable=['unreachable_value'], + ) + response = client.list_data_scans(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = datascans.ListDataScansRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListDataScansPager) + assert response.next_page_token == 'next_page_token_value' + assert response.unreachable == ['unreachable_value'] + + +def test_list_data_scans_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = DataScanServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = datascans.ListDataScansRequest( + parent='parent_value', + page_token='page_token_value', + filter='filter_value', + order_by='order_by_value', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_data_scans), + '__call__') as call: + call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client.list_data_scans(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == datascans.ListDataScansRequest( + parent='parent_value', + page_token='page_token_value', + filter='filter_value', + order_by='order_by_value', + ) + +def test_list_data_scans_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = DataScanServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.list_data_scans in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client._transport._wrapped_methods[client._transport.list_data_scans] = mock_rpc + request = {} + client.list_data_scans(request) + + # Establish that the underlying gRPC stub method was called. 
+        assert mock_rpc.call_count == 1
+
+        client.list_data_scans(request)
+
+        # Establish that a new wrapper was not created for this call
+        assert wrapper_fn.call_count == 0
+        assert mock_rpc.call_count == 2
+
+@pytest.mark.asyncio
+async def test_list_data_scans_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"):
+    # Clients should use _prep_wrapped_messages to create cached wrapped rpcs,
+    # instead of constructing them on each call
+    with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn:
+        client = DataScanServiceAsyncClient(
+            credentials=async_anonymous_credentials(),
+            transport=transport,
+        )
+
+        # Should wrap all calls on client creation
+        assert wrapper_fn.call_count > 0
+        wrapper_fn.reset_mock()
+
+        # Ensure method has been cached
+        assert client._client._transport.list_data_scans in client._client._transport._wrapped_methods
+
+        # Replace cached wrapped function with mock
+        mock_rpc = mock.AsyncMock()
+        mock_rpc.return_value = mock.Mock()
+        client._client._transport._wrapped_methods[client._client._transport.list_data_scans] = mock_rpc
+
+        request = {}
+        await client.list_data_scans(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert mock_rpc.call_count == 1
+
+        await client.list_data_scans(request)
+
+        # Establish that a new wrapper was not created for this call
+        assert wrapper_fn.call_count == 0
+        assert mock_rpc.call_count == 2
+
+@pytest.mark.asyncio
+async def test_list_data_scans_async(transport: str = 'grpc_asyncio', request_type=datascans.ListDataScansRequest):
+    client = DataScanServiceAsyncClient(
+        credentials=async_anonymous_credentials(),
+        transport=transport,
+    )
+
+    # Everything is optional in proto3 as far as the runtime is concerned,
+    # and we are mocking out the actual API, so just send an empty request.
+    request = request_type()
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.list_data_scans),
+            '__call__') as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(datascans.ListDataScansResponse(
+            next_page_token='next_page_token_value',
+            unreachable=['unreachable_value'],
+        ))
+        response = await client.list_data_scans(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        request = datascans.ListDataScansRequest()
+        assert args[0] == request
+
+    # Establish that the response is the type that we expect.
+    assert isinstance(response, pagers.ListDataScansAsyncPager)
+    assert response.next_page_token == 'next_page_token_value'
+    assert response.unreachable == ['unreachable_value']
+
+
+@pytest.mark.asyncio
+async def test_list_data_scans_async_from_dict():
+    await test_list_data_scans_async(request_type=dict)
+
+def test_list_data_scans_field_headers():
+    client = DataScanServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Any value that is part of the HTTP/1.1 URI should be sent as
+    # a field header. Set these to a non-empty value.
+    request = datascans.ListDataScansRequest()
+
+    request.parent = 'parent_value'
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.list_data_scans),
+            '__call__') as call:
+        call.return_value = datascans.ListDataScansResponse()
+        client.list_data_scans(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == request
+
+    # Establish that the field header was sent.
+    _, _, kw = call.mock_calls[0]
+    assert (
+        'x-goog-request-params',
+        'parent=parent_value',
+    ) in kw['metadata']
+
+
+@pytest.mark.asyncio
+async def test_list_data_scans_field_headers_async():
+    client = DataScanServiceAsyncClient(
+        credentials=async_anonymous_credentials(),
+    )
+
+    # Any value that is part of the HTTP/1.1 URI should be sent as
+    # a field header. Set these to a non-empty value.
+    request = datascans.ListDataScansRequest()
+
+    request.parent = 'parent_value'
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.list_data_scans),
+            '__call__') as call:
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(datascans.ListDataScansResponse())
+        await client.list_data_scans(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == request
+
+    # Establish that the field header was sent.
+    _, _, kw = call.mock_calls[0]
+    assert (
+        'x-goog-request-params',
+        'parent=parent_value',
+    ) in kw['metadata']
+
+
+def test_list_data_scans_flattened():
+    client = DataScanServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.list_data_scans),
+            '__call__') as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = datascans.ListDataScansResponse()
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        client.list_data_scans(
+            parent='parent_value',
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        arg = args[0].parent
+        mock_val = 'parent_value'
+        assert arg == mock_val
+
+
+def test_list_data_scans_flattened_error():
+    client = DataScanServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        client.list_data_scans(
+            datascans.ListDataScansRequest(),
+            parent='parent_value',
+        )
+
+@pytest.mark.asyncio
+async def test_list_data_scans_flattened_async():
+    client = DataScanServiceAsyncClient(
+        credentials=async_anonymous_credentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.list_data_scans),
+            '__call__') as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(datascans.ListDataScansResponse())
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        response = await client.list_data_scans(
+            parent='parent_value',
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
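+        # Flattened keyword arguments are coerced into a ListDataScansRequest
+        # by the client, so the captured call exposes them as ordinary request
+        # fields (args[0].parent below).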
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        arg = args[0].parent
+        mock_val = 'parent_value'
+        assert arg == mock_val
+
+@pytest.mark.asyncio
+async def test_list_data_scans_flattened_error_async():
+    client = DataScanServiceAsyncClient(
+        credentials=async_anonymous_credentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        await client.list_data_scans(
+            datascans.ListDataScansRequest(),
+            parent='parent_value',
+        )
+
+
+def test_list_data_scans_pager(transport_name: str = "grpc"):
+    client = DataScanServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport_name,
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.list_data_scans),
+            '__call__') as call:
+        # Set the response to a series of pages.
+        call.side_effect = (
+            datascans.ListDataScansResponse(
+                data_scans=[
+                    datascans.DataScan(),
+                    datascans.DataScan(),
+                    datascans.DataScan(),
+                ],
+                next_page_token='abc',
+            ),
+            datascans.ListDataScansResponse(
+                data_scans=[],
+                next_page_token='def',
+            ),
+            datascans.ListDataScansResponse(
+                data_scans=[
+                    datascans.DataScan(),
+                ],
+                next_page_token='ghi',
+            ),
+            datascans.ListDataScansResponse(
+                data_scans=[
+                    datascans.DataScan(),
+                    datascans.DataScan(),
+                ],
+            ),
+            RuntimeError,
+        )
+
+        expected_metadata = ()
+        retry = retries.Retry()
+        timeout = 5
+        expected_metadata = tuple(expected_metadata) + (
+            gapic_v1.routing_header.to_grpc_metadata((
+                ('parent', ''),
+            )),
+        )
+        pager = client.list_data_scans(request={}, retry=retry, timeout=timeout)
+
+        assert pager._metadata == expected_metadata
+        assert pager._retry == retry
+        assert pager._timeout == timeout
+
+        results = list(pager)
+        assert len(results) == 6
+        assert all(isinstance(i, datascans.DataScan)
+                   for i in results)
+
+
+def test_list_data_scans_pages(transport_name: str = "grpc"):
+    client = DataScanServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport_name,
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.list_data_scans),
+            '__call__') as call:
+        # Set the response to a series of pages.
+        call.side_effect = (
+            datascans.ListDataScansResponse(
+                data_scans=[
+                    datascans.DataScan(),
+                    datascans.DataScan(),
+                    datascans.DataScan(),
+                ],
+                next_page_token='abc',
+            ),
+            datascans.ListDataScansResponse(
+                data_scans=[],
+                next_page_token='def',
+            ),
+            datascans.ListDataScansResponse(
+                data_scans=[
+                    datascans.DataScan(),
+                ],
+                next_page_token='ghi',
+            ),
+            datascans.ListDataScansResponse(
+                data_scans=[
+                    datascans.DataScan(),
+                    datascans.DataScan(),
+                ],
+            ),
+            RuntimeError,
+        )
+        pages = list(client.list_data_scans(request={}).pages)
+        for page_, token in zip(pages, ['abc','def','ghi', '']):
+            assert page_.raw_page.next_page_token == token
+
+@pytest.mark.asyncio
+async def test_list_data_scans_async_pager():
+    client = DataScanServiceAsyncClient(
+        credentials=async_anonymous_credentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.list_data_scans),
+            '__call__', new_callable=mock.AsyncMock) as call:
+        # Set the response to a series of pages.
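+        # Each element of side_effect is consumed by one successive RPC; the
+        # trailing RuntimeError is a sentinel that fails loudly if the pager
+        # fetches past the final page (whose next_page_token is empty).
+        # A minimal async usage sketch, assuming a hypothetical parent value:
+        #   pager = await client.list_data_scans(parent="projects/p/locations/l")
+        #   async for scan in pager:
+        #       print(scan.name)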
+ call.side_effect = ( + datascans.ListDataScansResponse( + data_scans=[ + datascans.DataScan(), + datascans.DataScan(), + datascans.DataScan(), + ], + next_page_token='abc', + ), + datascans.ListDataScansResponse( + data_scans=[], + next_page_token='def', + ), + datascans.ListDataScansResponse( + data_scans=[ + datascans.DataScan(), + ], + next_page_token='ghi', + ), + datascans.ListDataScansResponse( + data_scans=[ + datascans.DataScan(), + datascans.DataScan(), + ], + ), + RuntimeError, + ) + async_pager = await client.list_data_scans(request={},) + assert async_pager.next_page_token == 'abc' + responses = [] + async for response in async_pager: # pragma: no branch + responses.append(response) + + assert len(responses) == 6 + assert all(isinstance(i, datascans.DataScan) + for i in responses) + + +@pytest.mark.asyncio +async def test_list_data_scans_async_pages(): + client = DataScanServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_data_scans), + '__call__', new_callable=mock.AsyncMock) as call: + # Set the response to a series of pages. + call.side_effect = ( + datascans.ListDataScansResponse( + data_scans=[ + datascans.DataScan(), + datascans.DataScan(), + datascans.DataScan(), + ], + next_page_token='abc', + ), + datascans.ListDataScansResponse( + data_scans=[], + next_page_token='def', + ), + datascans.ListDataScansResponse( + data_scans=[ + datascans.DataScan(), + ], + next_page_token='ghi', + ), + datascans.ListDataScansResponse( + data_scans=[ + datascans.DataScan(), + datascans.DataScan(), + ], + ), + RuntimeError, + ) + pages = [] + # Workaround issue in python 3.9 related to code coverage by adding `# pragma: no branch` + # See https://github.com/googleapis/gapic-generator-python/pull/1174#issuecomment-1025132372 + async for page_ in ( # pragma: no branch + await client.list_data_scans(request={}) + ).pages: + pages.append(page_) + for page_, token in zip(pages, ['abc','def','ghi', '']): + assert page_.raw_page.next_page_token == token + +@pytest.mark.parametrize("request_type", [ + datascans.RunDataScanRequest, + dict, +]) +def test_run_data_scan(request_type, transport: str = 'grpc'): + client = DataScanServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.run_data_scan), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = datascans.RunDataScanResponse( + ) + response = client.run_data_scan(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = datascans.RunDataScanRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, datascans.RunDataScanResponse) + + +def test_run_data_scan_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. 
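+    # AIP-4235 concerns auto-population of UUID4 request_id-style fields;
+    # RunDataScanRequest appears to define none, so the equality assertion
+    # below expects the request to round-trip with only the explicit name set.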
+ client = DataScanServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = datascans.RunDataScanRequest( + name='name_value', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.run_data_scan), + '__call__') as call: + call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client.run_data_scan(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == datascans.RunDataScanRequest( + name='name_value', + ) + +def test_run_data_scan_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = DataScanServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.run_data_scan in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client._transport._wrapped_methods[client._transport.run_data_scan] = mock_rpc + request = {} + client.run_data_scan(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.run_data_scan(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + +@pytest.mark.asyncio +async def test_run_data_scan_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = DataScanServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._client._transport.run_data_scan in client._client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[client._client._transport.run_data_scan] = mock_rpc + + request = {} + await client.run_data_scan(request) + + # Establish that the underlying gRPC stub method was called. 
+        assert mock_rpc.call_count == 1
+
+        await client.run_data_scan(request)
+
+        # Establish that a new wrapper was not created for this call
+        assert wrapper_fn.call_count == 0
+        assert mock_rpc.call_count == 2
+
+@pytest.mark.asyncio
+async def test_run_data_scan_async(transport: str = 'grpc_asyncio', request_type=datascans.RunDataScanRequest):
+    client = DataScanServiceAsyncClient(
+        credentials=async_anonymous_credentials(),
+        transport=transport,
+    )
+
+    # Everything is optional in proto3 as far as the runtime is concerned,
+    # and we are mocking out the actual API, so just send an empty request.
+    request = request_type()
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.run_data_scan),
+            '__call__') as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(datascans.RunDataScanResponse(
+        ))
+        response = await client.run_data_scan(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        request = datascans.RunDataScanRequest()
+        assert args[0] == request
+
+    # Establish that the response is the type that we expect.
+    assert isinstance(response, datascans.RunDataScanResponse)
+
+
+@pytest.mark.asyncio
+async def test_run_data_scan_async_from_dict():
+    await test_run_data_scan_async(request_type=dict)
+
+def test_run_data_scan_field_headers():
+    client = DataScanServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Any value that is part of the HTTP/1.1 URI should be sent as
+    # a field header. Set these to a non-empty value.
+    request = datascans.RunDataScanRequest()
+
+    request.name = 'name_value'
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.run_data_scan),
+            '__call__') as call:
+        call.return_value = datascans.RunDataScanResponse()
+        client.run_data_scan(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == request
+
+    # Establish that the field header was sent.
+    _, _, kw = call.mock_calls[0]
+    assert (
+        'x-goog-request-params',
+        'name=name_value',
+    ) in kw['metadata']
+
+
+@pytest.mark.asyncio
+async def test_run_data_scan_field_headers_async():
+    client = DataScanServiceAsyncClient(
+        credentials=async_anonymous_credentials(),
+    )
+
+    # Any value that is part of the HTTP/1.1 URI should be sent as
+    # a field header. Set these to a non-empty value.
+    request = datascans.RunDataScanRequest()
+
+    request.name = 'name_value'
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.run_data_scan),
+            '__call__') as call:
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(datascans.RunDataScanResponse())
+        await client.run_data_scan(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == request
+
+    # Establish that the field header was sent.
+    _, _, kw = call.mock_calls[0]
+    assert (
+        'x-goog-request-params',
+        'name=name_value',
+    ) in kw['metadata']
+
+
+def test_run_data_scan_flattened():
+    client = DataScanServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.run_data_scan),
+            '__call__') as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = datascans.RunDataScanResponse()
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        client.run_data_scan(
+            name='name_value',
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        arg = args[0].name
+        mock_val = 'name_value'
+        assert arg == mock_val
+
+
+def test_run_data_scan_flattened_error():
+    client = DataScanServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        client.run_data_scan(
+            datascans.RunDataScanRequest(),
+            name='name_value',
+        )
+
+@pytest.mark.asyncio
+async def test_run_data_scan_flattened_async():
+    client = DataScanServiceAsyncClient(
+        credentials=async_anonymous_credentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.run_data_scan),
+            '__call__') as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(datascans.RunDataScanResponse())
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        response = await client.run_data_scan(
+            name='name_value',
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        arg = args[0].name
+        mock_val = 'name_value'
+        assert arg == mock_val
+
+@pytest.mark.asyncio
+async def test_run_data_scan_flattened_error_async():
+    client = DataScanServiceAsyncClient(
+        credentials=async_anonymous_credentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        await client.run_data_scan(
+            datascans.RunDataScanRequest(),
+            name='name_value',
+        )
+
+
+@pytest.mark.parametrize("request_type", [
+    datascans.GetDataScanJobRequest,
+    dict,
+])
+def test_get_data_scan_job(request_type, transport: str = 'grpc'):
+    client = DataScanServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport,
+    )
+
+    # Everything is optional in proto3 as far as the runtime is concerned,
+    # and we are mocking out the actual API, so just send an empty request.
+    request = request_type()
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.get_data_scan_job),
+            '__call__') as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = datascans.DataScanJob(
+            name='name_value',
+            uid='uid_value',
+            state=datascans.DataScanJob.State.RUNNING,
+            message='message_value',
+            type_=datascans.DataScanType.DATA_QUALITY,
+        )
+        response = client.get_data_scan_job(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        request = datascans.GetDataScanJobRequest()
+        assert args[0] == request
+
+    # Establish that the response is the type that we expect.
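+    # proto-plus renames the proto field `type` to `type_` so that it does not
+    # shadow the Python builtin; the assertions below use that spelling.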
+ assert isinstance(response, datascans.DataScanJob) + assert response.name == 'name_value' + assert response.uid == 'uid_value' + assert response.state == datascans.DataScanJob.State.RUNNING + assert response.message == 'message_value' + assert response.type_ == datascans.DataScanType.DATA_QUALITY + + +def test_get_data_scan_job_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = DataScanServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = datascans.GetDataScanJobRequest( + name='name_value', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_data_scan_job), + '__call__') as call: + call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client.get_data_scan_job(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == datascans.GetDataScanJobRequest( + name='name_value', + ) + +def test_get_data_scan_job_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = DataScanServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.get_data_scan_job in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client._transport._wrapped_methods[client._transport.get_data_scan_job] = mock_rpc + request = {} + client.get_data_scan_job(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.get_data_scan_job(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + +@pytest.mark.asyncio +async def test_get_data_scan_job_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = DataScanServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._client._transport.get_data_scan_job in client._client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[client._client._transport.get_data_scan_job] = mock_rpc + + request = {} + await client.get_data_scan_job(request) + + # Establish that the underlying gRPC stub method was called. 
+        assert mock_rpc.call_count == 1
+
+        await client.get_data_scan_job(request)
+
+        # Establish that a new wrapper was not created for this call
+        assert wrapper_fn.call_count == 0
+        assert mock_rpc.call_count == 2
+
+@pytest.mark.asyncio
+async def test_get_data_scan_job_async(transport: str = 'grpc_asyncio', request_type=datascans.GetDataScanJobRequest):
+    client = DataScanServiceAsyncClient(
+        credentials=async_anonymous_credentials(),
+        transport=transport,
+    )
+
+    # Everything is optional in proto3 as far as the runtime is concerned,
+    # and we are mocking out the actual API, so just send an empty request.
+    request = request_type()
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.get_data_scan_job),
+            '__call__') as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(datascans.DataScanJob(
+            name='name_value',
+            uid='uid_value',
+            state=datascans.DataScanJob.State.RUNNING,
+            message='message_value',
+            type_=datascans.DataScanType.DATA_QUALITY,
+        ))
+        response = await client.get_data_scan_job(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        request = datascans.GetDataScanJobRequest()
+        assert args[0] == request
+
+    # Establish that the response is the type that we expect.
+    assert isinstance(response, datascans.DataScanJob)
+    assert response.name == 'name_value'
+    assert response.uid == 'uid_value'
+    assert response.state == datascans.DataScanJob.State.RUNNING
+    assert response.message == 'message_value'
+    assert response.type_ == datascans.DataScanType.DATA_QUALITY
+
+
+@pytest.mark.asyncio
+async def test_get_data_scan_job_async_from_dict():
+    await test_get_data_scan_job_async(request_type=dict)
+
+def test_get_data_scan_job_field_headers():
+    client = DataScanServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Any value that is part of the HTTP/1.1 URI should be sent as
+    # a field header. Set these to a non-empty value.
+    request = datascans.GetDataScanJobRequest()
+
+    request.name = 'name_value'
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.get_data_scan_job),
+            '__call__') as call:
+        call.return_value = datascans.DataScanJob()
+        client.get_data_scan_job(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == request
+
+    # Establish that the field header was sent.
+    _, _, kw = call.mock_calls[0]
+    assert (
+        'x-goog-request-params',
+        'name=name_value',
+    ) in kw['metadata']
+
+
+@pytest.mark.asyncio
+async def test_get_data_scan_job_field_headers_async():
+    client = DataScanServiceAsyncClient(
+        credentials=async_anonymous_credentials(),
+    )
+
+    # Any value that is part of the HTTP/1.1 URI should be sent as
+    # a field header. Set these to a non-empty value.
+    request = datascans.GetDataScanJobRequest()
+
+    request.name = 'name_value'
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.get_data_scan_job),
+            '__call__') as call:
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(datascans.DataScanJob())
+        await client.get_data_scan_job(request)
+
+        # Establish that the underlying gRPC stub method was called.
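+        # grpc_helpers_async.FakeUnaryUnaryCall wraps the canned message in an
+        # awaitable, standing in for the call object a real unary-unary gRPC
+        # stub would return.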
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == request
+
+    # Establish that the field header was sent.
+    _, _, kw = call.mock_calls[0]
+    assert (
+        'x-goog-request-params',
+        'name=name_value',
+    ) in kw['metadata']
+
+
+def test_get_data_scan_job_flattened():
+    client = DataScanServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.get_data_scan_job),
+            '__call__') as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = datascans.DataScanJob()
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        client.get_data_scan_job(
+            name='name_value',
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        arg = args[0].name
+        mock_val = 'name_value'
+        assert arg == mock_val
+
+
+def test_get_data_scan_job_flattened_error():
+    client = DataScanServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        client.get_data_scan_job(
+            datascans.GetDataScanJobRequest(),
+            name='name_value',
+        )
+
+@pytest.mark.asyncio
+async def test_get_data_scan_job_flattened_async():
+    client = DataScanServiceAsyncClient(
+        credentials=async_anonymous_credentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.get_data_scan_job),
+            '__call__') as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(datascans.DataScanJob())
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        response = await client.get_data_scan_job(
+            name='name_value',
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        arg = args[0].name
+        mock_val = 'name_value'
+        assert arg == mock_val
+
+@pytest.mark.asyncio
+async def test_get_data_scan_job_flattened_error_async():
+    client = DataScanServiceAsyncClient(
+        credentials=async_anonymous_credentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        await client.get_data_scan_job(
+            datascans.GetDataScanJobRequest(),
+            name='name_value',
+        )
+
+
+@pytest.mark.parametrize("request_type", [
+    datascans.ListDataScanJobsRequest,
+    dict,
+])
+def test_list_data_scan_jobs(request_type, transport: str = 'grpc'):
+    client = DataScanServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport,
+    )
+
+    # Everything is optional in proto3 as far as the runtime is concerned,
+    # and we are mocking out the actual API, so just send an empty request.
+    request = request_type()
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.list_data_scan_jobs),
+            '__call__') as call:
+        # Designate an appropriate return value for the call.
+ call.return_value = datascans.ListDataScanJobsResponse( + next_page_token='next_page_token_value', + ) + response = client.list_data_scan_jobs(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = datascans.ListDataScanJobsRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListDataScanJobsPager) + assert response.next_page_token == 'next_page_token_value' + + +def test_list_data_scan_jobs_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = DataScanServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = datascans.ListDataScanJobsRequest( + parent='parent_value', + page_token='page_token_value', + filter='filter_value', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_data_scan_jobs), + '__call__') as call: + call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client.list_data_scan_jobs(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == datascans.ListDataScanJobsRequest( + parent='parent_value', + page_token='page_token_value', + filter='filter_value', + ) + +def test_list_data_scan_jobs_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = DataScanServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.list_data_scan_jobs in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client._transport._wrapped_methods[client._transport.list_data_scan_jobs] = mock_rpc + request = {} + client.list_data_scan_jobs(request) + + # Establish that the underlying gRPC stub method was called. 
+        assert mock_rpc.call_count == 1
+
+        client.list_data_scan_jobs(request)
+
+        # Establish that a new wrapper was not created for this call
+        assert wrapper_fn.call_count == 0
+        assert mock_rpc.call_count == 2
+
+@pytest.mark.asyncio
+async def test_list_data_scan_jobs_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"):
+    # Clients should use _prep_wrapped_messages to create cached wrapped rpcs,
+    # instead of constructing them on each call
+    with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn:
+        client = DataScanServiceAsyncClient(
+            credentials=async_anonymous_credentials(),
+            transport=transport,
+        )
+
+        # Should wrap all calls on client creation
+        assert wrapper_fn.call_count > 0
+        wrapper_fn.reset_mock()
+
+        # Ensure method has been cached
+        assert client._client._transport.list_data_scan_jobs in client._client._transport._wrapped_methods
+
+        # Replace cached wrapped function with mock
+        mock_rpc = mock.AsyncMock()
+        mock_rpc.return_value = mock.Mock()
+        client._client._transport._wrapped_methods[client._client._transport.list_data_scan_jobs] = mock_rpc
+
+        request = {}
+        await client.list_data_scan_jobs(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert mock_rpc.call_count == 1
+
+        await client.list_data_scan_jobs(request)
+
+        # Establish that a new wrapper was not created for this call
+        assert wrapper_fn.call_count == 0
+        assert mock_rpc.call_count == 2
+
+@pytest.mark.asyncio
+async def test_list_data_scan_jobs_async(transport: str = 'grpc_asyncio', request_type=datascans.ListDataScanJobsRequest):
+    client = DataScanServiceAsyncClient(
+        credentials=async_anonymous_credentials(),
+        transport=transport,
+    )
+
+    # Everything is optional in proto3 as far as the runtime is concerned,
+    # and we are mocking out the actual API, so just send an empty request.
+    request = request_type()
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.list_data_scan_jobs),
+            '__call__') as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(datascans.ListDataScanJobsResponse(
+            next_page_token='next_page_token_value',
+        ))
+        response = await client.list_data_scan_jobs(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        request = datascans.ListDataScanJobsRequest()
+        assert args[0] == request
+
+    # Establish that the response is the type that we expect.
+    assert isinstance(response, pagers.ListDataScanJobsAsyncPager)
+    assert response.next_page_token == 'next_page_token_value'
+
+
+@pytest.mark.asyncio
+async def test_list_data_scan_jobs_async_from_dict():
+    await test_list_data_scan_jobs_async(request_type=dict)
+
+def test_list_data_scan_jobs_field_headers():
+    client = DataScanServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Any value that is part of the HTTP/1.1 URI should be sent as
+    # a field header. Set these to a non-empty value.
+    request = datascans.ListDataScanJobsRequest()
+
+    request.parent = 'parent_value'
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.list_data_scan_jobs),
+            '__call__') as call:
+        call.return_value = datascans.ListDataScanJobsResponse()
+        client.list_data_scan_jobs(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == request
+
+    # Establish that the field header was sent.
+    _, _, kw = call.mock_calls[0]
+    assert (
+        'x-goog-request-params',
+        'parent=parent_value',
+    ) in kw['metadata']
+
+
+@pytest.mark.asyncio
+async def test_list_data_scan_jobs_field_headers_async():
+    client = DataScanServiceAsyncClient(
+        credentials=async_anonymous_credentials(),
+    )
+
+    # Any value that is part of the HTTP/1.1 URI should be sent as
+    # a field header. Set these to a non-empty value.
+    request = datascans.ListDataScanJobsRequest()
+
+    request.parent = 'parent_value'
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.list_data_scan_jobs),
+            '__call__') as call:
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(datascans.ListDataScanJobsResponse())
+        await client.list_data_scan_jobs(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == request
+
+    # Establish that the field header was sent.
+    _, _, kw = call.mock_calls[0]
+    assert (
+        'x-goog-request-params',
+        'parent=parent_value',
+    ) in kw['metadata']
+
+
+def test_list_data_scan_jobs_flattened():
+    client = DataScanServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.list_data_scan_jobs),
+            '__call__') as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = datascans.ListDataScanJobsResponse()
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        client.list_data_scan_jobs(
+            parent='parent_value',
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        arg = args[0].parent
+        mock_val = 'parent_value'
+        assert arg == mock_val
+
+
+def test_list_data_scan_jobs_flattened_error():
+    client = DataScanServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        client.list_data_scan_jobs(
+            datascans.ListDataScanJobsRequest(),
+            parent='parent_value',
+        )
+
+@pytest.mark.asyncio
+async def test_list_data_scan_jobs_flattened_async():
+    client = DataScanServiceAsyncClient(
+        credentials=async_anonymous_credentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.list_data_scan_jobs),
+            '__call__') as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(datascans.ListDataScanJobsResponse())
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        response = await client.list_data_scan_jobs(
+            parent='parent_value',
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        arg = args[0].parent
+        mock_val = 'parent_value'
+        assert arg == mock_val
+
+@pytest.mark.asyncio
+async def test_list_data_scan_jobs_flattened_error_async():
+    client = DataScanServiceAsyncClient(
+        credentials=async_anonymous_credentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        await client.list_data_scan_jobs(
+            datascans.ListDataScanJobsRequest(),
+            parent='parent_value',
+        )
+
+
+def test_list_data_scan_jobs_pager(transport_name: str = "grpc"):
+    client = DataScanServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport_name,
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.list_data_scan_jobs),
+            '__call__') as call:
+        # Set the response to a series of pages.
+        call.side_effect = (
+            datascans.ListDataScanJobsResponse(
+                data_scan_jobs=[
+                    datascans.DataScanJob(),
+                    datascans.DataScanJob(),
+                    datascans.DataScanJob(),
+                ],
+                next_page_token='abc',
+            ),
+            datascans.ListDataScanJobsResponse(
+                data_scan_jobs=[],
+                next_page_token='def',
+            ),
+            datascans.ListDataScanJobsResponse(
+                data_scan_jobs=[
+                    datascans.DataScanJob(),
+                ],
+                next_page_token='ghi',
+            ),
+            datascans.ListDataScanJobsResponse(
+                data_scan_jobs=[
+                    datascans.DataScanJob(),
+                    datascans.DataScanJob(),
+                ],
+            ),
+            RuntimeError,
+        )
+
+        expected_metadata = ()
+        retry = retries.Retry()
+        timeout = 5
+        expected_metadata = tuple(expected_metadata) + (
+            gapic_v1.routing_header.to_grpc_metadata((
+                ('parent', ''),
+            )),
+        )
+        pager = client.list_data_scan_jobs(request={}, retry=retry, timeout=timeout)
+
+        assert pager._metadata == expected_metadata
+        assert pager._retry == retry
+        assert pager._timeout == timeout
+
+        results = list(pager)
+        assert len(results) == 6
+        assert all(isinstance(i, datascans.DataScanJob)
+                   for i in results)
+
+
+def test_list_data_scan_jobs_pages(transport_name: str = "grpc"):
+    client = DataScanServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport_name,
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.list_data_scan_jobs),
+            '__call__') as call:
+        # Set the response to a series of pages.
+        call.side_effect = (
+            datascans.ListDataScanJobsResponse(
+                data_scan_jobs=[
+                    datascans.DataScanJob(),
+                    datascans.DataScanJob(),
+                    datascans.DataScanJob(),
+                ],
+                next_page_token='abc',
+            ),
+            datascans.ListDataScanJobsResponse(
+                data_scan_jobs=[],
+                next_page_token='def',
+            ),
+            datascans.ListDataScanJobsResponse(
+                data_scan_jobs=[
+                    datascans.DataScanJob(),
+                ],
+                next_page_token='ghi',
+            ),
+            datascans.ListDataScanJobsResponse(
+                data_scan_jobs=[
+                    datascans.DataScanJob(),
+                    datascans.DataScanJob(),
+                ],
+            ),
+            RuntimeError,
+        )
+        pages = list(client.list_data_scan_jobs(request={}).pages)
+        for page_, token in zip(pages, ['abc','def','ghi', '']):
+            assert page_.raw_page.next_page_token == token
+
+@pytest.mark.asyncio
+async def test_list_data_scan_jobs_async_pager():
+    client = DataScanServiceAsyncClient(
+        credentials=async_anonymous_credentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.list_data_scan_jobs),
+            '__call__', new_callable=mock.AsyncMock) as call:
+        # Set the response to a series of pages.
+ call.side_effect = ( + datascans.ListDataScanJobsResponse( + data_scan_jobs=[ + datascans.DataScanJob(), + datascans.DataScanJob(), + datascans.DataScanJob(), + ], + next_page_token='abc', + ), + datascans.ListDataScanJobsResponse( + data_scan_jobs=[], + next_page_token='def', + ), + datascans.ListDataScanJobsResponse( + data_scan_jobs=[ + datascans.DataScanJob(), + ], + next_page_token='ghi', + ), + datascans.ListDataScanJobsResponse( + data_scan_jobs=[ + datascans.DataScanJob(), + datascans.DataScanJob(), + ], + ), + RuntimeError, + ) + async_pager = await client.list_data_scan_jobs(request={},) + assert async_pager.next_page_token == 'abc' + responses = [] + async for response in async_pager: # pragma: no branch + responses.append(response) + + assert len(responses) == 6 + assert all(isinstance(i, datascans.DataScanJob) + for i in responses) + + +@pytest.mark.asyncio +async def test_list_data_scan_jobs_async_pages(): + client = DataScanServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_data_scan_jobs), + '__call__', new_callable=mock.AsyncMock) as call: + # Set the response to a series of pages. + call.side_effect = ( + datascans.ListDataScanJobsResponse( + data_scan_jobs=[ + datascans.DataScanJob(), + datascans.DataScanJob(), + datascans.DataScanJob(), + ], + next_page_token='abc', + ), + datascans.ListDataScanJobsResponse( + data_scan_jobs=[], + next_page_token='def', + ), + datascans.ListDataScanJobsResponse( + data_scan_jobs=[ + datascans.DataScanJob(), + ], + next_page_token='ghi', + ), + datascans.ListDataScanJobsResponse( + data_scan_jobs=[ + datascans.DataScanJob(), + datascans.DataScanJob(), + ], + ), + RuntimeError, + ) + pages = [] + # Workaround issue in python 3.9 related to code coverage by adding `# pragma: no branch` + # See https://github.com/googleapis/gapic-generator-python/pull/1174#issuecomment-1025132372 + async for page_ in ( # pragma: no branch + await client.list_data_scan_jobs(request={}) + ).pages: + pages.append(page_) + for page_, token in zip(pages, ['abc','def','ghi', '']): + assert page_.raw_page.next_page_token == token + +@pytest.mark.parametrize("request_type", [ + datascans.GenerateDataQualityRulesRequest, + dict, +]) +def test_generate_data_quality_rules(request_type, transport: str = 'grpc'): + client = DataScanServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.generate_data_quality_rules), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = datascans.GenerateDataQualityRulesResponse( + ) + response = client.generate_data_quality_rules(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = datascans.GenerateDataQualityRulesRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. 
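+    # Only the response type is asserted here: the mocked response above is
+    # constructed with no fields set, so there are no field values to compare.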
+ assert isinstance(response, datascans.GenerateDataQualityRulesResponse) + + +def test_generate_data_quality_rules_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = DataScanServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = datascans.GenerateDataQualityRulesRequest( + name='name_value', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.generate_data_quality_rules), + '__call__') as call: + call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client.generate_data_quality_rules(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == datascans.GenerateDataQualityRulesRequest( + name='name_value', + ) + +def test_generate_data_quality_rules_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = DataScanServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.generate_data_quality_rules in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client._transport._wrapped_methods[client._transport.generate_data_quality_rules] = mock_rpc + request = {} + client.generate_data_quality_rules(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.generate_data_quality_rules(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + +@pytest.mark.asyncio +async def test_generate_data_quality_rules_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = DataScanServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._client._transport.generate_data_quality_rules in client._client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[client._client._transport.generate_data_quality_rules] = mock_rpc + + request = {} + await client.generate_data_quality_rules(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + await client.generate_data_quality_rules(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + +@pytest.mark.asyncio +async def test_generate_data_quality_rules_async(transport: str = 'grpc_asyncio', request_type=datascans.GenerateDataQualityRulesRequest): + client = DataScanServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.generate_data_quality_rules), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(datascans.GenerateDataQualityRulesResponse( + )) + response = await client.generate_data_quality_rules(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = datascans.GenerateDataQualityRulesRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, datascans.GenerateDataQualityRulesResponse) + + +@pytest.mark.asyncio +async def test_generate_data_quality_rules_async_from_dict(): + await test_generate_data_quality_rules_async(request_type=dict) + +def test_generate_data_quality_rules_field_headers(): + client = DataScanServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = datascans.GenerateDataQualityRulesRequest() + + request.name = 'name_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.generate_data_quality_rules), + '__call__') as call: + call.return_value = datascans.GenerateDataQualityRulesResponse() + client.generate_data_quality_rules(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'name=name_value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_generate_data_quality_rules_field_headers_async(): + client = DataScanServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = datascans.GenerateDataQualityRulesRequest() + + request.name = 'name_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.generate_data_quality_rules), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(datascans.GenerateDataQualityRulesResponse()) + await client.generate_data_quality_rules(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. 
+ _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'name=name_value', + ) in kw['metadata'] + + +def test_generate_data_quality_rules_flattened(): + client = DataScanServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.generate_data_quality_rules), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = datascans.GenerateDataQualityRulesResponse() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.generate_data_quality_rules( + name='name_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = 'name_value' + assert arg == mock_val + + +def test_generate_data_quality_rules_flattened_error(): + client = DataScanServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.generate_data_quality_rules( + datascans.GenerateDataQualityRulesRequest(), + name='name_value', + ) + +@pytest.mark.asyncio +async def test_generate_data_quality_rules_flattened_async(): + client = DataScanServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.generate_data_quality_rules), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = datascans.GenerateDataQualityRulesResponse() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(datascans.GenerateDataQualityRulesResponse()) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.generate_data_quality_rules( + name='name_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = 'name_value' + assert arg == mock_val + +@pytest.mark.asyncio +async def test_generate_data_quality_rules_flattened_error_async(): + client = DataScanServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.generate_data_quality_rules( + datascans.GenerateDataQualityRulesRequest(), + name='name_value', + ) + + +def test_credentials_transport_error(): + # It is an error to provide credentials and a transport instance. + transport = transports.DataScanServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = DataScanServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # It is an error to provide a credentials file and a transport instance. 
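+    # (A transport instance is expected to arrive fully configured, so pairing
+    # it with client-level credentials, a credentials file, scopes, or an
+    # api_key is rejected rather than silently ignored.)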
+    transport = transports.DataScanServiceGrpcTransport(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+    with pytest.raises(ValueError):
+        client = DataScanServiceClient(
+            client_options={"credentials_file": "credentials.json"},
+            transport=transport,
+        )
+
+    # It is an error to provide an api_key and a transport instance.
+    transport = transports.DataScanServiceGrpcTransport(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+    options = client_options.ClientOptions()
+    options.api_key = "api_key"
+    with pytest.raises(ValueError):
+        client = DataScanServiceClient(
+            client_options=options,
+            transport=transport,
+        )
+
+    # It is an error to provide an api_key and a credential.
+    options = client_options.ClientOptions()
+    options.api_key = "api_key"
+    with pytest.raises(ValueError):
+        client = DataScanServiceClient(
+            client_options=options,
+            credentials=ga_credentials.AnonymousCredentials()
+        )
+
+    # It is an error to provide scopes and a transport instance.
+    transport = transports.DataScanServiceGrpcTransport(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+    with pytest.raises(ValueError):
+        client = DataScanServiceClient(
+            client_options={"scopes": ["1", "2"]},
+            transport=transport,
+        )
+
+
+def test_transport_instance():
+    # A client may be instantiated with a custom transport instance.
+    transport = transports.DataScanServiceGrpcTransport(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+    client = DataScanServiceClient(transport=transport)
+    assert client.transport is transport
+
+def test_transport_get_channel():
+    # A client may be instantiated with a custom transport instance.
+    transport = transports.DataScanServiceGrpcTransport(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+    channel = transport.grpc_channel
+    assert channel
+
+    transport = transports.DataScanServiceGrpcAsyncIOTransport(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+    channel = transport.grpc_channel
+    assert channel
+
+@pytest.mark.parametrize("transport_class", [
+    transports.DataScanServiceGrpcTransport,
+    transports.DataScanServiceGrpcAsyncIOTransport,
+])
+def test_transport_adc(transport_class):
+    # Test default credentials are used if not provided.
+    with mock.patch.object(google.auth, 'default') as adc:
+        adc.return_value = (ga_credentials.AnonymousCredentials(), None)
+        transport_class()
+        adc.assert_called_once()
+
+def test_transport_kind_grpc():
+    transport = DataScanServiceClient.get_transport_class("grpc")(
+        credentials=ga_credentials.AnonymousCredentials()
+    )
+    assert transport.kind == "grpc"
+
+
+def test_initialize_client_w_grpc():
+    client = DataScanServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport="grpc"
+    )
+    assert client is not None
+
+
+# This test is a coverage failsafe to make sure that totally empty calls,
+# i.e. request == None and no flattened fields passed, work.
+def test_create_data_scan_empty_call_grpc():
+    client = DataScanServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport="grpc",
+    )
+
+    # Mock the actual call, and fake the request.
+    with mock.patch.object(
+            type(client.transport.create_data_scan),
+            '__call__') as call:
+        call.return_value = operations_pb2.Operation(name='operations/op')
+        client.create_data_scan(request=None)
+
+        # Establish that the underlying stub method was called.
+        call.assert_called()
+        _, args, _ = call.mock_calls[0]
+        request_msg = datascans.CreateDataScanRequest()
+
+        assert args[0] == request_msg
+
+
+# This test is a coverage failsafe to make sure that totally empty calls,
+# i.e. request == None and no flattened fields passed, work.
+def test_update_data_scan_empty_call_grpc():
+    client = DataScanServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport="grpc",
+    )
+
+    # Mock the actual call, and fake the request.
+    with mock.patch.object(
+            type(client.transport.update_data_scan),
+            '__call__') as call:
+        call.return_value = operations_pb2.Operation(name='operations/op')
+        client.update_data_scan(request=None)
+
+        # Establish that the underlying stub method was called.
+        call.assert_called()
+        _, args, _ = call.mock_calls[0]
+        request_msg = datascans.UpdateDataScanRequest()
+
+        assert args[0] == request_msg
+
+
+# This test is a coverage failsafe to make sure that totally empty calls,
+# i.e. request == None and no flattened fields passed, work.
+def test_delete_data_scan_empty_call_grpc():
+    client = DataScanServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport="grpc",
+    )
+
+    # Mock the actual call, and fake the request.
+    with mock.patch.object(
+            type(client.transport.delete_data_scan),
+            '__call__') as call:
+        call.return_value = operations_pb2.Operation(name='operations/op')
+        client.delete_data_scan(request=None)
+
+        # Establish that the underlying stub method was called.
+        call.assert_called()
+        _, args, _ = call.mock_calls[0]
+        request_msg = datascans.DeleteDataScanRequest()
+
+        assert args[0] == request_msg
+
+
+# This test is a coverage failsafe to make sure that totally empty calls,
+# i.e. request == None and no flattened fields passed, work.
+def test_get_data_scan_empty_call_grpc():
+    client = DataScanServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport="grpc",
+    )
+
+    # Mock the actual call, and fake the request.
+    with mock.patch.object(
+            type(client.transport.get_data_scan),
+            '__call__') as call:
+        call.return_value = datascans.DataScan()
+        client.get_data_scan(request=None)
+
+        # Establish that the underlying stub method was called.
+        call.assert_called()
+        _, args, _ = call.mock_calls[0]
+        request_msg = datascans.GetDataScanRequest()
+
+        assert args[0] == request_msg
+
+
+# This test is a coverage failsafe to make sure that totally empty calls,
+# i.e. request == None and no flattened fields passed, work.
+def test_list_data_scans_empty_call_grpc():
+    client = DataScanServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport="grpc",
+    )
+
+    # Mock the actual call, and fake the request.
+    with mock.patch.object(
+            type(client.transport.list_data_scans),
+            '__call__') as call:
+        call.return_value = datascans.ListDataScansResponse()
+        client.list_data_scans(request=None)
+
+        # Establish that the underlying stub method was called.
+        call.assert_called()
+        _, args, _ = call.mock_calls[0]
+        request_msg = datascans.ListDataScansRequest()
+
+        assert args[0] == request_msg
+
+
+# This test is a coverage failsafe to make sure that totally empty calls,
+# i.e. request == None and no flattened fields passed, work.
+def test_run_data_scan_empty_call_grpc():
+    client = DataScanServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport="grpc",
+    )
+
+    # Mock the actual call, and fake the request.
+    with mock.patch.object(
+            type(client.transport.run_data_scan),
+            '__call__') as call:
+        call.return_value = datascans.RunDataScanResponse()
+        client.run_data_scan(request=None)
+
+        # Establish that the underlying stub method was called.
+        call.assert_called()
+        _, args, _ = call.mock_calls[0]
+        request_msg = datascans.RunDataScanRequest()
+
+        assert args[0] == request_msg
+
+
+# This test is a coverage failsafe to make sure that totally empty calls,
+# i.e. request == None and no flattened fields passed, work.
+def test_get_data_scan_job_empty_call_grpc():
+    client = DataScanServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport="grpc",
+    )
+
+    # Mock the actual call, and fake the request.
+    with mock.patch.object(
+            type(client.transport.get_data_scan_job),
+            '__call__') as call:
+        call.return_value = datascans.DataScanJob()
+        client.get_data_scan_job(request=None)
+
+        # Establish that the underlying stub method was called.
+        call.assert_called()
+        _, args, _ = call.mock_calls[0]
+        request_msg = datascans.GetDataScanJobRequest()
+
+        assert args[0] == request_msg
+
+
+# This test is a coverage failsafe to make sure that totally empty calls,
+# i.e. request == None and no flattened fields passed, work.
+def test_list_data_scan_jobs_empty_call_grpc():
+    client = DataScanServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport="grpc",
+    )
+
+    # Mock the actual call, and fake the request.
+    with mock.patch.object(
+            type(client.transport.list_data_scan_jobs),
+            '__call__') as call:
+        call.return_value = datascans.ListDataScanJobsResponse()
+        client.list_data_scan_jobs(request=None)
+
+        # Establish that the underlying stub method was called.
+        call.assert_called()
+        _, args, _ = call.mock_calls[0]
+        request_msg = datascans.ListDataScanJobsRequest()
+
+        assert args[0] == request_msg
+
+
+# This test is a coverage failsafe to make sure that totally empty calls,
+# i.e. request == None and no flattened fields passed, work.
+def test_generate_data_quality_rules_empty_call_grpc():
+    client = DataScanServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport="grpc",
+    )
+
+    # Mock the actual call, and fake the request.
+    with mock.patch.object(
+            type(client.transport.generate_data_quality_rules),
+            '__call__') as call:
+        call.return_value = datascans.GenerateDataQualityRulesResponse()
+        client.generate_data_quality_rules(request=None)
+
+        # Establish that the underlying stub method was called.
+        call.assert_called()
+        _, args, _ = call.mock_calls[0]
+        request_msg = datascans.GenerateDataQualityRulesRequest()
+
+        assert args[0] == request_msg
+
+
+def test_transport_kind_grpc_asyncio():
+    transport = DataScanServiceAsyncClient.get_transport_class("grpc_asyncio")(
+        credentials=async_anonymous_credentials()
+    )
+    assert transport.kind == "grpc_asyncio"
+
+
+def test_initialize_client_w_grpc_asyncio():
+    client = DataScanServiceAsyncClient(
+        credentials=async_anonymous_credentials(),
+        transport="grpc_asyncio"
+    )
+    assert client is not None
+
+
+# This test is a coverage failsafe to make sure that totally empty calls,
+# i.e. request == None and no flattened fields passed, work.
+@pytest.mark.asyncio
+async def test_create_data_scan_empty_call_grpc_asyncio():
+    client = DataScanServiceAsyncClient(
+        credentials=async_anonymous_credentials(),
+        transport="grpc_asyncio",
+    )
+
+    # Mock the actual call, and fake the request.
+    with mock.patch.object(
+            type(client.transport.create_data_scan),
+            '__call__') as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
+            operations_pb2.Operation(name='operations/spam')
+        )
+        await client.create_data_scan(request=None)
+
+        # Establish that the underlying stub method was called.
+        call.assert_called()
+        _, args, _ = call.mock_calls[0]
+        request_msg = datascans.CreateDataScanRequest()
+
+        assert args[0] == request_msg
+
+
+# This test is a coverage failsafe to make sure that totally empty calls,
+# i.e. request == None and no flattened fields passed, work.
+@pytest.mark.asyncio
+async def test_update_data_scan_empty_call_grpc_asyncio():
+    client = DataScanServiceAsyncClient(
+        credentials=async_anonymous_credentials(),
+        transport="grpc_asyncio",
+    )
+
+    # Mock the actual call, and fake the request.
+    with mock.patch.object(
+            type(client.transport.update_data_scan),
+            '__call__') as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
+            operations_pb2.Operation(name='operations/spam')
+        )
+        await client.update_data_scan(request=None)
+
+        # Establish that the underlying stub method was called.
+        call.assert_called()
+        _, args, _ = call.mock_calls[0]
+        request_msg = datascans.UpdateDataScanRequest()
+
+        assert args[0] == request_msg
+
+
+# This test is a coverage failsafe to make sure that totally empty calls,
+# i.e. request == None and no flattened fields passed, work.
+@pytest.mark.asyncio
+async def test_delete_data_scan_empty_call_grpc_asyncio():
+    client = DataScanServiceAsyncClient(
+        credentials=async_anonymous_credentials(),
+        transport="grpc_asyncio",
+    )
+
+    # Mock the actual call, and fake the request.
+    with mock.patch.object(
+            type(client.transport.delete_data_scan),
+            '__call__') as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
+            operations_pb2.Operation(name='operations/spam')
+        )
+        await client.delete_data_scan(request=None)
+
+        # Establish that the underlying stub method was called.
+        call.assert_called()
+        _, args, _ = call.mock_calls[0]
+        request_msg = datascans.DeleteDataScanRequest()
+
+        assert args[0] == request_msg
+
+
+# This test is a coverage failsafe to make sure that totally empty calls,
+# i.e. request == None and no flattened fields passed, work.
+@pytest.mark.asyncio
+async def test_get_data_scan_empty_call_grpc_asyncio():
+    client = DataScanServiceAsyncClient(
+        credentials=async_anonymous_credentials(),
+        transport="grpc_asyncio",
+    )
+
+    # Mock the actual call, and fake the request.
+    with mock.patch.object(
+            type(client.transport.get_data_scan),
+            '__call__') as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(datascans.DataScan(
+            name='name_value',
+            uid='uid_value',
+            description='description_value',
+            display_name='display_name_value',
+            state=resources.State.ACTIVE,
+            type_=datascans.DataScanType.DATA_QUALITY,
+        ))
+        await client.get_data_scan(request=None)
+
+        # Establish that the underlying stub method was called.
+        call.assert_called()
+        _, args, _ = call.mock_calls[0]
+        request_msg = datascans.GetDataScanRequest()
+
+        assert args[0] == request_msg
+
+
+# This test is a coverage failsafe to make sure that totally empty calls,
+# i.e. request == None and no flattened fields passed, work.
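+# (Note: these asyncio empty-call tests wrap each canned response in
+# grpc_helpers_async.FakeUnaryUnaryCall so the mocked stub method returns an
+# awaitable, the way a real grpc.aio unary-unary call does.)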
+@pytest.mark.asyncio
+async def test_list_data_scans_empty_call_grpc_asyncio():
+    client = DataScanServiceAsyncClient(
+        credentials=async_anonymous_credentials(),
+        transport="grpc_asyncio",
+    )
+
+    # Mock the actual call, and fake the request.
+    with mock.patch.object(
+            type(client.transport.list_data_scans),
+            '__call__') as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(datascans.ListDataScansResponse(
+            next_page_token='next_page_token_value',
+            unreachable=['unreachable_value'],
+        ))
+        await client.list_data_scans(request=None)
+
+        # Establish that the underlying stub method was called.
+        call.assert_called()
+        _, args, _ = call.mock_calls[0]
+        request_msg = datascans.ListDataScansRequest()
+
+        assert args[0] == request_msg
+
+
+# This test is a coverage failsafe to make sure that totally empty calls,
+# i.e. request == None and no flattened fields passed, work.
+@pytest.mark.asyncio
+async def test_run_data_scan_empty_call_grpc_asyncio():
+    client = DataScanServiceAsyncClient(
+        credentials=async_anonymous_credentials(),
+        transport="grpc_asyncio",
+    )
+
+    # Mock the actual call, and fake the request.
+    with mock.patch.object(
+            type(client.transport.run_data_scan),
+            '__call__') as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(datascans.RunDataScanResponse())
+        await client.run_data_scan(request=None)
+
+        # Establish that the underlying stub method was called.
+        call.assert_called()
+        _, args, _ = call.mock_calls[0]
+        request_msg = datascans.RunDataScanRequest()
+
+        assert args[0] == request_msg
+
+
+# This test is a coverage failsafe to make sure that totally empty calls,
+# i.e. request == None and no flattened fields passed, work.
+@pytest.mark.asyncio
+async def test_get_data_scan_job_empty_call_grpc_asyncio():
+    client = DataScanServiceAsyncClient(
+        credentials=async_anonymous_credentials(),
+        transport="grpc_asyncio",
+    )
+
+    # Mock the actual call, and fake the request.
+    with mock.patch.object(
+            type(client.transport.get_data_scan_job),
+            '__call__') as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(datascans.DataScanJob(
+            name='name_value',
+            uid='uid_value',
+            state=datascans.DataScanJob.State.RUNNING,
+            message='message_value',
+            type_=datascans.DataScanType.DATA_QUALITY,
+        ))
+        await client.get_data_scan_job(request=None)
+
+        # Establish that the underlying stub method was called.
+        call.assert_called()
+        _, args, _ = call.mock_calls[0]
+        request_msg = datascans.GetDataScanJobRequest()
+
+        assert args[0] == request_msg
+
+
+# This test is a coverage failsafe to make sure that totally empty calls,
+# i.e. request == None and no flattened fields passed, work.
+@pytest.mark.asyncio
+async def test_list_data_scan_jobs_empty_call_grpc_asyncio():
+    client = DataScanServiceAsyncClient(
+        credentials=async_anonymous_credentials(),
+        transport="grpc_asyncio",
+    )
+
+    # Mock the actual call, and fake the request.
+    with mock.patch.object(
+            type(client.transport.list_data_scan_jobs),
+            '__call__') as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(datascans.ListDataScanJobsResponse(
+            next_page_token='next_page_token_value',
+        ))
+        await client.list_data_scan_jobs(request=None)
+
+        # Establish that the underlying stub method was called.
+        call.assert_called()
+        _, args, _ = call.mock_calls[0]
+        request_msg = datascans.ListDataScanJobsRequest()
+
+        assert args[0] == request_msg
+
+
+# This test is a coverage failsafe to make sure that totally empty calls,
+# i.e. request == None and no flattened fields passed, work.
+@pytest.mark.asyncio
+async def test_generate_data_quality_rules_empty_call_grpc_asyncio():
+    client = DataScanServiceAsyncClient(
+        credentials=async_anonymous_credentials(),
+        transport="grpc_asyncio",
+    )
+
+    # Mock the actual call, and fake the request.
+    with mock.patch.object(
+            type(client.transport.generate_data_quality_rules),
+            '__call__') as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(datascans.GenerateDataQualityRulesResponse())
+        await client.generate_data_quality_rules(request=None)
+
+        # Establish that the underlying stub method was called.
+        call.assert_called()
+        _, args, _ = call.mock_calls[0]
+        request_msg = datascans.GenerateDataQualityRulesRequest()
+
+        assert args[0] == request_msg
+
+
+def test_transport_grpc_default():
+    # A client should use the gRPC transport by default.
+    client = DataScanServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+    assert isinstance(
+        client.transport,
+        transports.DataScanServiceGrpcTransport,
+    )
+
+def test_data_scan_service_base_transport_error():
+    # Passing both a credentials object and credentials_file should raise an error.
+    with pytest.raises(core_exceptions.DuplicateCredentialArgs):
+        transport = transports.DataScanServiceTransport(
+            credentials=ga_credentials.AnonymousCredentials(),
+            credentials_file="credentials.json"
+        )
+
+
+def test_data_scan_service_base_transport():
+    # Instantiate the base transport.
+    with mock.patch('google.cloud.dataplex_v1.services.data_scan_service.transports.DataScanServiceTransport.__init__') as Transport:
+        Transport.return_value = None
+        transport = transports.DataScanServiceTransport(
+            credentials=ga_credentials.AnonymousCredentials(),
+        )
+
+    # Every method on the transport should just blindly
+    # raise NotImplementedError.
+    methods = (
+        'create_data_scan',
+        'update_data_scan',
+        'delete_data_scan',
+        'get_data_scan',
+        'list_data_scans',
+        'run_data_scan',
+        'get_data_scan_job',
+        'list_data_scan_jobs',
+        'generate_data_quality_rules',
+        'get_location',
+        'list_locations',
+        'get_operation',
+        'cancel_operation',
+        'delete_operation',
+        'list_operations',
+    )
+    for method in methods:
+        with pytest.raises(NotImplementedError):
+            getattr(transport, method)(request=object())
+
+    with pytest.raises(NotImplementedError):
+        transport.close()
+
+    # Additionally, the LRO client (a property) should
+    # also raise NotImplementedError.
+    with pytest.raises(NotImplementedError):
+        transport.operations_client
+
+    # Catch-all for all remaining methods and properties.
+    remainder = [
+        'kind',
+    ]
+    for r in remainder:
+        with pytest.raises(NotImplementedError):
+            getattr(transport, r)()
+
+
+def test_data_scan_service_base_transport_with_credentials_file():
+    # Instantiate the base transport with a credentials file.
+    with mock.patch.object(google.auth, 'load_credentials_from_file', autospec=True) as load_creds, mock.patch('google.cloud.dataplex_v1.services.data_scan_service.transports.DataScanServiceTransport._prep_wrapped_messages') as Transport:
+        Transport.return_value = None
+        load_creds.return_value = (ga_credentials.AnonymousCredentials(), None)
+        transport = transports.DataScanServiceTransport(
+            credentials_file="credentials.json",
+            quota_project_id="octopus",
+        )
+        load_creds.assert_called_once_with("credentials.json",
+            scopes=None,
+            default_scopes=(
+                'https://www.googleapis.com/auth/cloud-platform',
+            ),
+            quota_project_id="octopus",
+        )
+
+
+def test_data_scan_service_base_transport_with_adc():
+    # Test the default credentials are used if credentials and credentials_file are None.
+    with mock.patch.object(google.auth, 'default', autospec=True) as adc, mock.patch('google.cloud.dataplex_v1.services.data_scan_service.transports.DataScanServiceTransport._prep_wrapped_messages') as Transport:
+        Transport.return_value = None
+        adc.return_value = (ga_credentials.AnonymousCredentials(), None)
+        transport = transports.DataScanServiceTransport()
+        adc.assert_called_once()
+
+
+def test_data_scan_service_auth_adc():
+    # If no credentials are provided, we should use ADC credentials.
+    with mock.patch.object(google.auth, 'default', autospec=True) as adc:
+        adc.return_value = (ga_credentials.AnonymousCredentials(), None)
+        DataScanServiceClient()
+        adc.assert_called_once_with(
+            scopes=None,
+            default_scopes=(
+                'https://www.googleapis.com/auth/cloud-platform',
+            ),
+            quota_project_id=None,
+        )
+
+
+@pytest.mark.parametrize(
+    "transport_class",
+    [
+        transports.DataScanServiceGrpcTransport,
+        transports.DataScanServiceGrpcAsyncIOTransport,
+    ],
+)
+def test_data_scan_service_transport_auth_adc(transport_class):
+    # If credentials and host are not provided, the transport class should use
+    # ADC credentials.
+    with mock.patch.object(google.auth, 'default', autospec=True) as adc:
+        adc.return_value = (ga_credentials.AnonymousCredentials(), None)
+        transport_class(quota_project_id="octopus", scopes=["1", "2"])
+        adc.assert_called_once_with(
+            scopes=["1", "2"],
+            default_scopes=('https://www.googleapis.com/auth/cloud-platform',),
+            quota_project_id="octopus",
+        )
+
+
+@pytest.mark.parametrize(
+    "transport_class",
+    [
+        transports.DataScanServiceGrpcTransport,
+        transports.DataScanServiceGrpcAsyncIOTransport,
+    ],
+)
+def test_data_scan_service_transport_auth_gdch_credentials(transport_class):
+    host = 'https://language.com'
+    api_audience_tests = [None, 'https://language2.com']
+    api_audience_expect = [host, 'https://language2.com']
+    for t, e in zip(api_audience_tests, api_audience_expect):
+        with mock.patch.object(google.auth, 'default', autospec=True) as adc:
+            gdch_mock = mock.MagicMock()
+            type(gdch_mock).with_gdch_audience = mock.PropertyMock(return_value=gdch_mock)
+            adc.return_value = (gdch_mock, None)
+            transport_class(host=host, api_audience=t)
+            gdch_mock.with_gdch_audience.assert_called_once_with(e)
+
+
+@pytest.mark.parametrize(
+    "transport_class,grpc_helpers",
+    [
+        (transports.DataScanServiceGrpcTransport, grpc_helpers),
+        (transports.DataScanServiceGrpcAsyncIOTransport, grpc_helpers_async)
+    ],
+)
+def test_data_scan_service_transport_create_channel(transport_class, grpc_helpers):
+    # If credentials and host are not provided, the transport class should use
+    # ADC credentials.
+    with mock.patch.object(google.auth, "default", autospec=True) as adc, mock.patch.object(
+        grpc_helpers, "create_channel", autospec=True
+    ) as create_channel:
+        creds = ga_credentials.AnonymousCredentials()
+        adc.return_value = (creds, None)
+        transport_class(
+            quota_project_id="octopus",
+            scopes=["1", "2"]
+        )
+
+        create_channel.assert_called_with(
+            "dataplex.googleapis.com:443",
+            credentials=creds,
+            credentials_file=None,
+            quota_project_id="octopus",
+            default_scopes=(
+                'https://www.googleapis.com/auth/cloud-platform',
+            ),
+            scopes=["1", "2"],
+            default_host="dataplex.googleapis.com",
+            ssl_credentials=None,
+            options=[
+                ("grpc.max_send_message_length", -1),
+                ("grpc.max_receive_message_length", -1),
+            ],
+        )
+
+
+@pytest.mark.parametrize("transport_class", [transports.DataScanServiceGrpcTransport, transports.DataScanServiceGrpcAsyncIOTransport])
+def test_data_scan_service_grpc_transport_client_cert_source_for_mtls(
+    transport_class
+):
+    cred = ga_credentials.AnonymousCredentials()
+
+    # Check ssl_channel_credentials is used if provided.
+    with mock.patch.object(transport_class, "create_channel") as mock_create_channel:
+        mock_ssl_channel_creds = mock.Mock()
+        transport_class(
+            host="squid.clam.whelk",
+            credentials=cred,
+            ssl_channel_credentials=mock_ssl_channel_creds
+        )
+        mock_create_channel.assert_called_once_with(
+            "squid.clam.whelk:443",
+            credentials=cred,
+            credentials_file=None,
+            scopes=None,
+            ssl_credentials=mock_ssl_channel_creds,
+            quota_project_id=None,
+            options=[
+                ("grpc.max_send_message_length", -1),
+                ("grpc.max_receive_message_length", -1),
+            ],
+        )
+
+    # Check that if ssl_channel_credentials is not provided, then
+    # client_cert_source_for_mtls is used.
+    with mock.patch.object(transport_class, "create_channel", return_value=mock.Mock()):
+        with mock.patch("grpc.ssl_channel_credentials") as mock_ssl_cred:
+            transport_class(
+                credentials=cred,
+                client_cert_source_for_mtls=client_cert_source_callback
+            )
+            expected_cert, expected_key = client_cert_source_callback()
+            mock_ssl_cred.assert_called_once_with(
+                certificate_chain=expected_cert,
+                private_key=expected_key
+            )
+
+
+@pytest.mark.parametrize("transport_name", [
+    "grpc",
+    "grpc_asyncio",
+])
+def test_data_scan_service_host_no_port(transport_name):
+    client = DataScanServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        client_options=client_options.ClientOptions(api_endpoint='dataplex.googleapis.com'),
+        transport=transport_name,
+    )
+    assert client.transport._host == (
+        'dataplex.googleapis.com:443'
+    )
+
+@pytest.mark.parametrize("transport_name", [
+    "grpc",
+    "grpc_asyncio",
+])
+def test_data_scan_service_host_with_port(transport_name):
+    client = DataScanServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        client_options=client_options.ClientOptions(api_endpoint='dataplex.googleapis.com:8000'),
+        transport=transport_name,
+    )
+    assert client.transport._host == (
+        'dataplex.googleapis.com:8000'
+    )
+
+def test_data_scan_service_grpc_transport_channel():
+    channel = grpc.secure_channel('http://localhost/', grpc.local_channel_credentials())
+
+    # Check that the channel is used if provided.
+    transport = transports.DataScanServiceGrpcTransport(
+        host="squid.clam.whelk",
+        channel=channel,
+    )
+    assert transport.grpc_channel == channel
+    assert transport._host == "squid.clam.whelk:443"
+    assert transport._ssl_channel_credentials is None
+
+
+def test_data_scan_service_grpc_asyncio_transport_channel():
+    channel = aio.secure_channel('http://localhost/', grpc.local_channel_credentials())
+
+    # Check that the channel is used if provided.
+    transport = transports.DataScanServiceGrpcAsyncIOTransport(
+        host="squid.clam.whelk",
+        channel=channel,
+    )
+    assert transport.grpc_channel == channel
+    assert transport._host == "squid.clam.whelk:443"
+    assert transport._ssl_channel_credentials is None
+
+
+# Remove this test when the deprecated arguments (api_mtls_endpoint, client_cert_source)
+# are removed from the grpc/grpc_asyncio transport constructor.
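+# (Note: client_cert_source_callback is the module-level test helper used by
+# these mTLS cases; it is expected to return the (b"cert bytes", b"key bytes")
+# pair asserted below.)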
+@pytest.mark.parametrize("transport_class", [transports.DataScanServiceGrpcTransport, transports.DataScanServiceGrpcAsyncIOTransport]) +def test_data_scan_service_transport_channel_mtls_with_client_cert_source( + transport_class +): + with mock.patch("grpc.ssl_channel_credentials", autospec=True) as grpc_ssl_channel_cred: + with mock.patch.object(transport_class, "create_channel") as grpc_create_channel: + mock_ssl_cred = mock.Mock() + grpc_ssl_channel_cred.return_value = mock_ssl_cred + + mock_grpc_channel = mock.Mock() + grpc_create_channel.return_value = mock_grpc_channel + + cred = ga_credentials.AnonymousCredentials() + with pytest.warns(DeprecationWarning): + with mock.patch.object(google.auth, 'default') as adc: + adc.return_value = (cred, None) + transport = transport_class( + host="squid.clam.whelk", + api_mtls_endpoint="mtls.squid.clam.whelk", + client_cert_source=client_cert_source_callback, + ) + adc.assert_called_once() + + grpc_ssl_channel_cred.assert_called_once_with( + certificate_chain=b"cert bytes", private_key=b"key bytes" + ) + grpc_create_channel.assert_called_once_with( + "mtls.squid.clam.whelk:443", + credentials=cred, + credentials_file=None, + scopes=None, + ssl_credentials=mock_ssl_cred, + quota_project_id=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + assert transport.grpc_channel == mock_grpc_channel + assert transport._ssl_channel_credentials == mock_ssl_cred + + +# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are +# removed from grpc/grpc_asyncio transport constructor. +@pytest.mark.parametrize("transport_class", [transports.DataScanServiceGrpcTransport, transports.DataScanServiceGrpcAsyncIOTransport]) +def test_data_scan_service_transport_channel_mtls_with_adc( + transport_class +): + mock_ssl_cred = mock.Mock() + with mock.patch.multiple( + "google.auth.transport.grpc.SslCredentials", + __init__=mock.Mock(return_value=None), + ssl_credentials=mock.PropertyMock(return_value=mock_ssl_cred), + ): + with mock.patch.object(transport_class, "create_channel") as grpc_create_channel: + mock_grpc_channel = mock.Mock() + grpc_create_channel.return_value = mock_grpc_channel + mock_cred = mock.Mock() + + with pytest.warns(DeprecationWarning): + transport = transport_class( + host="squid.clam.whelk", + credentials=mock_cred, + api_mtls_endpoint="mtls.squid.clam.whelk", + client_cert_source=None, + ) + + grpc_create_channel.assert_called_once_with( + "mtls.squid.clam.whelk:443", + credentials=mock_cred, + credentials_file=None, + scopes=None, + ssl_credentials=mock_ssl_cred, + quota_project_id=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + assert transport.grpc_channel == mock_grpc_channel + + +def test_data_scan_service_grpc_lro_client(): + client = DataScanServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + transport = client.transport + + # Ensure that we have a api-core operations client. + assert isinstance( + transport.operations_client, + operations_v1.OperationsClient, + ) + + # Ensure that subsequent calls to the property send the exact same object. 
+ assert transport.operations_client is transport.operations_client + + +def test_data_scan_service_grpc_lro_async_client(): + client = DataScanServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc_asyncio', + ) + transport = client.transport + + # Ensure that we have a api-core operations client. + assert isinstance( + transport.operations_client, + operations_v1.OperationsAsyncClient, + ) + + # Ensure that subsequent calls to the property send the exact same object. + assert transport.operations_client is transport.operations_client + + +def test_connection_path(): + project = "squid" + location = "clam" + connection = "whelk" + expected = "projects/{project}/locations/{location}/connections/{connection}".format(project=project, location=location, connection=connection, ) + actual = DataScanServiceClient.connection_path(project, location, connection) + assert expected == actual + + +def test_parse_connection_path(): + expected = { + "project": "octopus", + "location": "oyster", + "connection": "nudibranch", + } + path = DataScanServiceClient.connection_path(**expected) + + # Check that the path construction is reversible. + actual = DataScanServiceClient.parse_connection_path(path) + assert expected == actual + +def test_data_scan_path(): + project = "cuttlefish" + location = "mussel" + dataScan = "winkle" + expected = "projects/{project}/locations/{location}/dataScans/{dataScan}".format(project=project, location=location, dataScan=dataScan, ) + actual = DataScanServiceClient.data_scan_path(project, location, dataScan) + assert expected == actual + + +def test_parse_data_scan_path(): + expected = { + "project": "nautilus", + "location": "scallop", + "dataScan": "abalone", + } + path = DataScanServiceClient.data_scan_path(**expected) + + # Check that the path construction is reversible. + actual = DataScanServiceClient.parse_data_scan_path(path) + assert expected == actual + +def test_data_scan_job_path(): + project = "squid" + location = "clam" + dataScan = "whelk" + job = "octopus" + expected = "projects/{project}/locations/{location}/dataScans/{dataScan}/jobs/{job}".format(project=project, location=location, dataScan=dataScan, job=job, ) + actual = DataScanServiceClient.data_scan_job_path(project, location, dataScan, job) + assert expected == actual + + +def test_parse_data_scan_job_path(): + expected = { + "project": "oyster", + "location": "nudibranch", + "dataScan": "cuttlefish", + "job": "mussel", + } + path = DataScanServiceClient.data_scan_job_path(**expected) + + # Check that the path construction is reversible. + actual = DataScanServiceClient.parse_data_scan_job_path(path) + assert expected == actual + +def test_dataset_path(): + project = "winkle" + dataset = "nautilus" + expected = "projects/{project}/datasets/{dataset}".format(project=project, dataset=dataset, ) + actual = DataScanServiceClient.dataset_path(project, dataset) + assert expected == actual + + +def test_parse_dataset_path(): + expected = { + "project": "scallop", + "dataset": "abalone", + } + path = DataScanServiceClient.dataset_path(**expected) + + # Check that the path construction is reversible. 
+    actual = DataScanServiceClient.parse_dataset_path(path)
+    assert expected == actual
+
+def test_entity_path():
+    project = "squid"
+    location = "clam"
+    lake = "whelk"
+    zone = "octopus"
+    entity = "oyster"
+    expected = "projects/{project}/locations/{location}/lakes/{lake}/zones/{zone}/entities/{entity}".format(project=project, location=location, lake=lake, zone=zone, entity=entity, )
+    actual = DataScanServiceClient.entity_path(project, location, lake, zone, entity)
+    assert expected == actual
+
+
+def test_parse_entity_path():
+    expected = {
+        "project": "nudibranch",
+        "location": "cuttlefish",
+        "lake": "mussel",
+        "zone": "winkle",
+        "entity": "nautilus",
+    }
+    path = DataScanServiceClient.entity_path(**expected)
+
+    # Check that the path construction is reversible.
+    actual = DataScanServiceClient.parse_entity_path(path)
+    assert expected == actual
+
+def test_common_billing_account_path():
+    billing_account = "scallop"
+    expected = "billingAccounts/{billing_account}".format(billing_account=billing_account, )
+    actual = DataScanServiceClient.common_billing_account_path(billing_account)
+    assert expected == actual
+
+
+def test_parse_common_billing_account_path():
+    expected = {
+        "billing_account": "abalone",
+    }
+    path = DataScanServiceClient.common_billing_account_path(**expected)
+
+    # Check that the path construction is reversible.
+    actual = DataScanServiceClient.parse_common_billing_account_path(path)
+    assert expected == actual
+
+def test_common_folder_path():
+    folder = "squid"
+    expected = "folders/{folder}".format(folder=folder, )
+    actual = DataScanServiceClient.common_folder_path(folder)
+    assert expected == actual
+
+
+def test_parse_common_folder_path():
+    expected = {
+        "folder": "clam",
+    }
+    path = DataScanServiceClient.common_folder_path(**expected)
+
+    # Check that the path construction is reversible.
+    actual = DataScanServiceClient.parse_common_folder_path(path)
+    assert expected == actual
+
+def test_common_organization_path():
+    organization = "whelk"
+    expected = "organizations/{organization}".format(organization=organization, )
+    actual = DataScanServiceClient.common_organization_path(organization)
+    assert expected == actual
+
+
+def test_parse_common_organization_path():
+    expected = {
+        "organization": "octopus",
+    }
+    path = DataScanServiceClient.common_organization_path(**expected)
+
+    # Check that the path construction is reversible.
+    actual = DataScanServiceClient.parse_common_organization_path(path)
+    assert expected == actual
+
+def test_common_project_path():
+    project = "oyster"
+    expected = "projects/{project}".format(project=project, )
+    actual = DataScanServiceClient.common_project_path(project)
+    assert expected == actual
+
+
+def test_parse_common_project_path():
+    expected = {
+        "project": "nudibranch",
+    }
+    path = DataScanServiceClient.common_project_path(**expected)
+
+    # Check that the path construction is reversible.
+    actual = DataScanServiceClient.parse_common_project_path(path)
+    assert expected == actual
+
+def test_common_location_path():
+    project = "cuttlefish"
+    location = "mussel"
+    expected = "projects/{project}/locations/{location}".format(project=project, location=location, )
+    actual = DataScanServiceClient.common_location_path(project, location)
+    assert expected == actual
+
+
+def test_parse_common_location_path():
+    expected = {
+        "project": "winkle",
+        "location": "nautilus",
+    }
+    path = DataScanServiceClient.common_location_path(**expected)
+
+    # Check that the path construction is reversible.
+    actual = DataScanServiceClient.parse_common_location_path(path)
+    assert expected == actual
+
+
+def test_client_with_default_client_info():
+    client_info = gapic_v1.client_info.ClientInfo()
+
+    with mock.patch.object(transports.DataScanServiceTransport, '_prep_wrapped_messages') as prep:
+        client = DataScanServiceClient(
+            credentials=ga_credentials.AnonymousCredentials(),
+            client_info=client_info,
+        )
+        prep.assert_called_once_with(client_info)
+
+    with mock.patch.object(transports.DataScanServiceTransport, '_prep_wrapped_messages') as prep:
+        transport_class = DataScanServiceClient.get_transport_class()
+        transport = transport_class(
+            credentials=ga_credentials.AnonymousCredentials(),
+            client_info=client_info,
+        )
+        prep.assert_called_once_with(client_info)
+
+
+def test_delete_operation(transport: str = "grpc"):
+    client = DataScanServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(), transport=transport,
+    )
+
+    # Everything is optional in proto3 as far as the runtime is concerned,
+    # and we are mocking out the actual API, so just send an empty request.
+    request = operations_pb2.DeleteOperationRequest()
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client.transport.delete_operation), "__call__") as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = None
+        response = client.delete_operation(request)
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == request
+
+    # Establish that the response is the type that we expect.
+    assert response is None
+
+@pytest.mark.asyncio
+async def test_delete_operation_async(transport: str = "grpc_asyncio"):
+    client = DataScanServiceAsyncClient(
+        credentials=async_anonymous_credentials(), transport=transport,
+    )
+
+    # Everything is optional in proto3 as far as the runtime is concerned,
+    # and we are mocking out the actual API, so just send an empty request.
+    request = operations_pb2.DeleteOperationRequest()
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client.transport.delete_operation), "__call__") as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
+            None
+        )
+        response = await client.delete_operation(request)
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == request
+
+    # Establish that the response is the type that we expect.
+    assert response is None
+
+def test_delete_operation_field_headers():
+    client = DataScanServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Any value that is part of the HTTP/1.1 URI should be sent as
+    # a field header. Set these to a non-empty value.
+    request = operations_pb2.DeleteOperationRequest()
+    request.name = "locations"
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client.transport.delete_operation), "__call__") as call:
+        call.return_value = None
+
+        client.delete_operation(request)
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == request
+
+    # Establish that the field header was sent.
+    _, _, kw = call.mock_calls[0]
+    assert ("x-goog-request-params", "name=locations",) in kw["metadata"]
+
+@pytest.mark.asyncio
+async def test_delete_operation_field_headers_async():
+    client = DataScanServiceAsyncClient(
+        credentials=async_anonymous_credentials(),
+    )
+
+    # Any value that is part of the HTTP/1.1 URI should be sent as
+    # a field header. Set these to a non-empty value.
+    request = operations_pb2.DeleteOperationRequest()
+    request.name = "locations"
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client.transport.delete_operation), "__call__") as call:
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
+            None
+        )
+        await client.delete_operation(request)
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == request
+
+    # Establish that the field header was sent.
+    _, _, kw = call.mock_calls[0]
+    assert ("x-goog-request-params", "name=locations",) in kw["metadata"]
+
+def test_delete_operation_from_dict():
+    client = DataScanServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client.transport.delete_operation), "__call__") as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = None
+
+        response = client.delete_operation(
+            request={
+                "name": "locations",
+            }
+        )
+        call.assert_called()
+
+@pytest.mark.asyncio
+async def test_delete_operation_from_dict_async():
+    client = DataScanServiceAsyncClient(
+        credentials=async_anonymous_credentials(),
+    )
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client.transport.delete_operation), "__call__") as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
+            None
+        )
+        response = await client.delete_operation(
+            request={
+                "name": "locations",
+            }
+        )
+        call.assert_called()
+
+
+def test_cancel_operation(transport: str = "grpc"):
+    client = DataScanServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(), transport=transport,
+    )
+
+    # Everything is optional in proto3 as far as the runtime is concerned,
+    # and we are mocking out the actual API, so just send an empty request.
+    request = operations_pb2.CancelOperationRequest()
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = None
+        response = client.cancel_operation(request)
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == request
+
+    # Establish that the response is the type that we expect.
+    assert response is None
+
+@pytest.mark.asyncio
+async def test_cancel_operation_async(transport: str = "grpc_asyncio"):
+    client = DataScanServiceAsyncClient(
+        credentials=async_anonymous_credentials(), transport=transport,
+    )
+
+    # Everything is optional in proto3 as far as the runtime is concerned,
+    # and we are mocking out the actual API, so just send an empty request.
+    request = operations_pb2.CancelOperationRequest()
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
+            None
+        )
+        response = await client.cancel_operation(request)
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == request
+
+    # Establish that the response is the type that we expect.
+    assert response is None
+
+def test_cancel_operation_field_headers():
+    client = DataScanServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Any value that is part of the HTTP/1.1 URI should be sent as
+    # a field header. Set these to a non-empty value.
+    request = operations_pb2.CancelOperationRequest()
+    request.name = "locations"
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call:
+        call.return_value = None
+
+        client.cancel_operation(request)
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == request
+
+    # Establish that the field header was sent.
+    _, _, kw = call.mock_calls[0]
+    assert ("x-goog-request-params", "name=locations",) in kw["metadata"]
+
+@pytest.mark.asyncio
+async def test_cancel_operation_field_headers_async():
+    client = DataScanServiceAsyncClient(
+        credentials=async_anonymous_credentials(),
+    )
+
+    # Any value that is part of the HTTP/1.1 URI should be sent as
+    # a field header. Set these to a non-empty value.
+    request = operations_pb2.CancelOperationRequest()
+    request.name = "locations"
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call:
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
+            None
+        )
+        await client.cancel_operation(request)
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == request
+
+    # Establish that the field header was sent.
+    _, _, kw = call.mock_calls[0]
+    assert ("x-goog-request-params", "name=locations",) in kw["metadata"]
+
+def test_cancel_operation_from_dict():
+    client = DataScanServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = None
+
+        response = client.cancel_operation(
+            request={
+                "name": "locations",
+            }
+        )
+        call.assert_called()
+
+@pytest.mark.asyncio
+async def test_cancel_operation_from_dict_async():
+    client = DataScanServiceAsyncClient(
+        credentials=async_anonymous_credentials(),
+    )
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
+            None
+        )
+        response = await client.cancel_operation(
+            request={
+                "name": "locations",
+            }
+        )
+        call.assert_called()
+
+
+def test_get_operation(transport: str = "grpc"):
+    client = DataScanServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(), transport=transport,
+    )
+
+    # Everything is optional in proto3 as far as the runtime is concerned,
+    # and we are mocking out the actual API, so just send an empty request.
+    request = operations_pb2.GetOperationRequest()
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client.transport.get_operation), "__call__") as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = operations_pb2.Operation()
+        response = client.get_operation(request)
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == request
+
+    # Establish that the response is the type that we expect.
+    assert isinstance(response, operations_pb2.Operation)
+
+@pytest.mark.asyncio
+async def test_get_operation_async(transport: str = "grpc_asyncio"):
+    client = DataScanServiceAsyncClient(
+        credentials=async_anonymous_credentials(), transport=transport,
+    )
+
+    # Everything is optional in proto3 as far as the runtime is concerned,
+    # and we are mocking out the actual API, so just send an empty request.
+    request = operations_pb2.GetOperationRequest()
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client.transport.get_operation), "__call__") as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
+            operations_pb2.Operation()
+        )
+        response = await client.get_operation(request)
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == request
+
+    # Establish that the response is the type that we expect.
+    assert isinstance(response, operations_pb2.Operation)
+
+def test_get_operation_field_headers():
+    client = DataScanServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Any value that is part of the HTTP/1.1 URI should be sent as
+    # a field header. Set these to a non-empty value.
+    request = operations_pb2.GetOperationRequest()
+    request.name = "locations"
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client.transport.get_operation), "__call__") as call:
+        call.return_value = operations_pb2.Operation()
+
+        client.get_operation(request)
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == request
+
+    # Establish that the field header was sent.
+    _, _, kw = call.mock_calls[0]
+    assert ("x-goog-request-params", "name=locations",) in kw["metadata"]
+
+@pytest.mark.asyncio
+async def test_get_operation_field_headers_async():
+    client = DataScanServiceAsyncClient(
+        credentials=async_anonymous_credentials(),
+    )
+
+    # Any value that is part of the HTTP/1.1 URI should be sent as
+    # a field header. Set these to a non-empty value.
+    request = operations_pb2.GetOperationRequest()
+    request.name = "locations"
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client.transport.get_operation), "__call__") as call:
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
+            operations_pb2.Operation()
+        )
+        await client.get_operation(request)
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == request
+
+    # Establish that the field header was sent.
+    _, _, kw = call.mock_calls[0]
+    assert ("x-goog-request-params", "name=locations",) in kw["metadata"]
+
+def test_get_operation_from_dict():
+    client = DataScanServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client.transport.get_operation), "__call__") as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = operations_pb2.Operation()
+
+        response = client.get_operation(
+            request={
+                "name": "locations",
+            }
+        )
+        call.assert_called()
+
+@pytest.mark.asyncio
+async def test_get_operation_from_dict_async():
+    client = DataScanServiceAsyncClient(
+        credentials=async_anonymous_credentials(),
+    )
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client.transport.get_operation), "__call__") as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
+            operations_pb2.Operation()
+        )
+        response = await client.get_operation(
+            request={
+                "name": "locations",
+            }
+        )
+        call.assert_called()
+
+
+def test_list_operations(transport: str = "grpc"):
+    client = DataScanServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(), transport=transport,
+    )
+
+    # Everything is optional in proto3 as far as the runtime is concerned,
+    # and we are mocking out the actual API, so just send an empty request.
+    request = operations_pb2.ListOperationsRequest()
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client.transport.list_operations), "__call__") as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = operations_pb2.ListOperationsResponse()
+        response = client.list_operations(request)
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == request
+
+    # Establish that the response is the type that we expect.
+    assert isinstance(response, operations_pb2.ListOperationsResponse)
+
+@pytest.mark.asyncio
+async def test_list_operations_async(transport: str = "grpc_asyncio"):
+    client = DataScanServiceAsyncClient(
+        credentials=async_anonymous_credentials(), transport=transport,
+    )
+
+    # Everything is optional in proto3 as far as the runtime is concerned,
+    # and we are mocking out the actual API, so just send an empty request.
+    request = operations_pb2.ListOperationsRequest()
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client.transport.list_operations), "__call__") as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
+            operations_pb2.ListOperationsResponse()
+        )
+        response = await client.list_operations(request)
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == request
+
+    # Establish that the response is the type that we expect.
+    assert isinstance(response, operations_pb2.ListOperationsResponse)
+
+def test_list_operations_field_headers():
+    client = DataScanServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Any value that is part of the HTTP/1.1 URI should be sent as
+    # a field header. Set these to a non-empty value.
+    request = operations_pb2.ListOperationsRequest()
+    request.name = "locations"
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client.transport.list_operations), "__call__") as call:
+        call.return_value = operations_pb2.ListOperationsResponse()
+
+        client.list_operations(request)
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == request
+
+    # Establish that the field header was sent.
+    _, _, kw = call.mock_calls[0]
+    assert ("x-goog-request-params", "name=locations",) in kw["metadata"]
+
+@pytest.mark.asyncio
+async def test_list_operations_field_headers_async():
+    client = DataScanServiceAsyncClient(
+        credentials=async_anonymous_credentials(),
+    )
+
+    # Any value that is part of the HTTP/1.1 URI should be sent as
+    # a field header. Set these to a non-empty value.
+    request = operations_pb2.ListOperationsRequest()
+    request.name = "locations"
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client.transport.list_operations), "__call__") as call:
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
+            operations_pb2.ListOperationsResponse()
+        )
+        await client.list_operations(request)
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == request
+
+    # Establish that the field header was sent.
+    _, _, kw = call.mock_calls[0]
+    assert ("x-goog-request-params", "name=locations",) in kw["metadata"]
+
+def test_list_operations_from_dict():
+    client = DataScanServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client.transport.list_operations), "__call__") as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = operations_pb2.ListOperationsResponse()
+
+        response = client.list_operations(
+            request={
+                "name": "locations",
+            }
+        )
+        call.assert_called()
+
+@pytest.mark.asyncio
+async def test_list_operations_from_dict_async():
+    client = DataScanServiceAsyncClient(
+        credentials=async_anonymous_credentials(),
+    )
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client.transport.list_operations), "__call__") as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
+            operations_pb2.ListOperationsResponse()
+        )
+        response = await client.list_operations(
+            request={
+                "name": "locations",
+            }
+        )
+        call.assert_called()
+
+
+def test_list_locations(transport: str = "grpc"):
+    client = DataScanServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(), transport=transport,
+    )
+
+    # Everything is optional in proto3 as far as the runtime is concerned,
+    # and we are mocking out the actual API, so just send an empty request.
+ request = locations_pb2.ListLocationsRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_locations), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = locations_pb2.ListLocationsResponse() + response = client.list_locations(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, locations_pb2.ListLocationsResponse) +@pytest.mark.asyncio +async def test_list_locations_async(transport: str = "grpc_asyncio"): + client = DataScanServiceAsyncClient( + credentials=async_anonymous_credentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = locations_pb2.ListLocationsRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_locations), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + locations_pb2.ListLocationsResponse() + ) + response = await client.list_locations(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, locations_pb2.ListLocationsResponse) + +def test_list_locations_field_headers(): + client = DataScanServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = locations_pb2.ListLocationsRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_locations), "__call__") as call: + call.return_value = locations_pb2.ListLocationsResponse() + + client.list_locations(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "name=locations",) in kw["metadata"] +@pytest.mark.asyncio +async def test_list_locations_field_headers_async(): + client = DataScanServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = locations_pb2.ListLocationsRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_locations), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + locations_pb2.ListLocationsResponse() + ) + await client.list_locations(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. 
+ _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "name=locations",) in kw["metadata"] + +def test_list_locations_from_dict(): + client = DataScanServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_locations), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = locations_pb2.ListLocationsResponse() + + response = client.list_locations( + request={ + "name": "locations", + } + ) + call.assert_called() +@pytest.mark.asyncio +async def test_list_locations_from_dict_async(): + client = DataScanServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_locations), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + locations_pb2.ListLocationsResponse() + ) + response = await client.list_locations( + request={ + "name": "locations", + } + ) + call.assert_called() + + +def test_get_location(transport: str = "grpc"): + client = DataScanServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = locations_pb2.GetLocationRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_location), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = locations_pb2.Location() + response = client.get_location(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, locations_pb2.Location) +@pytest.mark.asyncio +async def test_get_location_async(transport: str = "grpc_asyncio"): + client = DataScanServiceAsyncClient( + credentials=async_anonymous_credentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = locations_pb2.GetLocationRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_location), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + locations_pb2.Location() + ) + response = await client.get_location(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, locations_pb2.Location) + +def test_get_location_field_headers(): + client = DataScanServiceClient( + credentials=ga_credentials.AnonymousCredentials()) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = locations_pb2.GetLocationRequest() + request.name = "locations/abc" + + # Mock the actual call within the gRPC stub, and fake the request. 
+    with mock.patch.object(type(client.transport.get_location), "__call__") as call:
+        call.return_value = locations_pb2.Location()
+
+        client.get_location(request)
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == request
+
+        # Establish that the field header was sent.
+        _, _, kw = call.mock_calls[0]
+        assert ("x-goog-request-params", "name=locations/abc",) in kw["metadata"]
+@pytest.mark.asyncio
+async def test_get_location_field_headers_async():
+    client = DataScanServiceAsyncClient(
+        credentials=async_anonymous_credentials()
+    )
+
+    # Any value that is part of the HTTP/1.1 URI should be sent as
+    # a field header. Set these to a non-empty value.
+    request = locations_pb2.GetLocationRequest()
+    request.name = "locations/abc"
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client.transport.get_location), "__call__") as call:
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
+            locations_pb2.Location()
+        )
+        await client.get_location(request)
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == request
+
+        # Establish that the field header was sent.
+        _, _, kw = call.mock_calls[0]
+        assert ("x-goog-request-params", "name=locations/abc",) in kw["metadata"]
+
+def test_get_location_from_dict():
+    client = DataScanServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client.transport.get_location), "__call__") as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = locations_pb2.Location()
+
+        response = client.get_location(
+            request={
+                "name": "locations/abc",
+            }
+        )
+        call.assert_called()
+@pytest.mark.asyncio
+async def test_get_location_from_dict_async():
+    client = DataScanServiceAsyncClient(
+        credentials=async_anonymous_credentials(),
+    )
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client.transport.get_location), "__call__") as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
+            locations_pb2.Location()
+        )
+        response = await client.get_location(
+            request={
+                "name": "locations/abc",
+            }
+        )
+        call.assert_called()
+
+
+def test_transport_close_grpc():
+    client = DataScanServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport="grpc"
+    )
+    with mock.patch.object(type(getattr(client.transport, "_grpc_channel")), "close") as close:
+        with client:
+            close.assert_not_called()
+        close.assert_called_once()
+
+
+@pytest.mark.asyncio
+async def test_transport_close_grpc_asyncio():
+    client = DataScanServiceAsyncClient(
+        credentials=async_anonymous_credentials(),
+        transport="grpc_asyncio"
+    )
+    with mock.patch.object(type(getattr(client.transport, "_grpc_channel")), "close") as close:
+        async with client:
+            close.assert_not_called()
+        close.assert_called_once()
+
+
+def test_client_ctx():
+    transports = [
+        'grpc',
+    ]
+    for transport in transports:
+        client = DataScanServiceClient(
+            credentials=ga_credentials.AnonymousCredentials(),
+            transport=transport
+        )
+        # Test client calls underlying transport.
+ with mock.patch.object(type(client.transport), "close") as close: + close.assert_not_called() + with client: + pass + close.assert_called() + +@pytest.mark.parametrize("client_class,transport_class", [ + (DataScanServiceClient, transports.DataScanServiceGrpcTransport), + (DataScanServiceAsyncClient, transports.DataScanServiceGrpcAsyncIOTransport), +]) +def test_api_key_credentials(client_class, transport_class): + with mock.patch.object( + google.auth._default, "get_api_key_credentials", create=True + ) as get_api_key_credentials: + mock_cred = mock.Mock() + get_api_key_credentials.return_value = mock_cred + options = client_options.ClientOptions() + options.api_key = "api_key" + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options) + patched.assert_called_once_with( + credentials=mock_cred, + credentials_file=None, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) diff --git a/owl-bot-staging/google-cloud-dataplex/v1/tests/unit/gapic/dataplex_v1/test_data_taxonomy_service.py b/owl-bot-staging/google-cloud-dataplex/v1/tests/unit/gapic/dataplex_v1/test_data_taxonomy_service.py new file mode 100644 index 000000000000..9ef5ff03519d --- /dev/null +++ b/owl-bot-staging/google-cloud-dataplex/v1/tests/unit/gapic/dataplex_v1/test_data_taxonomy_service.py @@ -0,0 +1,8505 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +import os +# try/except added for compatibility with python < 3.8 +try: + from unittest import mock + from unittest.mock import AsyncMock # pragma: NO COVER +except ImportError: # pragma: NO COVER + import mock + +import grpc +from grpc.experimental import aio +import math +import pytest +from google.api_core import api_core_version +from proto.marshal.rules.dates import DurationRule, TimestampRule +from proto.marshal.rules import wrappers + +try: + from google.auth.aio import credentials as ga_credentials_async + HAS_GOOGLE_AUTH_AIO = True +except ImportError: # pragma: NO COVER + HAS_GOOGLE_AUTH_AIO = False + +from google.api_core import client_options +from google.api_core import exceptions as core_exceptions +from google.api_core import future +from google.api_core import gapic_v1 +from google.api_core import grpc_helpers +from google.api_core import grpc_helpers_async +from google.api_core import operation +from google.api_core import operation_async # type: ignore +from google.api_core import operations_v1 +from google.api_core import path_template +from google.api_core import retry as retries +from google.auth import credentials as ga_credentials +from google.auth.exceptions import MutualTLSChannelError +from google.cloud.dataplex_v1.services.data_taxonomy_service import DataTaxonomyServiceAsyncClient +from google.cloud.dataplex_v1.services.data_taxonomy_service import DataTaxonomyServiceClient +from google.cloud.dataplex_v1.services.data_taxonomy_service import pagers +from google.cloud.dataplex_v1.services.data_taxonomy_service import transports +from google.cloud.dataplex_v1.types import data_taxonomy +from google.cloud.dataplex_v1.types import data_taxonomy as gcd_data_taxonomy +from google.cloud.dataplex_v1.types import security +from google.cloud.dataplex_v1.types import service +from google.cloud.location import locations_pb2 +from google.iam.v1 import iam_policy_pb2 # type: ignore +from google.iam.v1 import options_pb2 # type: ignore +from google.iam.v1 import policy_pb2 # type: ignore +from google.longrunning import operations_pb2 # type: ignore +from google.oauth2 import service_account +from google.protobuf import empty_pb2 # type: ignore +from google.protobuf import field_mask_pb2 # type: ignore +from google.protobuf import timestamp_pb2 # type: ignore +import google.auth + + +async def mock_async_gen(data, chunk_size=1): + for i in range(0, len(data)): # pragma: NO COVER + chunk = data[i : i + chunk_size] + yield chunk.encode("utf-8") + +def client_cert_source_callback(): + return b"cert bytes", b"key bytes" + +# TODO: use async auth anon credentials by default once the minimum version of google-auth is upgraded. +# See related issue: https://github.com/googleapis/gapic-generator-python/issues/2107. +def async_anonymous_credentials(): + if HAS_GOOGLE_AUTH_AIO: + return ga_credentials_async.AnonymousCredentials() + return ga_credentials.AnonymousCredentials() + +# If default endpoint is localhost, then default mtls endpoint will be the same. +# This method modifies the default endpoint so the client can produce a different +# mtls endpoint for endpoint testing purposes. +def modify_default_endpoint(client): + return "foo.googleapis.com" if ("localhost" in client.DEFAULT_ENDPOINT) else client.DEFAULT_ENDPOINT + +# If default endpoint template is localhost, then default mtls endpoint will be the same. +# This method modifies the default endpoint template so the client can produce a different +# mtls endpoint for endpoint testing purposes. 
+def modify_default_endpoint_template(client):
+    return "test.{UNIVERSE_DOMAIN}" if ("localhost" in client._DEFAULT_ENDPOINT_TEMPLATE) else client._DEFAULT_ENDPOINT_TEMPLATE
+
+
+def test__get_default_mtls_endpoint():
+    api_endpoint = "example.googleapis.com"
+    api_mtls_endpoint = "example.mtls.googleapis.com"
+    sandbox_endpoint = "example.sandbox.googleapis.com"
+    sandbox_mtls_endpoint = "example.mtls.sandbox.googleapis.com"
+    non_googleapi = "api.example.com"
+
+    assert DataTaxonomyServiceClient._get_default_mtls_endpoint(None) is None
+    assert DataTaxonomyServiceClient._get_default_mtls_endpoint(api_endpoint) == api_mtls_endpoint
+    assert DataTaxonomyServiceClient._get_default_mtls_endpoint(api_mtls_endpoint) == api_mtls_endpoint
+    assert DataTaxonomyServiceClient._get_default_mtls_endpoint(sandbox_endpoint) == sandbox_mtls_endpoint
+    assert DataTaxonomyServiceClient._get_default_mtls_endpoint(sandbox_mtls_endpoint) == sandbox_mtls_endpoint
+    assert DataTaxonomyServiceClient._get_default_mtls_endpoint(non_googleapi) == non_googleapi
+
+def test__read_environment_variables():
+    assert DataTaxonomyServiceClient._read_environment_variables() == (False, "auto", None)
+
+    with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}):
+        assert DataTaxonomyServiceClient._read_environment_variables() == (True, "auto", None)
+
+    with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "false"}):
+        assert DataTaxonomyServiceClient._read_environment_variables() == (False, "auto", None)
+
+    with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"}):
+        with pytest.raises(ValueError) as excinfo:
+            DataTaxonomyServiceClient._read_environment_variables()
+    assert str(excinfo.value) == "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`"
+
+    with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}):
+        assert DataTaxonomyServiceClient._read_environment_variables() == (False, "never", None)
+
+    with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}):
+        assert DataTaxonomyServiceClient._read_environment_variables() == (False, "always", None)
+
+    with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"}):
+        assert DataTaxonomyServiceClient._read_environment_variables() == (False, "auto", None)
+
+    with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}):
+        with pytest.raises(MutualTLSChannelError) as excinfo:
+            DataTaxonomyServiceClient._read_environment_variables()
+    assert str(excinfo.value) == "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`"
+
+    with mock.patch.dict(os.environ, {"GOOGLE_CLOUD_UNIVERSE_DOMAIN": "foo.com"}):
+        assert DataTaxonomyServiceClient._read_environment_variables() == (False, "auto", "foo.com")
+
+def test__get_client_cert_source():
+    mock_provided_cert_source = mock.Mock()
+    mock_default_cert_source = mock.Mock()
+
+    assert DataTaxonomyServiceClient._get_client_cert_source(None, False) is None
+    assert DataTaxonomyServiceClient._get_client_cert_source(mock_provided_cert_source, False) is None
+    assert DataTaxonomyServiceClient._get_client_cert_source(mock_provided_cert_source, True) == mock_provided_cert_source
+
+    with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=True):
+        with mock.patch('google.auth.transport.mtls.default_client_cert_source', return_value=mock_default_cert_source):
+            assert DataTaxonomyServiceClient._get_client_cert_source(None, True) is mock_default_cert_source
+            assert DataTaxonomyServiceClient._get_client_cert_source(mock_provided_cert_source, "true") is mock_provided_cert_source
+
+@mock.patch.object(DataTaxonomyServiceClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(DataTaxonomyServiceClient))
+@mock.patch.object(DataTaxonomyServiceAsyncClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(DataTaxonomyServiceAsyncClient))
+def test__get_api_endpoint():
+    api_override = "foo.com"
+    mock_client_cert_source = mock.Mock()
+    default_universe = DataTaxonomyServiceClient._DEFAULT_UNIVERSE
+    default_endpoint = DataTaxonomyServiceClient._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=default_universe)
+    mock_universe = "bar.com"
+    mock_endpoint = DataTaxonomyServiceClient._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=mock_universe)
+
+    assert DataTaxonomyServiceClient._get_api_endpoint(api_override, mock_client_cert_source, default_universe, "always") == api_override
+    assert DataTaxonomyServiceClient._get_api_endpoint(None, mock_client_cert_source, default_universe, "auto") == DataTaxonomyServiceClient.DEFAULT_MTLS_ENDPOINT
+    assert DataTaxonomyServiceClient._get_api_endpoint(None, None, default_universe, "auto") == default_endpoint
+    assert DataTaxonomyServiceClient._get_api_endpoint(None, None, default_universe, "always") == DataTaxonomyServiceClient.DEFAULT_MTLS_ENDPOINT
+    assert DataTaxonomyServiceClient._get_api_endpoint(None, mock_client_cert_source, default_universe, "always") == DataTaxonomyServiceClient.DEFAULT_MTLS_ENDPOINT
+    assert DataTaxonomyServiceClient._get_api_endpoint(None, None, mock_universe, "never") == mock_endpoint
+    assert DataTaxonomyServiceClient._get_api_endpoint(None, None, default_universe, "never") == default_endpoint
+
+    with pytest.raises(MutualTLSChannelError) as excinfo:
+        DataTaxonomyServiceClient._get_api_endpoint(None, mock_client_cert_source, mock_universe, "auto")
+    assert str(excinfo.value) == "mTLS is not supported in any universe other than googleapis.com."
+
+
+def test__get_universe_domain():
+    client_universe_domain = "foo.com"
+    universe_domain_env = "bar.com"
+
+    assert DataTaxonomyServiceClient._get_universe_domain(client_universe_domain, universe_domain_env) == client_universe_domain
+    assert DataTaxonomyServiceClient._get_universe_domain(None, universe_domain_env) == universe_domain_env
+    assert DataTaxonomyServiceClient._get_universe_domain(None, None) == DataTaxonomyServiceClient._DEFAULT_UNIVERSE
+
+    with pytest.raises(ValueError) as excinfo:
+        DataTaxonomyServiceClient._get_universe_domain("", None)
+    assert str(excinfo.value) == "Universe Domain cannot be an empty string."
+ + +@pytest.mark.parametrize("client_class,transport_name", [ + (DataTaxonomyServiceClient, "grpc"), + (DataTaxonomyServiceAsyncClient, "grpc_asyncio"), +]) +def test_data_taxonomy_service_client_from_service_account_info(client_class, transport_name): + creds = ga_credentials.AnonymousCredentials() + with mock.patch.object(service_account.Credentials, 'from_service_account_info') as factory: + factory.return_value = creds + info = {"valid": True} + client = client_class.from_service_account_info(info, transport=transport_name) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + assert client.transport._host == ( + 'dataplex.googleapis.com:443' + ) + + +@pytest.mark.parametrize("transport_class,transport_name", [ + (transports.DataTaxonomyServiceGrpcTransport, "grpc"), + (transports.DataTaxonomyServiceGrpcAsyncIOTransport, "grpc_asyncio"), +]) +def test_data_taxonomy_service_client_service_account_always_use_jwt(transport_class, transport_name): + with mock.patch.object(service_account.Credentials, 'with_always_use_jwt_access', create=True) as use_jwt: + creds = service_account.Credentials(None, None, None) + transport = transport_class(credentials=creds, always_use_jwt_access=True) + use_jwt.assert_called_once_with(True) + + with mock.patch.object(service_account.Credentials, 'with_always_use_jwt_access', create=True) as use_jwt: + creds = service_account.Credentials(None, None, None) + transport = transport_class(credentials=creds, always_use_jwt_access=False) + use_jwt.assert_not_called() + + +@pytest.mark.parametrize("client_class,transport_name", [ + (DataTaxonomyServiceClient, "grpc"), + (DataTaxonomyServiceAsyncClient, "grpc_asyncio"), +]) +def test_data_taxonomy_service_client_from_service_account_file(client_class, transport_name): + creds = ga_credentials.AnonymousCredentials() + with mock.patch.object(service_account.Credentials, 'from_service_account_file') as factory: + factory.return_value = creds + client = client_class.from_service_account_file("dummy/file/path.json", transport=transport_name) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + client = client_class.from_service_account_json("dummy/file/path.json", transport=transport_name) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + assert client.transport._host == ( + 'dataplex.googleapis.com:443' + ) + + +def test_data_taxonomy_service_client_get_transport_class(): + transport = DataTaxonomyServiceClient.get_transport_class() + available_transports = [ + transports.DataTaxonomyServiceGrpcTransport, + ] + assert transport in available_transports + + transport = DataTaxonomyServiceClient.get_transport_class("grpc") + assert transport == transports.DataTaxonomyServiceGrpcTransport + + +@pytest.mark.parametrize("client_class,transport_class,transport_name", [ + (DataTaxonomyServiceClient, transports.DataTaxonomyServiceGrpcTransport, "grpc"), + (DataTaxonomyServiceAsyncClient, transports.DataTaxonomyServiceGrpcAsyncIOTransport, "grpc_asyncio"), +]) +@mock.patch.object(DataTaxonomyServiceClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(DataTaxonomyServiceClient)) +@mock.patch.object(DataTaxonomyServiceAsyncClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(DataTaxonomyServiceAsyncClient)) +def test_data_taxonomy_service_client_client_options(client_class, transport_class, transport_name): + # Check that if channel is provided we won't create a new one. 
+ with mock.patch.object(DataTaxonomyServiceClient, 'get_transport_class') as gtc: + transport = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ) + client = client_class(transport=transport) + gtc.assert_not_called() + + # Check that if channel is provided via str we will create a new one. + with mock.patch.object(DataTaxonomyServiceClient, 'get_transport_class') as gtc: + client = client_class(transport=transport_name) + gtc.assert_called() + + # Check the case api_endpoint is provided. + options = client_options.ClientOptions(api_endpoint="squid.clam.whelk") + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(transport=transport_name, client_options=options) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host="squid.clam.whelk", + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is + # "never". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is + # "always". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_MTLS_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has + # unsupported value. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): + with pytest.raises(MutualTLSChannelError) as excinfo: + client = client_class(transport=transport_name) + assert str(excinfo.value) == "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + + # Check the case GOOGLE_API_USE_CLIENT_CERTIFICATE has unsupported value. 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"}): + with pytest.raises(ValueError) as excinfo: + client = client_class(transport=transport_name) + assert str(excinfo.value) == "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" + + # Check the case quota_project_id is provided + options = client_options.ClientOptions(quota_project_id="octopus") + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id="octopus", + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + # Check the case api_endpoint is provided + options = client_options.ClientOptions(api_audience="https://language.googleapis.com") + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience="https://language.googleapis.com" + ) + +@pytest.mark.parametrize("client_class,transport_class,transport_name,use_client_cert_env", [ + (DataTaxonomyServiceClient, transports.DataTaxonomyServiceGrpcTransport, "grpc", "true"), + (DataTaxonomyServiceAsyncClient, transports.DataTaxonomyServiceGrpcAsyncIOTransport, "grpc_asyncio", "true"), + (DataTaxonomyServiceClient, transports.DataTaxonomyServiceGrpcTransport, "grpc", "false"), + (DataTaxonomyServiceAsyncClient, transports.DataTaxonomyServiceGrpcAsyncIOTransport, "grpc_asyncio", "false"), +]) +@mock.patch.object(DataTaxonomyServiceClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(DataTaxonomyServiceClient)) +@mock.patch.object(DataTaxonomyServiceAsyncClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(DataTaxonomyServiceAsyncClient)) +@mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"}) +def test_data_taxonomy_service_client_mtls_env_auto(client_class, transport_class, transport_name, use_client_cert_env): + # This tests the endpoint autoswitch behavior. Endpoint is autoswitched to the default + # mtls endpoint, if GOOGLE_API_USE_CLIENT_CERTIFICATE is "true" and client cert exists. + + # Check the case client_cert_source is provided. Whether client cert is used depends on + # GOOGLE_API_USE_CLIENT_CERTIFICATE value. 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}): + options = client_options.ClientOptions(client_cert_source=client_cert_source_callback) + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + + if use_client_cert_env == "false": + expected_client_cert_source = None + expected_host = client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE) + else: + expected_client_cert_source = client_cert_source_callback + expected_host = client.DEFAULT_MTLS_ENDPOINT + + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=expected_host, + scopes=None, + client_cert_source_for_mtls=expected_client_cert_source, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case ADC client cert is provided. Whether client cert is used depends on + # GOOGLE_API_USE_CLIENT_CERTIFICATE value. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}): + with mock.patch.object(transport_class, '__init__') as patched: + with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=True): + with mock.patch('google.auth.transport.mtls.default_client_cert_source', return_value=client_cert_source_callback): + if use_client_cert_env == "false": + expected_host = client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE) + expected_client_cert_source = None + else: + expected_host = client.DEFAULT_MTLS_ENDPOINT + expected_client_cert_source = client_cert_source_callback + + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=expected_host, + scopes=None, + client_cert_source_for_mtls=expected_client_cert_source, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case client_cert_source and ADC client cert are not provided. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}): + with mock.patch.object(transport_class, '__init__') as patched: + with mock.patch("google.auth.transport.mtls.has_default_client_cert_source", return_value=False): + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +@pytest.mark.parametrize("client_class", [ + DataTaxonomyServiceClient, DataTaxonomyServiceAsyncClient +]) +@mock.patch.object(DataTaxonomyServiceClient, "DEFAULT_ENDPOINT", modify_default_endpoint(DataTaxonomyServiceClient)) +@mock.patch.object(DataTaxonomyServiceAsyncClient, "DEFAULT_ENDPOINT", modify_default_endpoint(DataTaxonomyServiceAsyncClient)) +def test_data_taxonomy_service_client_get_mtls_endpoint_and_cert_source(client_class): + mock_client_cert_source = mock.Mock() + + # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "true". 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + mock_api_endpoint = "foo" + options = client_options.ClientOptions(client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint) + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source(options) + assert api_endpoint == mock_api_endpoint + assert cert_source == mock_client_cert_source + + # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "false". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "false"}): + mock_client_cert_source = mock.Mock() + mock_api_endpoint = "foo" + options = client_options.ClientOptions(client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint) + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source(options) + assert api_endpoint == mock_api_endpoint + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "never". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "always". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert doesn't exist. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=False): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert exists. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=True): + with mock.patch('google.auth.transport.mtls.default_client_cert_source', return_value=mock_client_cert_source): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT + assert cert_source == mock_client_cert_source + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has + # unsupported value. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): + with pytest.raises(MutualTLSChannelError) as excinfo: + client_class.get_mtls_endpoint_and_cert_source() + + assert str(excinfo.value) == "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + + # Check the case GOOGLE_API_USE_CLIENT_CERTIFICATE has unsupported value. 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"}): + with pytest.raises(ValueError) as excinfo: + client_class.get_mtls_endpoint_and_cert_source() + + assert str(excinfo.value) == "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" + +@pytest.mark.parametrize("client_class", [ + DataTaxonomyServiceClient, DataTaxonomyServiceAsyncClient +]) +@mock.patch.object(DataTaxonomyServiceClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(DataTaxonomyServiceClient)) +@mock.patch.object(DataTaxonomyServiceAsyncClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(DataTaxonomyServiceAsyncClient)) +def test_data_taxonomy_service_client_client_api_endpoint(client_class): + mock_client_cert_source = client_cert_source_callback + api_override = "foo.com" + default_universe = DataTaxonomyServiceClient._DEFAULT_UNIVERSE + default_endpoint = DataTaxonomyServiceClient._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=default_universe) + mock_universe = "bar.com" + mock_endpoint = DataTaxonomyServiceClient._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=mock_universe) + + # If ClientOptions.api_endpoint is set and GOOGLE_API_USE_CLIENT_CERTIFICATE="true", + # use ClientOptions.api_endpoint as the api endpoint regardless. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + with mock.patch("google.auth.transport.requests.AuthorizedSession.configure_mtls_channel"): + options = client_options.ClientOptions(client_cert_source=mock_client_cert_source, api_endpoint=api_override) + client = client_class(client_options=options, credentials=ga_credentials.AnonymousCredentials()) + assert client.api_endpoint == api_override + + # If ClientOptions.api_endpoint is not set and GOOGLE_API_USE_MTLS_ENDPOINT="never", + # use the _DEFAULT_ENDPOINT_TEMPLATE populated with GDU as the api endpoint. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + client = client_class(credentials=ga_credentials.AnonymousCredentials()) + assert client.api_endpoint == default_endpoint + + # If ClientOptions.api_endpoint is not set and GOOGLE_API_USE_MTLS_ENDPOINT="always", + # use the DEFAULT_MTLS_ENDPOINT as the api endpoint. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + client = client_class(credentials=ga_credentials.AnonymousCredentials()) + assert client.api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT + + # If ClientOptions.api_endpoint is not set, GOOGLE_API_USE_MTLS_ENDPOINT="auto" (default), + # GOOGLE_API_USE_CLIENT_CERTIFICATE="false" (default), default cert source doesn't exist, + # and ClientOptions.universe_domain="bar.com", + # use the _DEFAULT_ENDPOINT_TEMPLATE populated with universe domain as the api endpoint. 
+ options = client_options.ClientOptions() + universe_exists = hasattr(options, "universe_domain") + if universe_exists: + options = client_options.ClientOptions(universe_domain=mock_universe) + client = client_class(client_options=options, credentials=ga_credentials.AnonymousCredentials()) + else: + client = client_class(client_options=options, credentials=ga_credentials.AnonymousCredentials()) + assert client.api_endpoint == (mock_endpoint if universe_exists else default_endpoint) + assert client.universe_domain == (mock_universe if universe_exists else default_universe) + + # If ClientOptions does not have a universe domain attribute and GOOGLE_API_USE_MTLS_ENDPOINT="never", + # use the _DEFAULT_ENDPOINT_TEMPLATE populated with GDU as the api endpoint. + options = client_options.ClientOptions() + if hasattr(options, "universe_domain"): + delattr(options, "universe_domain") + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + client = client_class(client_options=options, credentials=ga_credentials.AnonymousCredentials()) + assert client.api_endpoint == default_endpoint + + +@pytest.mark.parametrize("client_class,transport_class,transport_name", [ + (DataTaxonomyServiceClient, transports.DataTaxonomyServiceGrpcTransport, "grpc"), + (DataTaxonomyServiceAsyncClient, transports.DataTaxonomyServiceGrpcAsyncIOTransport, "grpc_asyncio"), +]) +def test_data_taxonomy_service_client_client_options_scopes(client_class, transport_class, transport_name): + # Check the case scopes are provided. + options = client_options.ClientOptions( + scopes=["1", "2"], + ) + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE), + scopes=["1", "2"], + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + +@pytest.mark.parametrize("client_class,transport_class,transport_name,grpc_helpers", [ + (DataTaxonomyServiceClient, transports.DataTaxonomyServiceGrpcTransport, "grpc", grpc_helpers), + (DataTaxonomyServiceAsyncClient, transports.DataTaxonomyServiceGrpcAsyncIOTransport, "grpc_asyncio", grpc_helpers_async), +]) +def test_data_taxonomy_service_client_client_options_credentials_file(client_class, transport_class, transport_name, grpc_helpers): + # Check the case credentials file is provided. 
+ options = client_options.ClientOptions( + credentials_file="credentials.json" + ) + + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file="credentials.json", + host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + +def test_data_taxonomy_service_client_client_options_from_dict(): + with mock.patch('google.cloud.dataplex_v1.services.data_taxonomy_service.transports.DataTaxonomyServiceGrpcTransport.__init__') as grpc_transport: + grpc_transport.return_value = None + client = DataTaxonomyServiceClient( + client_options={'api_endpoint': 'squid.clam.whelk'} + ) + grpc_transport.assert_called_once_with( + credentials=None, + credentials_file=None, + host="squid.clam.whelk", + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +@pytest.mark.parametrize("client_class,transport_class,transport_name,grpc_helpers", [ + (DataTaxonomyServiceClient, transports.DataTaxonomyServiceGrpcTransport, "grpc", grpc_helpers), + (DataTaxonomyServiceAsyncClient, transports.DataTaxonomyServiceGrpcAsyncIOTransport, "grpc_asyncio", grpc_helpers_async), +]) +def test_data_taxonomy_service_client_create_channel_credentials_file(client_class, transport_class, transport_name, grpc_helpers): + # Check the case credentials file is provided. + options = client_options.ClientOptions( + credentials_file="credentials.json" + ) + + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file="credentials.json", + host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # test that the credentials from file are saved and used as the credentials. 
+ with mock.patch.object( + google.auth, "load_credentials_from_file", autospec=True + ) as load_creds, mock.patch.object( + google.auth, "default", autospec=True + ) as adc, mock.patch.object( + grpc_helpers, "create_channel" + ) as create_channel: + creds = ga_credentials.AnonymousCredentials() + file_creds = ga_credentials.AnonymousCredentials() + load_creds.return_value = (file_creds, None) + adc.return_value = (creds, None) + client = client_class(client_options=options, transport=transport_name) + create_channel.assert_called_with( + "dataplex.googleapis.com:443", + credentials=file_creds, + credentials_file=None, + quota_project_id=None, + default_scopes=( + 'https://www.googleapis.com/auth/cloud-platform', +), + scopes=None, + default_host="dataplex.googleapis.com", + ssl_credentials=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + +@pytest.mark.parametrize("request_type", [ + gcd_data_taxonomy.CreateDataTaxonomyRequest, + dict, +]) +def test_create_data_taxonomy(request_type, transport: str = 'grpc'): + client = DataTaxonomyServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_data_taxonomy), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name='operations/spam') + response = client.create_data_taxonomy(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = gcd_data_taxonomy.CreateDataTaxonomyRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +def test_create_data_taxonomy_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = DataTaxonomyServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = gcd_data_taxonomy.CreateDataTaxonomyRequest( + parent='parent_value', + data_taxonomy_id='data_taxonomy_id_value', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_data_taxonomy), + '__call__') as call: + call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. 
+ client.create_data_taxonomy(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == gcd_data_taxonomy.CreateDataTaxonomyRequest( + parent='parent_value', + data_taxonomy_id='data_taxonomy_id_value', + ) + +def test_create_data_taxonomy_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = DataTaxonomyServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.create_data_taxonomy in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client._transport._wrapped_methods[client._transport.create_data_taxonomy] = mock_rpc + request = {} + client.create_data_taxonomy(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.create_data_taxonomy(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + +@pytest.mark.asyncio +async def test_create_data_taxonomy_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = DataTaxonomyServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._client._transport.create_data_taxonomy in client._client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[client._client._transport.create_data_taxonomy] = mock_rpc + + request = {} + await client.create_data_taxonomy(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + await client.create_data_taxonomy(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + +@pytest.mark.asyncio +async def test_create_data_taxonomy_async(transport: str = 'grpc_asyncio', request_type=gcd_data_taxonomy.CreateDataTaxonomyRequest): + client = DataTaxonomyServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. 
+ request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_data_taxonomy), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name='operations/spam') + ) + response = await client.create_data_taxonomy(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = gcd_data_taxonomy.CreateDataTaxonomyRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +@pytest.mark.asyncio +async def test_create_data_taxonomy_async_from_dict(): + await test_create_data_taxonomy_async(request_type=dict) + +def test_create_data_taxonomy_field_headers(): + client = DataTaxonomyServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = gcd_data_taxonomy.CreateDataTaxonomyRequest() + + request.parent = 'parent_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_data_taxonomy), + '__call__') as call: + call.return_value = operations_pb2.Operation(name='operations/op') + client.create_data_taxonomy(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'parent=parent_value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_create_data_taxonomy_field_headers_async(): + client = DataTaxonomyServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = gcd_data_taxonomy.CreateDataTaxonomyRequest() + + request.parent = 'parent_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_data_taxonomy), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(operations_pb2.Operation(name='operations/op')) + await client.create_data_taxonomy(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'parent=parent_value', + ) in kw['metadata'] + + +def test_create_data_taxonomy_flattened(): + client = DataTaxonomyServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_data_taxonomy), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name='operations/op') + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. 
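+        # The flattened keyword arguments below should be copied by the
+        # client into a CreateDataTaxonomyRequest; the assertions that
+        # follow check that each value landed on the request the stub saw.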
+        client.create_data_taxonomy(
+            parent='parent_value',
+            data_taxonomy=gcd_data_taxonomy.DataTaxonomy(name='name_value'),
+            data_taxonomy_id='data_taxonomy_id_value',
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        arg = args[0].parent
+        mock_val = 'parent_value'
+        assert arg == mock_val
+        arg = args[0].data_taxonomy
+        mock_val = gcd_data_taxonomy.DataTaxonomy(name='name_value')
+        assert arg == mock_val
+        arg = args[0].data_taxonomy_id
+        mock_val = 'data_taxonomy_id_value'
+        assert arg == mock_val
+
+
+def test_create_data_taxonomy_flattened_error():
+    client = DataTaxonomyServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        client.create_data_taxonomy(
+            gcd_data_taxonomy.CreateDataTaxonomyRequest(),
+            parent='parent_value',
+            data_taxonomy=gcd_data_taxonomy.DataTaxonomy(name='name_value'),
+            data_taxonomy_id='data_taxonomy_id_value',
+        )
+
+@pytest.mark.asyncio
+async def test_create_data_taxonomy_flattened_async():
+    client = DataTaxonomyServiceAsyncClient(
+        credentials=async_anonymous_credentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.create_data_taxonomy),
+            '__call__') as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
+            operations_pb2.Operation(name='operations/spam')
+        )
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        response = await client.create_data_taxonomy(
+            parent='parent_value',
+            data_taxonomy=gcd_data_taxonomy.DataTaxonomy(name='name_value'),
+            data_taxonomy_id='data_taxonomy_id_value',
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        arg = args[0].parent
+        mock_val = 'parent_value'
+        assert arg == mock_val
+        arg = args[0].data_taxonomy
+        mock_val = gcd_data_taxonomy.DataTaxonomy(name='name_value')
+        assert arg == mock_val
+        arg = args[0].data_taxonomy_id
+        mock_val = 'data_taxonomy_id_value'
+        assert arg == mock_val
+
+@pytest.mark.asyncio
+async def test_create_data_taxonomy_flattened_error_async():
+    client = DataTaxonomyServiceAsyncClient(
+        credentials=async_anonymous_credentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        await client.create_data_taxonomy(
+            gcd_data_taxonomy.CreateDataTaxonomyRequest(),
+            parent='parent_value',
+            data_taxonomy=gcd_data_taxonomy.DataTaxonomy(name='name_value'),
+            data_taxonomy_id='data_taxonomy_id_value',
+        )
+
+
+@pytest.mark.parametrize("request_type", [
+    gcd_data_taxonomy.UpdateDataTaxonomyRequest,
+    dict,
+])
+def test_update_data_taxonomy(request_type, transport: str = 'grpc'):
+    client = DataTaxonomyServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport,
+    )
+
+    # Everything is optional in proto3 as far as the runtime is concerned,
+    # and we are mocking out the actual API, so just send an empty request.
+    request = request_type()
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object( + type(client.transport.update_data_taxonomy), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name='operations/spam') + response = client.update_data_taxonomy(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = gcd_data_taxonomy.UpdateDataTaxonomyRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +def test_update_data_taxonomy_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = DataTaxonomyServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = gcd_data_taxonomy.UpdateDataTaxonomyRequest( + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_data_taxonomy), + '__call__') as call: + call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client.update_data_taxonomy(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == gcd_data_taxonomy.UpdateDataTaxonomyRequest( + ) + +def test_update_data_taxonomy_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = DataTaxonomyServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.update_data_taxonomy in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client._transport._wrapped_methods[client._transport.update_data_taxonomy] = mock_rpc + request = {} + client.update_data_taxonomy(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.update_data_taxonomy(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + +@pytest.mark.asyncio +async def test_update_data_taxonomy_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = DataTaxonomyServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._client._transport.update_data_taxonomy in client._client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[client._client._transport.update_data_taxonomy] = mock_rpc + + request = {} + await client.update_data_taxonomy(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + await client.update_data_taxonomy(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + +@pytest.mark.asyncio +async def test_update_data_taxonomy_async(transport: str = 'grpc_asyncio', request_type=gcd_data_taxonomy.UpdateDataTaxonomyRequest): + client = DataTaxonomyServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_data_taxonomy), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name='operations/spam') + ) + response = await client.update_data_taxonomy(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = gcd_data_taxonomy.UpdateDataTaxonomyRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +@pytest.mark.asyncio +async def test_update_data_taxonomy_async_from_dict(): + await test_update_data_taxonomy_async(request_type=dict) + +def test_update_data_taxonomy_field_headers(): + client = DataTaxonomyServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = gcd_data_taxonomy.UpdateDataTaxonomyRequest() + + request.data_taxonomy.name = 'name_value' + + # Mock the actual call within the gRPC stub, and fake the request. 
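+    # Patching __call__ on the stub's type intercepts the RPC at the gRPC
+    # boundary, so no channel or network traffic is involved.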
+ with mock.patch.object( + type(client.transport.update_data_taxonomy), + '__call__') as call: + call.return_value = operations_pb2.Operation(name='operations/op') + client.update_data_taxonomy(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'data_taxonomy.name=name_value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_update_data_taxonomy_field_headers_async(): + client = DataTaxonomyServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = gcd_data_taxonomy.UpdateDataTaxonomyRequest() + + request.data_taxonomy.name = 'name_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_data_taxonomy), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(operations_pb2.Operation(name='operations/op')) + await client.update_data_taxonomy(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'data_taxonomy.name=name_value', + ) in kw['metadata'] + + +def test_update_data_taxonomy_flattened(): + client = DataTaxonomyServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_data_taxonomy), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name='operations/op') + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.update_data_taxonomy( + data_taxonomy=gcd_data_taxonomy.DataTaxonomy(name='name_value'), + update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].data_taxonomy + mock_val = gcd_data_taxonomy.DataTaxonomy(name='name_value') + assert arg == mock_val + arg = args[0].update_mask + mock_val = field_mask_pb2.FieldMask(paths=['paths_value']) + assert arg == mock_val + + +def test_update_data_taxonomy_flattened_error(): + client = DataTaxonomyServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.update_data_taxonomy( + gcd_data_taxonomy.UpdateDataTaxonomyRequest(), + data_taxonomy=gcd_data_taxonomy.DataTaxonomy(name='name_value'), + update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), + ) + +@pytest.mark.asyncio +async def test_update_data_taxonomy_flattened_async(): + client = DataTaxonomyServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+    with mock.patch.object(
+            type(client.transport.update_data_taxonomy),
+            '__call__') as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
+            operations_pb2.Operation(name='operations/spam')
+        )
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        response = await client.update_data_taxonomy(
+            data_taxonomy=gcd_data_taxonomy.DataTaxonomy(name='name_value'),
+            update_mask=field_mask_pb2.FieldMask(paths=['paths_value']),
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        arg = args[0].data_taxonomy
+        mock_val = gcd_data_taxonomy.DataTaxonomy(name='name_value')
+        assert arg == mock_val
+        arg = args[0].update_mask
+        mock_val = field_mask_pb2.FieldMask(paths=['paths_value'])
+        assert arg == mock_val
+
+@pytest.mark.asyncio
+async def test_update_data_taxonomy_flattened_error_async():
+    client = DataTaxonomyServiceAsyncClient(
+        credentials=async_anonymous_credentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        await client.update_data_taxonomy(
+            gcd_data_taxonomy.UpdateDataTaxonomyRequest(),
+            data_taxonomy=gcd_data_taxonomy.DataTaxonomy(name='name_value'),
+            update_mask=field_mask_pb2.FieldMask(paths=['paths_value']),
+        )
+
+
+@pytest.mark.parametrize("request_type", [
+    data_taxonomy.DeleteDataTaxonomyRequest,
+    dict,
+])
+def test_delete_data_taxonomy(request_type, transport: str = 'grpc'):
+    client = DataTaxonomyServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport,
+    )
+
+    # Everything is optional in proto3 as far as the runtime is concerned,
+    # and we are mocking out the actual API, so just send an empty request.
+    request = request_type()
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.delete_data_taxonomy),
+            '__call__') as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = operations_pb2.Operation(name='operations/spam')
+        response = client.delete_data_taxonomy(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        request = data_taxonomy.DeleteDataTaxonomyRequest()
+        assert args[0] == request
+
+    # Establish that the response is the type that we expect.
+    assert isinstance(response, future.Future)
+
+
+def test_delete_data_taxonomy_non_empty_request_with_auto_populated_field():
+    # This test is a coverage failsafe to make sure that UUID4 fields are
+    # automatically populated, according to AIP-4235, with non-empty requests.
+    client = DataTaxonomyServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport='grpc',
+    )
+
+    # Populate all string fields in the request which are not UUID4
+    # since we want to check that UUID4 are populated automatically
+    # if they meet the requirements of AIP 4235.
+    request = data_taxonomy.DeleteDataTaxonomyRequest(
+        name='name_value',
+        etag='etag_value',
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object( + type(client.transport.delete_data_taxonomy), + '__call__') as call: + call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client.delete_data_taxonomy(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == data_taxonomy.DeleteDataTaxonomyRequest( + name='name_value', + etag='etag_value', + ) + +def test_delete_data_taxonomy_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = DataTaxonomyServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.delete_data_taxonomy in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client._transport._wrapped_methods[client._transport.delete_data_taxonomy] = mock_rpc + request = {} + client.delete_data_taxonomy(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.delete_data_taxonomy(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + +@pytest.mark.asyncio +async def test_delete_data_taxonomy_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = DataTaxonomyServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._client._transport.delete_data_taxonomy in client._client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[client._client._transport.delete_data_taxonomy] = mock_rpc + + request = {} + await client.delete_data_taxonomy(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + await client.delete_data_taxonomy(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + +@pytest.mark.asyncio +async def test_delete_data_taxonomy_async(transport: str = 'grpc_asyncio', request_type=data_taxonomy.DeleteDataTaxonomyRequest): + client = DataTaxonomyServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_data_taxonomy), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name='operations/spam') + ) + response = await client.delete_data_taxonomy(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = data_taxonomy.DeleteDataTaxonomyRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +@pytest.mark.asyncio +async def test_delete_data_taxonomy_async_from_dict(): + await test_delete_data_taxonomy_async(request_type=dict) + +def test_delete_data_taxonomy_field_headers(): + client = DataTaxonomyServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = data_taxonomy.DeleteDataTaxonomyRequest() + + request.name = 'name_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_data_taxonomy), + '__call__') as call: + call.return_value = operations_pb2.Operation(name='operations/op') + client.delete_data_taxonomy(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'name=name_value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_delete_data_taxonomy_field_headers_async(): + client = DataTaxonomyServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = data_taxonomy.DeleteDataTaxonomyRequest() + + request.name = 'name_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_data_taxonomy), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(operations_pb2.Operation(name='operations/op')) + await client.delete_data_taxonomy(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. 
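+    # The routing header travels in the call metadata; a minimal sketch of
+    # how such a header is built (via the gapic_v1.routing_header helper
+    # exercised elsewhere in this file):
+    #
+    #   gapic_v1.routing_header.to_grpc_metadata((('name', 'name_value'),))
+    #   # -> ('x-goog-request-params', 'name=name_value')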
+    _, _, kw = call.mock_calls[0]
+    assert (
+        'x-goog-request-params',
+        'name=name_value',
+    ) in kw['metadata']
+
+
+def test_delete_data_taxonomy_flattened():
+    client = DataTaxonomyServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.delete_data_taxonomy),
+            '__call__') as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = operations_pb2.Operation(name='operations/op')
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        client.delete_data_taxonomy(
+            name='name_value',
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        arg = args[0].name
+        mock_val = 'name_value'
+        assert arg == mock_val
+
+
+def test_delete_data_taxonomy_flattened_error():
+    client = DataTaxonomyServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        client.delete_data_taxonomy(
+            data_taxonomy.DeleteDataTaxonomyRequest(),
+            name='name_value',
+        )
+
+@pytest.mark.asyncio
+async def test_delete_data_taxonomy_flattened_async():
+    client = DataTaxonomyServiceAsyncClient(
+        credentials=async_anonymous_credentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.delete_data_taxonomy),
+            '__call__') as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
+            operations_pb2.Operation(name='operations/spam')
+        )
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        response = await client.delete_data_taxonomy(
+            name='name_value',
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        arg = args[0].name
+        mock_val = 'name_value'
+        assert arg == mock_val
+
+@pytest.mark.asyncio
+async def test_delete_data_taxonomy_flattened_error_async():
+    client = DataTaxonomyServiceAsyncClient(
+        credentials=async_anonymous_credentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        await client.delete_data_taxonomy(
+            data_taxonomy.DeleteDataTaxonomyRequest(),
+            name='name_value',
+        )
+
+
+@pytest.mark.parametrize("request_type", [
+    data_taxonomy.ListDataTaxonomiesRequest,
+    dict,
+])
+def test_list_data_taxonomies(request_type, transport: str = 'grpc'):
+    client = DataTaxonomyServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport,
+    )
+
+    # Everything is optional in proto3 as far as the runtime is concerned,
+    # and we are mocking out the actual API, so just send an empty request.
+    request = request_type()
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.list_data_taxonomies),
+            '__call__') as call:
+        # Designate an appropriate return value for the call.
+ call.return_value = data_taxonomy.ListDataTaxonomiesResponse( + next_page_token='next_page_token_value', + unreachable_locations=['unreachable_locations_value'], + ) + response = client.list_data_taxonomies(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = data_taxonomy.ListDataTaxonomiesRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListDataTaxonomiesPager) + assert response.next_page_token == 'next_page_token_value' + assert response.unreachable_locations == ['unreachable_locations_value'] + + +def test_list_data_taxonomies_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = DataTaxonomyServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = data_taxonomy.ListDataTaxonomiesRequest( + parent='parent_value', + page_token='page_token_value', + filter='filter_value', + order_by='order_by_value', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_data_taxonomies), + '__call__') as call: + call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client.list_data_taxonomies(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == data_taxonomy.ListDataTaxonomiesRequest( + parent='parent_value', + page_token='page_token_value', + filter='filter_value', + order_by='order_by_value', + ) + +def test_list_data_taxonomies_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = DataTaxonomyServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.list_data_taxonomies in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client._transport._wrapped_methods[client._transport.list_data_taxonomies] = mock_rpc + request = {} + client.list_data_taxonomies(request) + + # Establish that the underlying gRPC stub method was called. 
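+        # The mock was installed directly in _wrapped_methods, so a count of
+        # one indicates the call was dispatched through the cached wrapper
+        # rather than through a freshly wrapped method.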
+        assert mock_rpc.call_count == 1
+
+        client.list_data_taxonomies(request)
+
+        # Establish that a new wrapper was not created for this call
+        assert wrapper_fn.call_count == 0
+        assert mock_rpc.call_count == 2
+
+@pytest.mark.asyncio
+async def test_list_data_taxonomies_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"):
+    # Clients should use _prep_wrapped_messages to create cached wrapped rpcs,
+    # instead of constructing them on each call
+    with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn:
+        client = DataTaxonomyServiceAsyncClient(
+            credentials=async_anonymous_credentials(),
+            transport=transport,
+        )
+
+        # Should wrap all calls on client creation
+        assert wrapper_fn.call_count > 0
+        wrapper_fn.reset_mock()
+
+        # Ensure method has been cached
+        assert client._client._transport.list_data_taxonomies in client._client._transport._wrapped_methods
+
+        # Replace cached wrapped function with mock
+        mock_rpc = mock.AsyncMock()
+        mock_rpc.return_value = mock.Mock()
+        client._client._transport._wrapped_methods[client._client._transport.list_data_taxonomies] = mock_rpc
+
+        request = {}
+        await client.list_data_taxonomies(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert mock_rpc.call_count == 1
+
+        await client.list_data_taxonomies(request)
+
+        # Establish that a new wrapper was not created for this call
+        assert wrapper_fn.call_count == 0
+        assert mock_rpc.call_count == 2
+
+@pytest.mark.asyncio
+async def test_list_data_taxonomies_async(transport: str = 'grpc_asyncio', request_type=data_taxonomy.ListDataTaxonomiesRequest):
+    client = DataTaxonomyServiceAsyncClient(
+        credentials=async_anonymous_credentials(),
+        transport=transport,
+    )
+
+    # Everything is optional in proto3 as far as the runtime is concerned,
+    # and we are mocking out the actual API, so just send an empty request.
+    request = request_type()
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.list_data_taxonomies),
+            '__call__') as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(data_taxonomy.ListDataTaxonomiesResponse(
+            next_page_token='next_page_token_value',
+            unreachable_locations=['unreachable_locations_value'],
+        ))
+        response = await client.list_data_taxonomies(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        request = data_taxonomy.ListDataTaxonomiesRequest()
+        assert args[0] == request
+
+    # Establish that the response is the type that we expect.
+    assert isinstance(response, pagers.ListDataTaxonomiesAsyncPager)
+    assert response.next_page_token == 'next_page_token_value'
+    assert response.unreachable_locations == ['unreachable_locations_value']
+
+
+@pytest.mark.asyncio
+async def test_list_data_taxonomies_async_from_dict():
+    await test_list_data_taxonomies_async(request_type=dict)
+
+def test_list_data_taxonomies_field_headers():
+    client = DataTaxonomyServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Any value that is part of the HTTP/1.1 URI should be sent as
+    # a field header. Set these to a non-empty value.
+    request = data_taxonomy.ListDataTaxonomiesRequest()
+
+    request.parent = 'parent_value'
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.list_data_taxonomies),
+            '__call__') as call:
+        call.return_value = data_taxonomy.ListDataTaxonomiesResponse()
+        client.list_data_taxonomies(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == request
+
+    # Establish that the field header was sent.
+    _, _, kw = call.mock_calls[0]
+    assert (
+        'x-goog-request-params',
+        'parent=parent_value',
+    ) in kw['metadata']
+
+
+@pytest.mark.asyncio
+async def test_list_data_taxonomies_field_headers_async():
+    client = DataTaxonomyServiceAsyncClient(
+        credentials=async_anonymous_credentials(),
+    )
+
+    # Any value that is part of the HTTP/1.1 URI should be sent as
+    # a field header. Set these to a non-empty value.
+    request = data_taxonomy.ListDataTaxonomiesRequest()
+
+    request.parent = 'parent_value'
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.list_data_taxonomies),
+            '__call__') as call:
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(data_taxonomy.ListDataTaxonomiesResponse())
+        await client.list_data_taxonomies(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == request
+
+    # Establish that the field header was sent.
+    _, _, kw = call.mock_calls[0]
+    assert (
+        'x-goog-request-params',
+        'parent=parent_value',
+    ) in kw['metadata']
+
+
+def test_list_data_taxonomies_flattened():
+    client = DataTaxonomyServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.list_data_taxonomies),
+            '__call__') as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = data_taxonomy.ListDataTaxonomiesResponse()
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        client.list_data_taxonomies(
+            parent='parent_value',
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        arg = args[0].parent
+        mock_val = 'parent_value'
+        assert arg == mock_val
+
+
+def test_list_data_taxonomies_flattened_error():
+    client = DataTaxonomyServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        client.list_data_taxonomies(
+            data_taxonomy.ListDataTaxonomiesRequest(),
+            parent='parent_value',
+        )
+
+@pytest.mark.asyncio
+async def test_list_data_taxonomies_flattened_async():
+    client = DataTaxonomyServiceAsyncClient(
+        credentials=async_anonymous_credentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.list_data_taxonomies),
+            '__call__') as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(data_taxonomy.ListDataTaxonomiesResponse())
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        response = await client.list_data_taxonomies(
+            parent='parent_value',
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        arg = args[0].parent
+        mock_val = 'parent_value'
+        assert arg == mock_val
+
+@pytest.mark.asyncio
+async def test_list_data_taxonomies_flattened_error_async():
+    client = DataTaxonomyServiceAsyncClient(
+        credentials=async_anonymous_credentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        await client.list_data_taxonomies(
+            data_taxonomy.ListDataTaxonomiesRequest(),
+            parent='parent_value',
+        )
+
+
+def test_list_data_taxonomies_pager(transport_name: str = "grpc"):
+    client = DataTaxonomyServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport_name,
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.list_data_taxonomies),
+            '__call__') as call:
+        # Set the response to a series of pages.
+        call.side_effect = (
+            data_taxonomy.ListDataTaxonomiesResponse(
+                data_taxonomies=[
+                    data_taxonomy.DataTaxonomy(),
+                    data_taxonomy.DataTaxonomy(),
+                    data_taxonomy.DataTaxonomy(),
+                ],
+                next_page_token='abc',
+            ),
+            data_taxonomy.ListDataTaxonomiesResponse(
+                data_taxonomies=[],
+                next_page_token='def',
+            ),
+            data_taxonomy.ListDataTaxonomiesResponse(
+                data_taxonomies=[
+                    data_taxonomy.DataTaxonomy(),
+                ],
+                next_page_token='ghi',
+            ),
+            data_taxonomy.ListDataTaxonomiesResponse(
+                data_taxonomies=[
+                    data_taxonomy.DataTaxonomy(),
+                    data_taxonomy.DataTaxonomy(),
+                ],
+            ),
+            RuntimeError,
+        )
+
+        expected_metadata = ()
+        retry = retries.Retry()
+        timeout = 5
+        expected_metadata = tuple(expected_metadata) + (
+            gapic_v1.routing_header.to_grpc_metadata((
+                ('parent', ''),
+            )),
+        )
+        pager = client.list_data_taxonomies(request={}, retry=retry, timeout=timeout)
+
+        assert pager._metadata == expected_metadata
+        assert pager._retry == retry
+        assert pager._timeout == timeout
+
+        results = list(pager)
+        assert len(results) == 6
+        assert all(isinstance(i, data_taxonomy.DataTaxonomy)
+                   for i in results)
+
+
+def test_list_data_taxonomies_pages(transport_name: str = "grpc"):
+    client = DataTaxonomyServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport_name,
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.list_data_taxonomies),
+            '__call__') as call:
+        # Set the response to a series of pages.
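+        # With an iterable side_effect, each successive stub call returns
+        # the next item; reaching the trailing RuntimeError would mean the
+        # pager requested more pages than the test provided.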
+ call.side_effect = ( + data_taxonomy.ListDataTaxonomiesResponse( + data_taxonomies=[ + data_taxonomy.DataTaxonomy(), + data_taxonomy.DataTaxonomy(), + data_taxonomy.DataTaxonomy(), + ], + next_page_token='abc', + ), + data_taxonomy.ListDataTaxonomiesResponse( + data_taxonomies=[], + next_page_token='def', + ), + data_taxonomy.ListDataTaxonomiesResponse( + data_taxonomies=[ + data_taxonomy.DataTaxonomy(), + ], + next_page_token='ghi', + ), + data_taxonomy.ListDataTaxonomiesResponse( + data_taxonomies=[ + data_taxonomy.DataTaxonomy(), + data_taxonomy.DataTaxonomy(), + ], + ), + RuntimeError, + ) + pages = list(client.list_data_taxonomies(request={}).pages) + for page_, token in zip(pages, ['abc','def','ghi', '']): + assert page_.raw_page.next_page_token == token + +@pytest.mark.asyncio +async def test_list_data_taxonomies_async_pager(): + client = DataTaxonomyServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_data_taxonomies), + '__call__', new_callable=mock.AsyncMock) as call: + # Set the response to a series of pages. + call.side_effect = ( + data_taxonomy.ListDataTaxonomiesResponse( + data_taxonomies=[ + data_taxonomy.DataTaxonomy(), + data_taxonomy.DataTaxonomy(), + data_taxonomy.DataTaxonomy(), + ], + next_page_token='abc', + ), + data_taxonomy.ListDataTaxonomiesResponse( + data_taxonomies=[], + next_page_token='def', + ), + data_taxonomy.ListDataTaxonomiesResponse( + data_taxonomies=[ + data_taxonomy.DataTaxonomy(), + ], + next_page_token='ghi', + ), + data_taxonomy.ListDataTaxonomiesResponse( + data_taxonomies=[ + data_taxonomy.DataTaxonomy(), + data_taxonomy.DataTaxonomy(), + ], + ), + RuntimeError, + ) + async_pager = await client.list_data_taxonomies(request={},) + assert async_pager.next_page_token == 'abc' + responses = [] + async for response in async_pager: # pragma: no branch + responses.append(response) + + assert len(responses) == 6 + assert all(isinstance(i, data_taxonomy.DataTaxonomy) + for i in responses) + + +@pytest.mark.asyncio +async def test_list_data_taxonomies_async_pages(): + client = DataTaxonomyServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_data_taxonomies), + '__call__', new_callable=mock.AsyncMock) as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + data_taxonomy.ListDataTaxonomiesResponse( + data_taxonomies=[ + data_taxonomy.DataTaxonomy(), + data_taxonomy.DataTaxonomy(), + data_taxonomy.DataTaxonomy(), + ], + next_page_token='abc', + ), + data_taxonomy.ListDataTaxonomiesResponse( + data_taxonomies=[], + next_page_token='def', + ), + data_taxonomy.ListDataTaxonomiesResponse( + data_taxonomies=[ + data_taxonomy.DataTaxonomy(), + ], + next_page_token='ghi', + ), + data_taxonomy.ListDataTaxonomiesResponse( + data_taxonomies=[ + data_taxonomy.DataTaxonomy(), + data_taxonomy.DataTaxonomy(), + ], + ), + RuntimeError, + ) + pages = [] + # Workaround issue in python 3.9 related to code coverage by adding `# pragma: no branch` + # See https://github.com/googleapis/gapic-generator-python/pull/1174#issuecomment-1025132372 + async for page_ in ( # pragma: no branch + await client.list_data_taxonomies(request={}) + ).pages: + pages.append(page_) + for page_, token in zip(pages, ['abc','def','ghi', '']): + assert page_.raw_page.next_page_token == token + +@pytest.mark.parametrize("request_type", [ + data_taxonomy.GetDataTaxonomyRequest, + dict, +]) +def test_get_data_taxonomy(request_type, transport: str = 'grpc'): + client = DataTaxonomyServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_data_taxonomy), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = data_taxonomy.DataTaxonomy( + name='name_value', + uid='uid_value', + description='description_value', + display_name='display_name_value', + attribute_count=1628, + etag='etag_value', + class_count=1182, + ) + response = client.get_data_taxonomy(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = data_taxonomy.GetDataTaxonomyRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, data_taxonomy.DataTaxonomy) + assert response.name == 'name_value' + assert response.uid == 'uid_value' + assert response.description == 'description_value' + assert response.display_name == 'display_name_value' + assert response.attribute_count == 1628 + assert response.etag == 'etag_value' + assert response.class_count == 1182 + + +def test_get_data_taxonomy_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = DataTaxonomyServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = data_taxonomy.GetDataTaxonomyRequest( + name='name_value', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_data_taxonomy), + '__call__') as call: + call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. 
+ client.get_data_taxonomy(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == data_taxonomy.GetDataTaxonomyRequest( + name='name_value', + ) + +def test_get_data_taxonomy_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = DataTaxonomyServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.get_data_taxonomy in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client._transport._wrapped_methods[client._transport.get_data_taxonomy] = mock_rpc + request = {} + client.get_data_taxonomy(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.get_data_taxonomy(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + +@pytest.mark.asyncio +async def test_get_data_taxonomy_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = DataTaxonomyServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._client._transport.get_data_taxonomy in client._client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[client._client._transport.get_data_taxonomy] = mock_rpc + + request = {} + await client.get_data_taxonomy(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + await client.get_data_taxonomy(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + +@pytest.mark.asyncio +async def test_get_data_taxonomy_async(transport: str = 'grpc_asyncio', request_type=data_taxonomy.GetDataTaxonomyRequest): + client = DataTaxonomyServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_data_taxonomy), + '__call__') as call: + # Designate an appropriate return value for the call. 
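+        # FakeUnaryUnaryCall wraps the response in an awaitable, so the
+        # patched stub behaves like a real async unary-unary call object.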
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(data_taxonomy.DataTaxonomy(
+            name='name_value',
+            uid='uid_value',
+            description='description_value',
+            display_name='display_name_value',
+            attribute_count=1628,
+            etag='etag_value',
+            class_count=1182,
+        ))
+        response = await client.get_data_taxonomy(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        request = data_taxonomy.GetDataTaxonomyRequest()
+        assert args[0] == request
+
+    # Establish that the response is the type that we expect.
+    assert isinstance(response, data_taxonomy.DataTaxonomy)
+    assert response.name == 'name_value'
+    assert response.uid == 'uid_value'
+    assert response.description == 'description_value'
+    assert response.display_name == 'display_name_value'
+    assert response.attribute_count == 1628
+    assert response.etag == 'etag_value'
+    assert response.class_count == 1182
+
+
+@pytest.mark.asyncio
+async def test_get_data_taxonomy_async_from_dict():
+    await test_get_data_taxonomy_async(request_type=dict)
+
+def test_get_data_taxonomy_field_headers():
+    client = DataTaxonomyServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Any value that is part of the HTTP/1.1 URI should be sent as
+    # a field header. Set these to a non-empty value.
+    request = data_taxonomy.GetDataTaxonomyRequest()
+
+    request.name = 'name_value'
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.get_data_taxonomy),
+            '__call__') as call:
+        call.return_value = data_taxonomy.DataTaxonomy()
+        client.get_data_taxonomy(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == request
+
+    # Establish that the field header was sent.
+    _, _, kw = call.mock_calls[0]
+    assert (
+        'x-goog-request-params',
+        'name=name_value',
+    ) in kw['metadata']
+
+
+@pytest.mark.asyncio
+async def test_get_data_taxonomy_field_headers_async():
+    client = DataTaxonomyServiceAsyncClient(
+        credentials=async_anonymous_credentials(),
+    )
+
+    # Any value that is part of the HTTP/1.1 URI should be sent as
+    # a field header. Set these to a non-empty value.
+    request = data_taxonomy.GetDataTaxonomyRequest()
+
+    request.name = 'name_value'
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.get_data_taxonomy),
+            '__call__') as call:
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(data_taxonomy.DataTaxonomy())
+        await client.get_data_taxonomy(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == request
+
+    # Establish that the field header was sent.
+    _, _, kw = call.mock_calls[0]
+    assert (
+        'x-goog-request-params',
+        'name=name_value',
+    ) in kw['metadata']
+
+
+def test_get_data_taxonomy_flattened():
+    client = DataTaxonomyServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.get_data_taxonomy),
+            '__call__') as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = data_taxonomy.DataTaxonomy()
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        client.get_data_taxonomy(
+            name='name_value',
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        arg = args[0].name
+        mock_val = 'name_value'
+        assert arg == mock_val
+
+
+def test_get_data_taxonomy_flattened_error():
+    client = DataTaxonomyServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        client.get_data_taxonomy(
+            data_taxonomy.GetDataTaxonomyRequest(),
+            name='name_value',
+        )
+
+@pytest.mark.asyncio
+async def test_get_data_taxonomy_flattened_async():
+    client = DataTaxonomyServiceAsyncClient(
+        credentials=async_anonymous_credentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.get_data_taxonomy),
+            '__call__') as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(data_taxonomy.DataTaxonomy())
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        response = await client.get_data_taxonomy(
+            name='name_value',
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        arg = args[0].name
+        mock_val = 'name_value'
+        assert arg == mock_val
+
+@pytest.mark.asyncio
+async def test_get_data_taxonomy_flattened_error_async():
+    client = DataTaxonomyServiceAsyncClient(
+        credentials=async_anonymous_credentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        await client.get_data_taxonomy(
+            data_taxonomy.GetDataTaxonomyRequest(),
+            name='name_value',
+        )
+
+
+@pytest.mark.parametrize("request_type", [
+    data_taxonomy.CreateDataAttributeBindingRequest,
+    dict,
+])
+def test_create_data_attribute_binding(request_type, transport: str = 'grpc'):
+    client = DataTaxonomyServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport,
+    )
+
+    # Everything is optional in proto3 as far as the runtime is concerned,
+    # and we are mocking out the actual API, so just send an empty request.
+    request = request_type()
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.create_data_attribute_binding),
+            '__call__') as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = operations_pb2.Operation(name='operations/spam')
+        response = client.create_data_attribute_binding(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        request = data_taxonomy.CreateDataAttributeBindingRequest()
+        assert args[0] == request
+
+    # Establish that the response is the type that we expect.
+    assert isinstance(response, future.Future)
+
+
+def test_create_data_attribute_binding_non_empty_request_with_auto_populated_field():
+    # This test is a coverage failsafe to make sure that UUID4 fields are
+    # automatically populated, according to AIP-4235, with non-empty requests.
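+    # (Per AIP-4235, fields annotated for auto-population, typically a
+    # UUID4 request_id, are filled in by the client when left unset; the
+    # explicit fields set below must pass through unchanged.)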
+ client = DataTaxonomyServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = data_taxonomy.CreateDataAttributeBindingRequest( + parent='parent_value', + data_attribute_binding_id='data_attribute_binding_id_value', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_data_attribute_binding), + '__call__') as call: + call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client.create_data_attribute_binding(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == data_taxonomy.CreateDataAttributeBindingRequest( + parent='parent_value', + data_attribute_binding_id='data_attribute_binding_id_value', + ) + +def test_create_data_attribute_binding_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = DataTaxonomyServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.create_data_attribute_binding in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client._transport._wrapped_methods[client._transport.create_data_attribute_binding] = mock_rpc + request = {} + client.create_data_attribute_binding(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.create_data_attribute_binding(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + +@pytest.mark.asyncio +async def test_create_data_attribute_binding_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = DataTaxonomyServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._client._transport.create_data_attribute_binding in client._client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[client._client._transport.create_data_attribute_binding] = mock_rpc + + request = {} + await client.create_data_attribute_binding(request) + + # Establish that the underlying gRPC stub method was called. 
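+ # The injected mock stands in for the cached wrapped RPC, so its call
+ # count shows that the cache entry (not a fresh wrapper) handled the call.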
+ assert mock_rpc.call_count == 1 + + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + await client.create_data_attribute_binding(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + +@pytest.mark.asyncio +async def test_create_data_attribute_binding_async(transport: str = 'grpc_asyncio', request_type=data_taxonomy.CreateDataAttributeBindingRequest): + client = DataTaxonomyServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_data_attribute_binding), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name='operations/spam') + ) + response = await client.create_data_attribute_binding(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = data_taxonomy.CreateDataAttributeBindingRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +@pytest.mark.asyncio +async def test_create_data_attribute_binding_async_from_dict(): + await test_create_data_attribute_binding_async(request_type=dict) + +def test_create_data_attribute_binding_field_headers(): + client = DataTaxonomyServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = data_taxonomy.CreateDataAttributeBindingRequest() + + request.parent = 'parent_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_data_attribute_binding), + '__call__') as call: + call.return_value = operations_pb2.Operation(name='operations/op') + client.create_data_attribute_binding(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'parent=parent_value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_create_data_attribute_binding_field_headers_async(): + client = DataTaxonomyServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = data_taxonomy.CreateDataAttributeBindingRequest() + + request.parent = 'parent_value' + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object(
+ type(client.transport.create_data_attribute_binding),
+ '__call__') as call:
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(operations_pb2.Operation(name='operations/op'))
+ await client.create_data_attribute_binding(request)
+
+ # Establish that the underlying gRPC stub method was called.
+ assert len(call.mock_calls)
+ _, args, _ = call.mock_calls[0]
+ assert args[0] == request
+
+ # Establish that the field header was sent.
+ _, _, kw = call.mock_calls[0]
+ assert (
+ 'x-goog-request-params',
+ 'parent=parent_value',
+ ) in kw['metadata']
+
+
+def test_create_data_attribute_binding_flattened():
+ client = DataTaxonomyServiceClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ )
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(
+ type(client.transport.create_data_attribute_binding),
+ '__call__') as call:
+ # Designate an appropriate return value for the call.
+ call.return_value = operations_pb2.Operation(name='operations/op')
+ # Call the method with a truthy value for each flattened field,
+ # using the keyword arguments to the method.
+ client.create_data_attribute_binding(
+ parent='parent_value',
+ data_attribute_binding=data_taxonomy.DataAttributeBinding(name='name_value'),
+ data_attribute_binding_id='data_attribute_binding_id_value',
+ )
+
+ # Establish that the underlying call was made with the expected
+ # request object values.
+ assert len(call.mock_calls) == 1
+ _, args, _ = call.mock_calls[0]
+ arg = args[0].parent
+ mock_val = 'parent_value'
+ assert arg == mock_val
+ arg = args[0].data_attribute_binding
+ mock_val = data_taxonomy.DataAttributeBinding(name='name_value')
+ assert arg == mock_val
+ arg = args[0].data_attribute_binding_id
+ mock_val = 'data_attribute_binding_id_value'
+ assert arg == mock_val
+
+
+def test_create_data_attribute_binding_flattened_error():
+ client = DataTaxonomyServiceClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ )
+
+ # Attempting to call a method with both a request object and flattened
+ # fields is an error.
+ with pytest.raises(ValueError):
+ client.create_data_attribute_binding(
+ data_taxonomy.CreateDataAttributeBindingRequest(),
+ parent='parent_value',
+ data_attribute_binding=data_taxonomy.DataAttributeBinding(name='name_value'),
+ data_attribute_binding_id='data_attribute_binding_id_value',
+ )
+
+@pytest.mark.asyncio
+async def test_create_data_attribute_binding_flattened_async():
+ client = DataTaxonomyServiceAsyncClient(
+ credentials=async_anonymous_credentials(),
+ )
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(
+ type(client.transport.create_data_attribute_binding),
+ '__call__') as call:
+ # Designate an appropriate return value for the call.
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
+ operations_pb2.Operation(name='operations/spam')
+ )
+ # Call the method with a truthy value for each flattened field,
+ # using the keyword arguments to the method.
+ response = await client.create_data_attribute_binding(
+ parent='parent_value',
+ data_attribute_binding=data_taxonomy.DataAttributeBinding(name='name_value'),
+ data_attribute_binding_id='data_attribute_binding_id_value',
+ )
+
+ # Establish that the underlying call was made with the expected
+ # request object values.
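+ # Note that the async variants assert only that at least one call was
+ # recorded, rather than an exact call count.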
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = 'parent_value' + assert arg == mock_val + arg = args[0].data_attribute_binding + mock_val = data_taxonomy.DataAttributeBinding(name='name_value') + assert arg == mock_val + arg = args[0].data_attribute_binding_id + mock_val = 'data_attribute_binding_id_value' + assert arg == mock_val + +@pytest.mark.asyncio +async def test_create_data_attribute_binding_flattened_error_async(): + client = DataTaxonomyServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.create_data_attribute_binding( + data_taxonomy.CreateDataAttributeBindingRequest(), + parent='parent_value', + data_attribute_binding=data_taxonomy.DataAttributeBinding(name='name_value'), + data_attribute_binding_id='data_attribute_binding_id_value', + ) + + +@pytest.mark.parametrize("request_type", [ + data_taxonomy.UpdateDataAttributeBindingRequest, + dict, +]) +def test_update_data_attribute_binding(request_type, transport: str = 'grpc'): + client = DataTaxonomyServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_data_attribute_binding), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name='operations/spam') + response = client.update_data_attribute_binding(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = data_taxonomy.UpdateDataAttributeBindingRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +def test_update_data_attribute_binding_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = DataTaxonomyServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = data_taxonomy.UpdateDataAttributeBindingRequest( + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_data_attribute_binding), + '__call__') as call: + call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. 
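+ # Invoke the RPC with the populated request; the stub should receive an
+ # equivalent request object.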
+ client.update_data_attribute_binding(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == data_taxonomy.UpdateDataAttributeBindingRequest( + ) + +def test_update_data_attribute_binding_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = DataTaxonomyServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.update_data_attribute_binding in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client._transport._wrapped_methods[client._transport.update_data_attribute_binding] = mock_rpc + request = {} + client.update_data_attribute_binding(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.update_data_attribute_binding(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + +@pytest.mark.asyncio +async def test_update_data_attribute_binding_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = DataTaxonomyServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._client._transport.update_data_attribute_binding in client._client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[client._client._transport.update_data_attribute_binding] = mock_rpc + + request = {} + await client.update_data_attribute_binding(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + await client.update_data_attribute_binding(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + +@pytest.mark.asyncio +async def test_update_data_attribute_binding_async(transport: str = 'grpc_asyncio', request_type=data_taxonomy.UpdateDataAttributeBindingRequest): + client = DataTaxonomyServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_data_attribute_binding), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name='operations/spam') + ) + response = await client.update_data_attribute_binding(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = data_taxonomy.UpdateDataAttributeBindingRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +@pytest.mark.asyncio +async def test_update_data_attribute_binding_async_from_dict(): + await test_update_data_attribute_binding_async(request_type=dict) + +def test_update_data_attribute_binding_field_headers(): + client = DataTaxonomyServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = data_taxonomy.UpdateDataAttributeBindingRequest() + + request.data_attribute_binding.name = 'name_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_data_attribute_binding), + '__call__') as call: + call.return_value = operations_pb2.Operation(name='operations/op') + client.update_data_attribute_binding(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'data_attribute_binding.name=name_value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_update_data_attribute_binding_field_headers_async(): + client = DataTaxonomyServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = data_taxonomy.UpdateDataAttributeBindingRequest() + + request.data_attribute_binding.name = 'name_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_data_attribute_binding), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(operations_pb2.Operation(name='operations/op')) + await client.update_data_attribute_binding(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert len(call.mock_calls)
+ _, args, _ = call.mock_calls[0]
+ assert args[0] == request
+
+ # Establish that the field header was sent.
+ _, _, kw = call.mock_calls[0]
+ assert (
+ 'x-goog-request-params',
+ 'data_attribute_binding.name=name_value',
+ ) in kw['metadata']
+
+
+def test_update_data_attribute_binding_flattened():
+ client = DataTaxonomyServiceClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ )
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(
+ type(client.transport.update_data_attribute_binding),
+ '__call__') as call:
+ # Designate an appropriate return value for the call.
+ call.return_value = operations_pb2.Operation(name='operations/op')
+ # Call the method with a truthy value for each flattened field,
+ # using the keyword arguments to the method.
+ client.update_data_attribute_binding(
+ data_attribute_binding=data_taxonomy.DataAttributeBinding(name='name_value'),
+ update_mask=field_mask_pb2.FieldMask(paths=['paths_value']),
+ )
+
+ # Establish that the underlying call was made with the expected
+ # request object values.
+ assert len(call.mock_calls) == 1
+ _, args, _ = call.mock_calls[0]
+ arg = args[0].data_attribute_binding
+ mock_val = data_taxonomy.DataAttributeBinding(name='name_value')
+ assert arg == mock_val
+ arg = args[0].update_mask
+ mock_val = field_mask_pb2.FieldMask(paths=['paths_value'])
+ assert arg == mock_val
+
+
+def test_update_data_attribute_binding_flattened_error():
+ client = DataTaxonomyServiceClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ )
+
+ # Attempting to call a method with both a request object and flattened
+ # fields is an error.
+ with pytest.raises(ValueError):
+ client.update_data_attribute_binding(
+ data_taxonomy.UpdateDataAttributeBindingRequest(),
+ data_attribute_binding=data_taxonomy.DataAttributeBinding(name='name_value'),
+ update_mask=field_mask_pb2.FieldMask(paths=['paths_value']),
+ )
+
+@pytest.mark.asyncio
+async def test_update_data_attribute_binding_flattened_async():
+ client = DataTaxonomyServiceAsyncClient(
+ credentials=async_anonymous_credentials(),
+ )
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(
+ type(client.transport.update_data_attribute_binding),
+ '__call__') as call:
+ # Designate an appropriate return value for the call.
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
+ operations_pb2.Operation(name='operations/spam')
+ )
+ # Call the method with a truthy value for each flattened field,
+ # using the keyword arguments to the method.
+ response = await client.update_data_attribute_binding(
+ data_attribute_binding=data_taxonomy.DataAttributeBinding(name='name_value'),
+ update_mask=field_mask_pb2.FieldMask(paths=['paths_value']),
+ )
+
+ # Establish that the underlying call was made with the expected
+ # request object values.
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].data_attribute_binding + mock_val = data_taxonomy.DataAttributeBinding(name='name_value') + assert arg == mock_val + arg = args[0].update_mask + mock_val = field_mask_pb2.FieldMask(paths=['paths_value']) + assert arg == mock_val + +@pytest.mark.asyncio +async def test_update_data_attribute_binding_flattened_error_async(): + client = DataTaxonomyServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.update_data_attribute_binding( + data_taxonomy.UpdateDataAttributeBindingRequest(), + data_attribute_binding=data_taxonomy.DataAttributeBinding(name='name_value'), + update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), + ) + + +@pytest.mark.parametrize("request_type", [ + data_taxonomy.DeleteDataAttributeBindingRequest, + dict, +]) +def test_delete_data_attribute_binding(request_type, transport: str = 'grpc'): + client = DataTaxonomyServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_data_attribute_binding), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name='operations/spam') + response = client.delete_data_attribute_binding(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = data_taxonomy.DeleteDataAttributeBindingRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +def test_delete_data_attribute_binding_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = DataTaxonomyServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = data_taxonomy.DeleteDataAttributeBindingRequest( + name='name_value', + etag='etag_value', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_data_attribute_binding), + '__call__') as call: + call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. 
+ client.delete_data_attribute_binding(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == data_taxonomy.DeleteDataAttributeBindingRequest( + name='name_value', + etag='etag_value', + ) + +def test_delete_data_attribute_binding_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = DataTaxonomyServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.delete_data_attribute_binding in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client._transport._wrapped_methods[client._transport.delete_data_attribute_binding] = mock_rpc + request = {} + client.delete_data_attribute_binding(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.delete_data_attribute_binding(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + +@pytest.mark.asyncio +async def test_delete_data_attribute_binding_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = DataTaxonomyServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._client._transport.delete_data_attribute_binding in client._client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[client._client._transport.delete_data_attribute_binding] = mock_rpc + + request = {} + await client.delete_data_attribute_binding(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + await client.delete_data_attribute_binding(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + +@pytest.mark.asyncio +async def test_delete_data_attribute_binding_async(transport: str = 'grpc_asyncio', request_type=data_taxonomy.DeleteDataAttributeBindingRequest): + client = DataTaxonomyServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_data_attribute_binding), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name='operations/spam') + ) + response = await client.delete_data_attribute_binding(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = data_taxonomy.DeleteDataAttributeBindingRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +@pytest.mark.asyncio +async def test_delete_data_attribute_binding_async_from_dict(): + await test_delete_data_attribute_binding_async(request_type=dict) + +def test_delete_data_attribute_binding_field_headers(): + client = DataTaxonomyServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = data_taxonomy.DeleteDataAttributeBindingRequest() + + request.name = 'name_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_data_attribute_binding), + '__call__') as call: + call.return_value = operations_pb2.Operation(name='operations/op') + client.delete_data_attribute_binding(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'name=name_value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_delete_data_attribute_binding_field_headers_async(): + client = DataTaxonomyServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = data_taxonomy.DeleteDataAttributeBindingRequest() + + request.name = 'name_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_data_attribute_binding), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(operations_pb2.Operation(name='operations/op')) + await client.delete_data_attribute_binding(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. 
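+ # The routing parameters travel as ('x-goog-request-params', ...) entries
+ # in the call metadata rather than in the request body.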
+ _, _, kw = call.mock_calls[0]
+ assert (
+ 'x-goog-request-params',
+ 'name=name_value',
+ ) in kw['metadata']
+
+
+def test_delete_data_attribute_binding_flattened():
+ client = DataTaxonomyServiceClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ )
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(
+ type(client.transport.delete_data_attribute_binding),
+ '__call__') as call:
+ # Designate an appropriate return value for the call.
+ call.return_value = operations_pb2.Operation(name='operations/op')
+ # Call the method with a truthy value for each flattened field,
+ # using the keyword arguments to the method.
+ client.delete_data_attribute_binding(
+ name='name_value',
+ )
+
+ # Establish that the underlying call was made with the expected
+ # request object values.
+ assert len(call.mock_calls) == 1
+ _, args, _ = call.mock_calls[0]
+ arg = args[0].name
+ mock_val = 'name_value'
+ assert arg == mock_val
+
+
+def test_delete_data_attribute_binding_flattened_error():
+ client = DataTaxonomyServiceClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ )
+
+ # Attempting to call a method with both a request object and flattened
+ # fields is an error.
+ with pytest.raises(ValueError):
+ client.delete_data_attribute_binding(
+ data_taxonomy.DeleteDataAttributeBindingRequest(),
+ name='name_value',
+ )
+
+@pytest.mark.asyncio
+async def test_delete_data_attribute_binding_flattened_async():
+ client = DataTaxonomyServiceAsyncClient(
+ credentials=async_anonymous_credentials(),
+ )
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(
+ type(client.transport.delete_data_attribute_binding),
+ '__call__') as call:
+ # Designate an appropriate return value for the call.
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
+ operations_pb2.Operation(name='operations/spam')
+ )
+ # Call the method with a truthy value for each flattened field,
+ # using the keyword arguments to the method.
+ response = await client.delete_data_attribute_binding(
+ name='name_value',
+ )
+
+ # Establish that the underlying call was made with the expected
+ # request object values.
+ assert len(call.mock_calls)
+ _, args, _ = call.mock_calls[0]
+ arg = args[0].name
+ mock_val = 'name_value'
+ assert arg == mock_val
+
+@pytest.mark.asyncio
+async def test_delete_data_attribute_binding_flattened_error_async():
+ client = DataTaxonomyServiceAsyncClient(
+ credentials=async_anonymous_credentials(),
+ )
+
+ # Attempting to call a method with both a request object and flattened
+ # fields is an error.
+ with pytest.raises(ValueError):
+ await client.delete_data_attribute_binding(
+ data_taxonomy.DeleteDataAttributeBindingRequest(),
+ name='name_value',
+ )
+
+
+@pytest.mark.parametrize("request_type", [
+ data_taxonomy.ListDataAttributeBindingsRequest,
+ dict,
+])
+def test_list_data_attribute_bindings(request_type, transport: str = 'grpc'):
+ client = DataTaxonomyServiceClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ transport=transport,
+ )
+
+ # Everything is optional in proto3 as far as the runtime is concerned,
+ # and we are mocking out the actual API, so just send an empty request.
+ request = request_type()
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object( + type(client.transport.list_data_attribute_bindings), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = data_taxonomy.ListDataAttributeBindingsResponse( + next_page_token='next_page_token_value', + unreachable_locations=['unreachable_locations_value'], + ) + response = client.list_data_attribute_bindings(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = data_taxonomy.ListDataAttributeBindingsRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListDataAttributeBindingsPager) + assert response.next_page_token == 'next_page_token_value' + assert response.unreachable_locations == ['unreachable_locations_value'] + + +def test_list_data_attribute_bindings_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = DataTaxonomyServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = data_taxonomy.ListDataAttributeBindingsRequest( + parent='parent_value', + page_token='page_token_value', + filter='filter_value', + order_by='order_by_value', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_data_attribute_bindings), + '__call__') as call: + call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client.list_data_attribute_bindings(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == data_taxonomy.ListDataAttributeBindingsRequest( + parent='parent_value', + page_token='page_token_value', + filter='filter_value', + order_by='order_by_value', + ) + +def test_list_data_attribute_bindings_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = DataTaxonomyServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.list_data_attribute_bindings in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client._transport._wrapped_methods[client._transport.list_data_attribute_bindings] = mock_rpc + request = {} + client.list_data_attribute_bindings(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1
+
+ client.list_data_attribute_bindings(request)
+
+ # Establish that a new wrapper was not created for this call
+ assert wrapper_fn.call_count == 0
+ assert mock_rpc.call_count == 2
+
+@pytest.mark.asyncio
+async def test_list_data_attribute_bindings_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"):
+ # Clients should use _prep_wrapped_messages to create cached wrapped rpcs,
+ # instead of constructing them on each call
+ with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn:
+ client = DataTaxonomyServiceAsyncClient(
+ credentials=async_anonymous_credentials(),
+ transport=transport,
+ )
+
+ # Should wrap all calls on client creation
+ assert wrapper_fn.call_count > 0
+ wrapper_fn.reset_mock()
+
+ # Ensure method has been cached
+ assert client._client._transport.list_data_attribute_bindings in client._client._transport._wrapped_methods
+
+ # Replace cached wrapped function with mock
+ mock_rpc = mock.AsyncMock()
+ mock_rpc.return_value = mock.Mock()
+ client._client._transport._wrapped_methods[client._client._transport.list_data_attribute_bindings] = mock_rpc
+
+ request = {}
+ await client.list_data_attribute_bindings(request)
+
+ # Establish that the underlying gRPC stub method was called.
+ assert mock_rpc.call_count == 1
+
+ await client.list_data_attribute_bindings(request)
+
+ # Establish that a new wrapper was not created for this call
+ assert wrapper_fn.call_count == 0
+ assert mock_rpc.call_count == 2
+
+@pytest.mark.asyncio
+async def test_list_data_attribute_bindings_async(transport: str = 'grpc_asyncio', request_type=data_taxonomy.ListDataAttributeBindingsRequest):
+ client = DataTaxonomyServiceAsyncClient(
+ credentials=async_anonymous_credentials(),
+ transport=transport,
+ )
+
+ # Everything is optional in proto3 as far as the runtime is concerned,
+ # and we are mocking out the actual API, so just send an empty request.
+ request = request_type()
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(
+ type(client.transport.list_data_attribute_bindings),
+ '__call__') as call:
+ # Designate an appropriate return value for the call.
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(data_taxonomy.ListDataAttributeBindingsResponse(
+ next_page_token='next_page_token_value',
+ unreachable_locations=['unreachable_locations_value'],
+ ))
+ response = await client.list_data_attribute_bindings(request)
+
+ # Establish that the underlying gRPC stub method was called.
+ assert len(call.mock_calls)
+ _, args, _ = call.mock_calls[0]
+ request = data_taxonomy.ListDataAttributeBindingsRequest()
+ assert args[0] == request
+
+ # Establish that the response is the type that we expect.
+ assert isinstance(response, pagers.ListDataAttributeBindingsAsyncPager)
+ assert response.next_page_token == 'next_page_token_value'
+ assert response.unreachable_locations == ['unreachable_locations_value']
+
+
+@pytest.mark.asyncio
+async def test_list_data_attribute_bindings_async_from_dict():
+ await test_list_data_attribute_bindings_async(request_type=dict)
+
+def test_list_data_attribute_bindings_field_headers():
+ client = DataTaxonomyServiceClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ )
+
+ # Any value that is part of the HTTP/1.1 URI should be sent as
+ # a field header. Set these to a non-empty value.
+ request = data_taxonomy.ListDataAttributeBindingsRequest() + + request.parent = 'parent_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_data_attribute_bindings), + '__call__') as call: + call.return_value = data_taxonomy.ListDataAttributeBindingsResponse() + client.list_data_attribute_bindings(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'parent=parent_value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_list_data_attribute_bindings_field_headers_async(): + client = DataTaxonomyServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = data_taxonomy.ListDataAttributeBindingsRequest() + + request.parent = 'parent_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_data_attribute_bindings), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(data_taxonomy.ListDataAttributeBindingsResponse()) + await client.list_data_attribute_bindings(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'parent=parent_value', + ) in kw['metadata'] + + +def test_list_data_attribute_bindings_flattened(): + client = DataTaxonomyServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_data_attribute_bindings), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = data_taxonomy.ListDataAttributeBindingsResponse() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.list_data_attribute_bindings( + parent='parent_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = 'parent_value' + assert arg == mock_val + + +def test_list_data_attribute_bindings_flattened_error(): + client = DataTaxonomyServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.list_data_attribute_bindings( + data_taxonomy.ListDataAttributeBindingsRequest(), + parent='parent_value', + ) + +@pytest.mark.asyncio +async def test_list_data_attribute_bindings_flattened_async(): + client = DataTaxonomyServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_data_attribute_bindings), + '__call__') as call: + # Designate an appropriate return value for the call. 
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(data_taxonomy.ListDataAttributeBindingsResponse())
+ # Call the method with a truthy value for each flattened field,
+ # using the keyword arguments to the method.
+ response = await client.list_data_attribute_bindings(
+ parent='parent_value',
+ )
+
+ # Establish that the underlying call was made with the expected
+ # request object values.
+ assert len(call.mock_calls)
+ _, args, _ = call.mock_calls[0]
+ arg = args[0].parent
+ mock_val = 'parent_value'
+ assert arg == mock_val
+
+@pytest.mark.asyncio
+async def test_list_data_attribute_bindings_flattened_error_async():
+ client = DataTaxonomyServiceAsyncClient(
+ credentials=async_anonymous_credentials(),
+ )
+
+ # Attempting to call a method with both a request object and flattened
+ # fields is an error.
+ with pytest.raises(ValueError):
+ await client.list_data_attribute_bindings(
+ data_taxonomy.ListDataAttributeBindingsRequest(),
+ parent='parent_value',
+ )
+
+
+def test_list_data_attribute_bindings_pager(transport_name: str = "grpc"):
+ client = DataTaxonomyServiceClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ transport=transport_name,
+ )
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(
+ type(client.transport.list_data_attribute_bindings),
+ '__call__') as call:
+ # Set the response to a series of pages.
+ call.side_effect = (
+ data_taxonomy.ListDataAttributeBindingsResponse(
+ data_attribute_bindings=[
+ data_taxonomy.DataAttributeBinding(),
+ data_taxonomy.DataAttributeBinding(),
+ data_taxonomy.DataAttributeBinding(),
+ ],
+ next_page_token='abc',
+ ),
+ data_taxonomy.ListDataAttributeBindingsResponse(
+ data_attribute_bindings=[],
+ next_page_token='def',
+ ),
+ data_taxonomy.ListDataAttributeBindingsResponse(
+ data_attribute_bindings=[
+ data_taxonomy.DataAttributeBinding(),
+ ],
+ next_page_token='ghi',
+ ),
+ data_taxonomy.ListDataAttributeBindingsResponse(
+ data_attribute_bindings=[
+ data_taxonomy.DataAttributeBinding(),
+ data_taxonomy.DataAttributeBinding(),
+ ],
+ ),
+ RuntimeError,
+ )
+
+ expected_metadata = ()
+ retry = retries.Retry()
+ timeout = 5
+ expected_metadata = tuple(expected_metadata) + (
+ gapic_v1.routing_header.to_grpc_metadata((
+ ('parent', ''),
+ )),
+ )
+ pager = client.list_data_attribute_bindings(request={}, retry=retry, timeout=timeout)
+
+ assert pager._metadata == expected_metadata
+ assert pager._retry == retry
+ assert pager._timeout == timeout
+
+ results = list(pager)
+ assert len(results) == 6
+ assert all(isinstance(i, data_taxonomy.DataAttributeBinding)
+ for i in results)
+
+
+def test_list_data_attribute_bindings_pages(transport_name: str = "grpc"):
+ client = DataTaxonomyServiceClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ transport=transport_name,
+ )
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(
+ type(client.transport.list_data_attribute_bindings),
+ '__call__') as call:
+ # Set the response to a series of pages.
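+ # The trailing RuntimeError makes the test fail loudly if the pager
+ # requests more pages than were mocked.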
+ call.side_effect = ( + data_taxonomy.ListDataAttributeBindingsResponse( + data_attribute_bindings=[ + data_taxonomy.DataAttributeBinding(), + data_taxonomy.DataAttributeBinding(), + data_taxonomy.DataAttributeBinding(), + ], + next_page_token='abc', + ), + data_taxonomy.ListDataAttributeBindingsResponse( + data_attribute_bindings=[], + next_page_token='def', + ), + data_taxonomy.ListDataAttributeBindingsResponse( + data_attribute_bindings=[ + data_taxonomy.DataAttributeBinding(), + ], + next_page_token='ghi', + ), + data_taxonomy.ListDataAttributeBindingsResponse( + data_attribute_bindings=[ + data_taxonomy.DataAttributeBinding(), + data_taxonomy.DataAttributeBinding(), + ], + ), + RuntimeError, + ) + pages = list(client.list_data_attribute_bindings(request={}).pages) + for page_, token in zip(pages, ['abc','def','ghi', '']): + assert page_.raw_page.next_page_token == token + +@pytest.mark.asyncio +async def test_list_data_attribute_bindings_async_pager(): + client = DataTaxonomyServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_data_attribute_bindings), + '__call__', new_callable=mock.AsyncMock) as call: + # Set the response to a series of pages. + call.side_effect = ( + data_taxonomy.ListDataAttributeBindingsResponse( + data_attribute_bindings=[ + data_taxonomy.DataAttributeBinding(), + data_taxonomy.DataAttributeBinding(), + data_taxonomy.DataAttributeBinding(), + ], + next_page_token='abc', + ), + data_taxonomy.ListDataAttributeBindingsResponse( + data_attribute_bindings=[], + next_page_token='def', + ), + data_taxonomy.ListDataAttributeBindingsResponse( + data_attribute_bindings=[ + data_taxonomy.DataAttributeBinding(), + ], + next_page_token='ghi', + ), + data_taxonomy.ListDataAttributeBindingsResponse( + data_attribute_bindings=[ + data_taxonomy.DataAttributeBinding(), + data_taxonomy.DataAttributeBinding(), + ], + ), + RuntimeError, + ) + async_pager = await client.list_data_attribute_bindings(request={},) + assert async_pager.next_page_token == 'abc' + responses = [] + async for response in async_pager: # pragma: no branch + responses.append(response) + + assert len(responses) == 6 + assert all(isinstance(i, data_taxonomy.DataAttributeBinding) + for i in responses) + + +@pytest.mark.asyncio +async def test_list_data_attribute_bindings_async_pages(): + client = DataTaxonomyServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_data_attribute_bindings), + '__call__', new_callable=mock.AsyncMock) as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + data_taxonomy.ListDataAttributeBindingsResponse( + data_attribute_bindings=[ + data_taxonomy.DataAttributeBinding(), + data_taxonomy.DataAttributeBinding(), + data_taxonomy.DataAttributeBinding(), + ], + next_page_token='abc', + ), + data_taxonomy.ListDataAttributeBindingsResponse( + data_attribute_bindings=[], + next_page_token='def', + ), + data_taxonomy.ListDataAttributeBindingsResponse( + data_attribute_bindings=[ + data_taxonomy.DataAttributeBinding(), + ], + next_page_token='ghi', + ), + data_taxonomy.ListDataAttributeBindingsResponse( + data_attribute_bindings=[ + data_taxonomy.DataAttributeBinding(), + data_taxonomy.DataAttributeBinding(), + ], + ), + RuntimeError, + ) + pages = [] + # Workaround issue in python 3.9 related to code coverage by adding `# pragma: no branch` + # See https://github.com/googleapis/gapic-generator-python/pull/1174#issuecomment-1025132372 + async for page_ in ( # pragma: no branch + await client.list_data_attribute_bindings(request={}) + ).pages: + pages.append(page_) + for page_, token in zip(pages, ['abc','def','ghi', '']): + assert page_.raw_page.next_page_token == token + +@pytest.mark.parametrize("request_type", [ + data_taxonomy.GetDataAttributeBindingRequest, + dict, +]) +def test_get_data_attribute_binding(request_type, transport: str = 'grpc'): + client = DataTaxonomyServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_data_attribute_binding), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = data_taxonomy.DataAttributeBinding( + name='name_value', + uid='uid_value', + description='description_value', + display_name='display_name_value', + etag='etag_value', + attributes=['attributes_value'], + resource='resource_value', + ) + response = client.get_data_attribute_binding(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = data_taxonomy.GetDataAttributeBindingRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, data_taxonomy.DataAttributeBinding) + assert response.name == 'name_value' + assert response.uid == 'uid_value' + assert response.description == 'description_value' + assert response.display_name == 'display_name_value' + assert response.etag == 'etag_value' + assert response.attributes == ['attributes_value'] + + +def test_get_data_attribute_binding_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = DataTaxonomyServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = data_taxonomy.GetDataAttributeBindingRequest( + name='name_value', + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.get_data_attribute_binding), + '__call__') as call: + call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client.get_data_attribute_binding(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == data_taxonomy.GetDataAttributeBindingRequest( + name='name_value', + ) + +def test_get_data_attribute_binding_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = DataTaxonomyServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.get_data_attribute_binding in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client._transport._wrapped_methods[client._transport.get_data_attribute_binding] = mock_rpc + request = {} + client.get_data_attribute_binding(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.get_data_attribute_binding(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + +@pytest.mark.asyncio +async def test_get_data_attribute_binding_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = DataTaxonomyServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._client._transport.get_data_attribute_binding in client._client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[client._client._transport.get_data_attribute_binding] = mock_rpc + + request = {} + await client.get_data_attribute_binding(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + await client.get_data_attribute_binding(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + +@pytest.mark.asyncio +async def test_get_data_attribute_binding_async(transport: str = 'grpc_asyncio', request_type=data_taxonomy.GetDataAttributeBindingRequest): + client = DataTaxonomyServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. 
+    with mock.patch.object(
+            type(client.transport.get_data_attribute_binding),
+            '__call__') as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(data_taxonomy.DataAttributeBinding(
+            name='name_value',
+            uid='uid_value',
+            description='description_value',
+            display_name='display_name_value',
+            etag='etag_value',
+            attributes=['attributes_value'],
+        ))
+        response = await client.get_data_attribute_binding(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        request = data_taxonomy.GetDataAttributeBindingRequest()
+        assert args[0] == request
+
+    # Establish that the response is the type that we expect.
+    assert isinstance(response, data_taxonomy.DataAttributeBinding)
+    assert response.name == 'name_value'
+    assert response.uid == 'uid_value'
+    assert response.description == 'description_value'
+    assert response.display_name == 'display_name_value'
+    assert response.etag == 'etag_value'
+    assert response.attributes == ['attributes_value']
+
+
+@pytest.mark.asyncio
+async def test_get_data_attribute_binding_async_from_dict():
+    await test_get_data_attribute_binding_async(request_type=dict)
+
+def test_get_data_attribute_binding_field_headers():
+    client = DataTaxonomyServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Any value that is part of the HTTP/1.1 URI should be sent as
+    # a field header. Set these to a non-empty value.
+    request = data_taxonomy.GetDataAttributeBindingRequest()
+
+    request.name = 'name_value'
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.get_data_attribute_binding),
+            '__call__') as call:
+        call.return_value = data_taxonomy.DataAttributeBinding()
+        client.get_data_attribute_binding(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == request
+
+    # Establish that the field header was sent.
+    _, _, kw = call.mock_calls[0]
+    assert (
+        'x-goog-request-params',
+        'name=name_value',
+    ) in kw['metadata']
+
+
+@pytest.mark.asyncio
+async def test_get_data_attribute_binding_field_headers_async():
+    client = DataTaxonomyServiceAsyncClient(
+        credentials=async_anonymous_credentials(),
+    )
+
+    # Any value that is part of the HTTP/1.1 URI should be sent as
+    # a field header. Set these to a non-empty value.
+    request = data_taxonomy.GetDataAttributeBindingRequest()
+
+    request.name = 'name_value'
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.get_data_attribute_binding),
+            '__call__') as call:
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(data_taxonomy.DataAttributeBinding())
+        await client.get_data_attribute_binding(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == request
+
+    # Establish that the field header was sent.
+    _, _, kw = call.mock_calls[0]
+    assert (
+        'x-goog-request-params',
+        'name=name_value',
+    ) in kw['metadata']
+
+
+def test_get_data_attribute_binding_flattened():
+    client = DataTaxonomyServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.get_data_attribute_binding),
+            '__call__') as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = data_taxonomy.DataAttributeBinding()
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        client.get_data_attribute_binding(
+            name='name_value',
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        arg = args[0].name
+        mock_val = 'name_value'
+        assert arg == mock_val
+
+
+def test_get_data_attribute_binding_flattened_error():
+    client = DataTaxonomyServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        client.get_data_attribute_binding(
+            data_taxonomy.GetDataAttributeBindingRequest(),
+            name='name_value',
+        )
+
+@pytest.mark.asyncio
+async def test_get_data_attribute_binding_flattened_async():
+    client = DataTaxonomyServiceAsyncClient(
+        credentials=async_anonymous_credentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.get_data_attribute_binding),
+            '__call__') as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(data_taxonomy.DataAttributeBinding())
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        response = await client.get_data_attribute_binding(
+            name='name_value',
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        arg = args[0].name
+        mock_val = 'name_value'
+        assert arg == mock_val
+
+@pytest.mark.asyncio
+async def test_get_data_attribute_binding_flattened_error_async():
+    client = DataTaxonomyServiceAsyncClient(
+        credentials=async_anonymous_credentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        await client.get_data_attribute_binding(
+            data_taxonomy.GetDataAttributeBindingRequest(),
+            name='name_value',
+        )
+
+
+@pytest.mark.parametrize("request_type", [
+    data_taxonomy.CreateDataAttributeRequest,
+    dict,
+])
+def test_create_data_attribute(request_type, transport: str = 'grpc'):
+    client = DataTaxonomyServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport,
+    )
+
+    # Everything is optional in proto3 as far as the runtime is concerned,
+    # and we are mocking out the actual API, so just send an empty request.
+    request = request_type()
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.create_data_attribute),
+            '__call__') as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = operations_pb2.Operation(name='operations/spam')
+        response = client.create_data_attribute(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        request = data_taxonomy.CreateDataAttributeRequest()
+        assert args[0] == request
+
+    # Establish that the response is the type that we expect.
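+    # create_data_attribute is a long-running operation, so the client wraps
+    # the returned operations_pb2.Operation in a future.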
+ assert isinstance(response, future.Future) + + +def test_create_data_attribute_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = DataTaxonomyServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = data_taxonomy.CreateDataAttributeRequest( + parent='parent_value', + data_attribute_id='data_attribute_id_value', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_data_attribute), + '__call__') as call: + call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client.create_data_attribute(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == data_taxonomy.CreateDataAttributeRequest( + parent='parent_value', + data_attribute_id='data_attribute_id_value', + ) + +def test_create_data_attribute_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = DataTaxonomyServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.create_data_attribute in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client._transport._wrapped_methods[client._transport.create_data_attribute] = mock_rpc + request = {} + client.create_data_attribute(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.create_data_attribute(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + +@pytest.mark.asyncio +async def test_create_data_attribute_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = DataTaxonomyServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._client._transport.create_data_attribute in client._client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[client._client._transport.create_data_attribute] = mock_rpc + + request = {} + await client.create_data_attribute(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + await client.create_data_attribute(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + +@pytest.mark.asyncio +async def test_create_data_attribute_async(transport: str = 'grpc_asyncio', request_type=data_taxonomy.CreateDataAttributeRequest): + client = DataTaxonomyServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_data_attribute), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name='operations/spam') + ) + response = await client.create_data_attribute(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = data_taxonomy.CreateDataAttributeRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +@pytest.mark.asyncio +async def test_create_data_attribute_async_from_dict(): + await test_create_data_attribute_async(request_type=dict) + +def test_create_data_attribute_field_headers(): + client = DataTaxonomyServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = data_taxonomy.CreateDataAttributeRequest() + + request.parent = 'parent_value' + + # Mock the actual call within the gRPC stub, and fake the request. 
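+    # The routing header derived from `request.parent` must reach the stub;
+    # the assertions below check the x-goog-request-params metadata entry.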
+ with mock.patch.object( + type(client.transport.create_data_attribute), + '__call__') as call: + call.return_value = operations_pb2.Operation(name='operations/op') + client.create_data_attribute(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'parent=parent_value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_create_data_attribute_field_headers_async(): + client = DataTaxonomyServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = data_taxonomy.CreateDataAttributeRequest() + + request.parent = 'parent_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_data_attribute), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(operations_pb2.Operation(name='operations/op')) + await client.create_data_attribute(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'parent=parent_value', + ) in kw['metadata'] + + +def test_create_data_attribute_flattened(): + client = DataTaxonomyServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_data_attribute), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name='operations/op') + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.create_data_attribute( + parent='parent_value', + data_attribute=data_taxonomy.DataAttribute(name='name_value'), + data_attribute_id='data_attribute_id_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = 'parent_value' + assert arg == mock_val + arg = args[0].data_attribute + mock_val = data_taxonomy.DataAttribute(name='name_value') + assert arg == mock_val + arg = args[0].data_attribute_id + mock_val = 'data_attribute_id_value' + assert arg == mock_val + + +def test_create_data_attribute_flattened_error(): + client = DataTaxonomyServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.create_data_attribute( + data_taxonomy.CreateDataAttributeRequest(), + parent='parent_value', + data_attribute=data_taxonomy.DataAttribute(name='name_value'), + data_attribute_id='data_attribute_id_value', + ) + +@pytest.mark.asyncio +async def test_create_data_attribute_flattened_async(): + client = DataTaxonomyServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+    with mock.patch.object(
+            type(client.transport.create_data_attribute),
+            '__call__') as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
+            operations_pb2.Operation(name='operations/spam')
+        )
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        response = await client.create_data_attribute(
+            parent='parent_value',
+            data_attribute=data_taxonomy.DataAttribute(name='name_value'),
+            data_attribute_id='data_attribute_id_value',
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        arg = args[0].parent
+        mock_val = 'parent_value'
+        assert arg == mock_val
+        arg = args[0].data_attribute
+        mock_val = data_taxonomy.DataAttribute(name='name_value')
+        assert arg == mock_val
+        arg = args[0].data_attribute_id
+        mock_val = 'data_attribute_id_value'
+        assert arg == mock_val
+
+@pytest.mark.asyncio
+async def test_create_data_attribute_flattened_error_async():
+    client = DataTaxonomyServiceAsyncClient(
+        credentials=async_anonymous_credentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        await client.create_data_attribute(
+            data_taxonomy.CreateDataAttributeRequest(),
+            parent='parent_value',
+            data_attribute=data_taxonomy.DataAttribute(name='name_value'),
+            data_attribute_id='data_attribute_id_value',
+        )
+
+
+@pytest.mark.parametrize("request_type", [
+    data_taxonomy.UpdateDataAttributeRequest,
+    dict,
+])
+def test_update_data_attribute(request_type, transport: str = 'grpc'):
+    client = DataTaxonomyServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport,
+    )
+
+    # Everything is optional in proto3 as far as the runtime is concerned,
+    # and we are mocking out the actual API, so just send an empty request.
+    request = request_type()
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.update_data_attribute),
+            '__call__') as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = operations_pb2.Operation(name='operations/spam')
+        response = client.update_data_attribute(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        request = data_taxonomy.UpdateDataAttributeRequest()
+        assert args[0] == request
+
+    # Establish that the response is the type that we expect.
+    assert isinstance(response, future.Future)
+
+
+def test_update_data_attribute_non_empty_request_with_auto_populated_field():
+    # This test is a coverage failsafe to make sure that UUID4 fields are
+    # automatically populated, according to AIP-4235, with non-empty requests.
+    client = DataTaxonomyServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport='grpc',
+    )
+
+    # Populate all string fields in the request which are not UUID4
+    # since we want to check that UUID4 are populated automatically
+    # if they meet the requirements of AIP 4235.
+    request = data_taxonomy.UpdateDataAttributeRequest(
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object( + type(client.transport.update_data_attribute), + '__call__') as call: + call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client.update_data_attribute(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == data_taxonomy.UpdateDataAttributeRequest( + ) + +def test_update_data_attribute_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = DataTaxonomyServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.update_data_attribute in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client._transport._wrapped_methods[client._transport.update_data_attribute] = mock_rpc + request = {} + client.update_data_attribute(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.update_data_attribute(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + +@pytest.mark.asyncio +async def test_update_data_attribute_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = DataTaxonomyServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._client._transport.update_data_attribute in client._client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[client._client._transport.update_data_attribute] = mock_rpc + + request = {} + await client.update_data_attribute(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + await client.update_data_attribute(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + +@pytest.mark.asyncio +async def test_update_data_attribute_async(transport: str = 'grpc_asyncio', request_type=data_taxonomy.UpdateDataAttributeRequest): + client = DataTaxonomyServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_data_attribute), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name='operations/spam') + ) + response = await client.update_data_attribute(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = data_taxonomy.UpdateDataAttributeRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +@pytest.mark.asyncio +async def test_update_data_attribute_async_from_dict(): + await test_update_data_attribute_async(request_type=dict) + +def test_update_data_attribute_field_headers(): + client = DataTaxonomyServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = data_taxonomy.UpdateDataAttributeRequest() + + request.data_attribute.name = 'name_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_data_attribute), + '__call__') as call: + call.return_value = operations_pb2.Operation(name='operations/op') + client.update_data_attribute(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'data_attribute.name=name_value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_update_data_attribute_field_headers_async(): + client = DataTaxonomyServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = data_taxonomy.UpdateDataAttributeRequest() + + request.data_attribute.name = 'name_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_data_attribute), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(operations_pb2.Operation(name='operations/op')) + await client.update_data_attribute(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. 
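+    # Routing headers are passed to the mocked stub via the `metadata` kwarg.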
+    _, _, kw = call.mock_calls[0]
+    assert (
+        'x-goog-request-params',
+        'data_attribute.name=name_value',
+    ) in kw['metadata']
+
+
+def test_update_data_attribute_flattened():
+    client = DataTaxonomyServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.update_data_attribute),
+            '__call__') as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = operations_pb2.Operation(name='operations/op')
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        client.update_data_attribute(
+            data_attribute=data_taxonomy.DataAttribute(name='name_value'),
+            update_mask=field_mask_pb2.FieldMask(paths=['paths_value']),
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        arg = args[0].data_attribute
+        mock_val = data_taxonomy.DataAttribute(name='name_value')
+        assert arg == mock_val
+        arg = args[0].update_mask
+        mock_val = field_mask_pb2.FieldMask(paths=['paths_value'])
+        assert arg == mock_val
+
+
+def test_update_data_attribute_flattened_error():
+    client = DataTaxonomyServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        client.update_data_attribute(
+            data_taxonomy.UpdateDataAttributeRequest(),
+            data_attribute=data_taxonomy.DataAttribute(name='name_value'),
+            update_mask=field_mask_pb2.FieldMask(paths=['paths_value']),
+        )
+
+@pytest.mark.asyncio
+async def test_update_data_attribute_flattened_async():
+    client = DataTaxonomyServiceAsyncClient(
+        credentials=async_anonymous_credentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.update_data_attribute),
+            '__call__') as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
+            operations_pb2.Operation(name='operations/spam')
+        )
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        response = await client.update_data_attribute(
+            data_attribute=data_taxonomy.DataAttribute(name='name_value'),
+            update_mask=field_mask_pb2.FieldMask(paths=['paths_value']),
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        arg = args[0].data_attribute
+        mock_val = data_taxonomy.DataAttribute(name='name_value')
+        assert arg == mock_val
+        arg = args[0].update_mask
+        mock_val = field_mask_pb2.FieldMask(paths=['paths_value'])
+        assert arg == mock_val
+
+@pytest.mark.asyncio
+async def test_update_data_attribute_flattened_error_async():
+    client = DataTaxonomyServiceAsyncClient(
+        credentials=async_anonymous_credentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+ with pytest.raises(ValueError): + await client.update_data_attribute( + data_taxonomy.UpdateDataAttributeRequest(), + data_attribute=data_taxonomy.DataAttribute(name='name_value'), + update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), + ) + + +@pytest.mark.parametrize("request_type", [ + data_taxonomy.DeleteDataAttributeRequest, + dict, +]) +def test_delete_data_attribute(request_type, transport: str = 'grpc'): + client = DataTaxonomyServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_data_attribute), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name='operations/spam') + response = client.delete_data_attribute(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = data_taxonomy.DeleteDataAttributeRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +def test_delete_data_attribute_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = DataTaxonomyServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = data_taxonomy.DeleteDataAttributeRequest( + name='name_value', + etag='etag_value', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_data_attribute), + '__call__') as call: + call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client.delete_data_attribute(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == data_taxonomy.DeleteDataAttributeRequest( + name='name_value', + etag='etag_value', + ) + +def test_delete_data_attribute_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = DataTaxonomyServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.delete_data_attribute in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client._transport._wrapped_methods[client._transport.delete_data_attribute] = mock_rpc + request = {} + client.delete_data_attribute(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.delete_data_attribute(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + +@pytest.mark.asyncio +async def test_delete_data_attribute_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = DataTaxonomyServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._client._transport.delete_data_attribute in client._client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[client._client._transport.delete_data_attribute] = mock_rpc + + request = {} + await client.delete_data_attribute(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + await client.delete_data_attribute(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + +@pytest.mark.asyncio +async def test_delete_data_attribute_async(transport: str = 'grpc_asyncio', request_type=data_taxonomy.DeleteDataAttributeRequest): + client = DataTaxonomyServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_data_attribute), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name='operations/spam') + ) + response = await client.delete_data_attribute(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = data_taxonomy.DeleteDataAttributeRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +@pytest.mark.asyncio +async def test_delete_data_attribute_async_from_dict(): + await test_delete_data_attribute_async(request_type=dict) + +def test_delete_data_attribute_field_headers(): + client = DataTaxonomyServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. 
+ request = data_taxonomy.DeleteDataAttributeRequest() + + request.name = 'name_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_data_attribute), + '__call__') as call: + call.return_value = operations_pb2.Operation(name='operations/op') + client.delete_data_attribute(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'name=name_value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_delete_data_attribute_field_headers_async(): + client = DataTaxonomyServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = data_taxonomy.DeleteDataAttributeRequest() + + request.name = 'name_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_data_attribute), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(operations_pb2.Operation(name='operations/op')) + await client.delete_data_attribute(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'name=name_value', + ) in kw['metadata'] + + +def test_delete_data_attribute_flattened(): + client = DataTaxonomyServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_data_attribute), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name='operations/op') + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.delete_data_attribute( + name='name_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = 'name_value' + assert arg == mock_val + + +def test_delete_data_attribute_flattened_error(): + client = DataTaxonomyServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.delete_data_attribute( + data_taxonomy.DeleteDataAttributeRequest(), + name='name_value', + ) + +@pytest.mark.asyncio +async def test_delete_data_attribute_flattened_async(): + client = DataTaxonomyServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_data_attribute), + '__call__') as call: + # Designate an appropriate return value for the call. 
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
+            operations_pb2.Operation(name='operations/spam')
+        )
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        response = await client.delete_data_attribute(
+            name='name_value',
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        arg = args[0].name
+        mock_val = 'name_value'
+        assert arg == mock_val
+
+@pytest.mark.asyncio
+async def test_delete_data_attribute_flattened_error_async():
+    client = DataTaxonomyServiceAsyncClient(
+        credentials=async_anonymous_credentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        await client.delete_data_attribute(
+            data_taxonomy.DeleteDataAttributeRequest(),
+            name='name_value',
+        )
+
+
+@pytest.mark.parametrize("request_type", [
+    data_taxonomy.ListDataAttributesRequest,
+    dict,
+])
+def test_list_data_attributes(request_type, transport: str = 'grpc'):
+    client = DataTaxonomyServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport,
+    )
+
+    # Everything is optional in proto3 as far as the runtime is concerned,
+    # and we are mocking out the actual API, so just send an empty request.
+    request = request_type()
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.list_data_attributes),
+            '__call__') as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = data_taxonomy.ListDataAttributesResponse(
+            next_page_token='next_page_token_value',
+            unreachable_locations=['unreachable_locations_value'],
+        )
+        response = client.list_data_attributes(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        request = data_taxonomy.ListDataAttributesRequest()
+        assert args[0] == request
+
+    # Establish that the response is the type that we expect.
+    assert isinstance(response, pagers.ListDataAttributesPager)
+    assert response.next_page_token == 'next_page_token_value'
+    assert response.unreachable_locations == ['unreachable_locations_value']
+
+
+def test_list_data_attributes_non_empty_request_with_auto_populated_field():
+    # This test is a coverage failsafe to make sure that UUID4 fields are
+    # automatically populated, according to AIP-4235, with non-empty requests.
+    client = DataTaxonomyServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport='grpc',
+    )
+
+    # Populate all string fields in the request which are not UUID4
+    # since we want to check that UUID4 are populated automatically
+    # if they meet the requirements of AIP 4235.
+    request = data_taxonomy.ListDataAttributesRequest(
+        parent='parent_value',
+        page_token='page_token_value',
+        filter='filter_value',
+        order_by='order_by_value',
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.list_data_attributes),
+            '__call__') as call:
+        call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string.
+ client.list_data_attributes(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == data_taxonomy.ListDataAttributesRequest( + parent='parent_value', + page_token='page_token_value', + filter='filter_value', + order_by='order_by_value', + ) + +def test_list_data_attributes_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = DataTaxonomyServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.list_data_attributes in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client._transport._wrapped_methods[client._transport.list_data_attributes] = mock_rpc + request = {} + client.list_data_attributes(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.list_data_attributes(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + +@pytest.mark.asyncio +async def test_list_data_attributes_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = DataTaxonomyServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._client._transport.list_data_attributes in client._client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[client._client._transport.list_data_attributes] = mock_rpc + + request = {} + await client.list_data_attributes(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + await client.list_data_attributes(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + +@pytest.mark.asyncio +async def test_list_data_attributes_async(transport: str = 'grpc_asyncio', request_type=data_taxonomy.ListDataAttributesRequest): + client = DataTaxonomyServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_data_attributes), + '__call__') as call: + # Designate an appropriate return value for the call. 
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(data_taxonomy.ListDataAttributesResponse(
+            next_page_token='next_page_token_value',
+            unreachable_locations=['unreachable_locations_value'],
+        ))
+        response = await client.list_data_attributes(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        request = data_taxonomy.ListDataAttributesRequest()
+        assert args[0] == request
+
+    # Establish that the response is the type that we expect.
+    assert isinstance(response, pagers.ListDataAttributesAsyncPager)
+    assert response.next_page_token == 'next_page_token_value'
+    assert response.unreachable_locations == ['unreachable_locations_value']
+
+
+@pytest.mark.asyncio
+async def test_list_data_attributes_async_from_dict():
+    await test_list_data_attributes_async(request_type=dict)
+
+def test_list_data_attributes_field_headers():
+    client = DataTaxonomyServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Any value that is part of the HTTP/1.1 URI should be sent as
+    # a field header. Set these to a non-empty value.
+    request = data_taxonomy.ListDataAttributesRequest()
+
+    request.parent = 'parent_value'
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.list_data_attributes),
+            '__call__') as call:
+        call.return_value = data_taxonomy.ListDataAttributesResponse()
+        client.list_data_attributes(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == request
+
+    # Establish that the field header was sent.
+    _, _, kw = call.mock_calls[0]
+    assert (
+        'x-goog-request-params',
+        'parent=parent_value',
+    ) in kw['metadata']
+
+
+@pytest.mark.asyncio
+async def test_list_data_attributes_field_headers_async():
+    client = DataTaxonomyServiceAsyncClient(
+        credentials=async_anonymous_credentials(),
+    )
+
+    # Any value that is part of the HTTP/1.1 URI should be sent as
+    # a field header. Set these to a non-empty value.
+    request = data_taxonomy.ListDataAttributesRequest()
+
+    request.parent = 'parent_value'
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.list_data_attributes),
+            '__call__') as call:
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(data_taxonomy.ListDataAttributesResponse())
+        await client.list_data_attributes(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == request
+
+    # Establish that the field header was sent.
+    _, _, kw = call.mock_calls[0]
+    assert (
+        'x-goog-request-params',
+        'parent=parent_value',
+    ) in kw['metadata']
+
+
+def test_list_data_attributes_flattened():
+    client = DataTaxonomyServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.list_data_attributes),
+            '__call__') as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = data_taxonomy.ListDataAttributesResponse()
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        client.list_data_attributes(
+            parent='parent_value',
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        arg = args[0].parent
+        mock_val = 'parent_value'
+        assert arg == mock_val
+
+
+def test_list_data_attributes_flattened_error():
+    client = DataTaxonomyServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        client.list_data_attributes(
+            data_taxonomy.ListDataAttributesRequest(),
+            parent='parent_value',
+        )
+
+@pytest.mark.asyncio
+async def test_list_data_attributes_flattened_async():
+    client = DataTaxonomyServiceAsyncClient(
+        credentials=async_anonymous_credentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.list_data_attributes),
+            '__call__') as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(data_taxonomy.ListDataAttributesResponse())
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        response = await client.list_data_attributes(
+            parent='parent_value',
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        arg = args[0].parent
+        mock_val = 'parent_value'
+        assert arg == mock_val
+
+@pytest.mark.asyncio
+async def test_list_data_attributes_flattened_error_async():
+    client = DataTaxonomyServiceAsyncClient(
+        credentials=async_anonymous_credentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        await client.list_data_attributes(
+            data_taxonomy.ListDataAttributesRequest(),
+            parent='parent_value',
+        )
+
+
+def test_list_data_attributes_pager(transport_name: str = "grpc"):
+    client = DataTaxonomyServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport_name,
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.list_data_attributes),
+            '__call__') as call:
+        # Set the response to a series of pages.
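+        # Each response below is one page; the trailing RuntimeError would
+        # surface if the pager requested more pages than were provided.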
+ call.side_effect = ( + data_taxonomy.ListDataAttributesResponse( + data_attributes=[ + data_taxonomy.DataAttribute(), + data_taxonomy.DataAttribute(), + data_taxonomy.DataAttribute(), + ], + next_page_token='abc', + ), + data_taxonomy.ListDataAttributesResponse( + data_attributes=[], + next_page_token='def', + ), + data_taxonomy.ListDataAttributesResponse( + data_attributes=[ + data_taxonomy.DataAttribute(), + ], + next_page_token='ghi', + ), + data_taxonomy.ListDataAttributesResponse( + data_attributes=[ + data_taxonomy.DataAttribute(), + data_taxonomy.DataAttribute(), + ], + ), + RuntimeError, + ) + + expected_metadata = () + retry = retries.Retry() + timeout = 5 + expected_metadata = tuple(expected_metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ('parent', ''), + )), + ) + pager = client.list_data_attributes(request={}, retry=retry, timeout=timeout) + + assert pager._metadata == expected_metadata + assert pager._retry == retry + assert pager._timeout == timeout + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, data_taxonomy.DataAttribute) + for i in results) +def test_list_data_attributes_pages(transport_name: str = "grpc"): + client = DataTaxonomyServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport_name, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_data_attributes), + '__call__') as call: + # Set the response to a series of pages. + call.side_effect = ( + data_taxonomy.ListDataAttributesResponse( + data_attributes=[ + data_taxonomy.DataAttribute(), + data_taxonomy.DataAttribute(), + data_taxonomy.DataAttribute(), + ], + next_page_token='abc', + ), + data_taxonomy.ListDataAttributesResponse( + data_attributes=[], + next_page_token='def', + ), + data_taxonomy.ListDataAttributesResponse( + data_attributes=[ + data_taxonomy.DataAttribute(), + ], + next_page_token='ghi', + ), + data_taxonomy.ListDataAttributesResponse( + data_attributes=[ + data_taxonomy.DataAttribute(), + data_taxonomy.DataAttribute(), + ], + ), + RuntimeError, + ) + pages = list(client.list_data_attributes(request={}).pages) + for page_, token in zip(pages, ['abc','def','ghi', '']): + assert page_.raw_page.next_page_token == token + +@pytest.mark.asyncio +async def test_list_data_attributes_async_pager(): + client = DataTaxonomyServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_data_attributes), + '__call__', new_callable=mock.AsyncMock) as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + data_taxonomy.ListDataAttributesResponse( + data_attributes=[ + data_taxonomy.DataAttribute(), + data_taxonomy.DataAttribute(), + data_taxonomy.DataAttribute(), + ], + next_page_token='abc', + ), + data_taxonomy.ListDataAttributesResponse( + data_attributes=[], + next_page_token='def', + ), + data_taxonomy.ListDataAttributesResponse( + data_attributes=[ + data_taxonomy.DataAttribute(), + ], + next_page_token='ghi', + ), + data_taxonomy.ListDataAttributesResponse( + data_attributes=[ + data_taxonomy.DataAttribute(), + data_taxonomy.DataAttribute(), + ], + ), + RuntimeError, + ) + async_pager = await client.list_data_attributes(request={},) + assert async_pager.next_page_token == 'abc' + responses = [] + async for response in async_pager: # pragma: no branch + responses.append(response) + + assert len(responses) == 6 + assert all(isinstance(i, data_taxonomy.DataAttribute) + for i in responses) + + +@pytest.mark.asyncio +async def test_list_data_attributes_async_pages(): + client = DataTaxonomyServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_data_attributes), + '__call__', new_callable=mock.AsyncMock) as call: + # Set the response to a series of pages. + call.side_effect = ( + data_taxonomy.ListDataAttributesResponse( + data_attributes=[ + data_taxonomy.DataAttribute(), + data_taxonomy.DataAttribute(), + data_taxonomy.DataAttribute(), + ], + next_page_token='abc', + ), + data_taxonomy.ListDataAttributesResponse( + data_attributes=[], + next_page_token='def', + ), + data_taxonomy.ListDataAttributesResponse( + data_attributes=[ + data_taxonomy.DataAttribute(), + ], + next_page_token='ghi', + ), + data_taxonomy.ListDataAttributesResponse( + data_attributes=[ + data_taxonomy.DataAttribute(), + data_taxonomy.DataAttribute(), + ], + ), + RuntimeError, + ) + pages = [] + # Workaround issue in python 3.9 related to code coverage by adding `# pragma: no branch` + # See https://github.com/googleapis/gapic-generator-python/pull/1174#issuecomment-1025132372 + async for page_ in ( # pragma: no branch + await client.list_data_attributes(request={}) + ).pages: + pages.append(page_) + for page_, token in zip(pages, ['abc','def','ghi', '']): + assert page_.raw_page.next_page_token == token + +@pytest.mark.parametrize("request_type", [ + data_taxonomy.GetDataAttributeRequest, + dict, +]) +def test_get_data_attribute(request_type, transport: str = 'grpc'): + client = DataTaxonomyServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_data_attribute), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = data_taxonomy.DataAttribute( + name='name_value', + uid='uid_value', + description='description_value', + display_name='display_name_value', + parent_id='parent_id_value', + attribute_count=1628, + etag='etag_value', + ) + response = client.get_data_attribute(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = data_taxonomy.GetDataAttributeRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, data_taxonomy.DataAttribute) + assert response.name == 'name_value' + assert response.uid == 'uid_value' + assert response.description == 'description_value' + assert response.display_name == 'display_name_value' + assert response.parent_id == 'parent_id_value' + assert response.attribute_count == 1628 + assert response.etag == 'etag_value' + + +def test_get_data_attribute_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = DataTaxonomyServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = data_taxonomy.GetDataAttributeRequest( + name='name_value', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_data_attribute), + '__call__') as call: + call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client.get_data_attribute(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == data_taxonomy.GetDataAttributeRequest( + name='name_value', + ) + +def test_get_data_attribute_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = DataTaxonomyServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.get_data_attribute in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client._transport._wrapped_methods[client._transport.get_data_attribute] = mock_rpc + request = {} + client.get_data_attribute(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1
+
+ client.get_data_attribute(request)
+
+ # Establish that a new wrapper was not created for this call
+ assert wrapper_fn.call_count == 0
+ assert mock_rpc.call_count == 2
+
+@pytest.mark.asyncio
+async def test_get_data_attribute_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"):
+ # Clients should use _prep_wrapped_messages to create cached wrapped rpcs,
+ # instead of constructing them on each call
+ with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn:
+ client = DataTaxonomyServiceAsyncClient(
+ credentials=async_anonymous_credentials(),
+ transport=transport,
+ )
+
+ # Should wrap all calls on client creation
+ assert wrapper_fn.call_count > 0
+ wrapper_fn.reset_mock()
+
+ # Ensure method has been cached
+ assert client._client._transport.get_data_attribute in client._client._transport._wrapped_methods
+
+ # Replace cached wrapped function with mock
+ mock_rpc = mock.AsyncMock()
+ mock_rpc.return_value = mock.Mock()
+ client._client._transport._wrapped_methods[client._client._transport.get_data_attribute] = mock_rpc
+
+ request = {}
+ await client.get_data_attribute(request)
+
+ # Establish that the underlying gRPC stub method was called.
+ assert mock_rpc.call_count == 1
+
+ await client.get_data_attribute(request)
+
+ # Establish that a new wrapper was not created for this call
+ assert wrapper_fn.call_count == 0
+ assert mock_rpc.call_count == 2
+
+@pytest.mark.asyncio
+async def test_get_data_attribute_async(transport: str = 'grpc_asyncio', request_type=data_taxonomy.GetDataAttributeRequest):
+ client = DataTaxonomyServiceAsyncClient(
+ credentials=async_anonymous_credentials(),
+ transport=transport,
+ )
+
+ # Everything is optional in proto3 as far as the runtime is concerned,
+ # and we are mocking out the actual API, so just send an empty request.
+ request = request_type()
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(
+ type(client.transport.get_data_attribute),
+ '__call__') as call:
+ # Designate an appropriate return value for the call.
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(data_taxonomy.DataAttribute(
+ name='name_value',
+ uid='uid_value',
+ description='description_value',
+ display_name='display_name_value',
+ parent_id='parent_id_value',
+ attribute_count=1628,
+ etag='etag_value',
+ ))
+ response = await client.get_data_attribute(request)
+
+ # Establish that the underlying gRPC stub method was called.
+ assert len(call.mock_calls)
+ _, args, _ = call.mock_calls[0]
+ request = data_taxonomy.GetDataAttributeRequest()
+ assert args[0] == request
+
+ # Establish that the response is the type that we expect.
+ assert isinstance(response, data_taxonomy.DataAttribute)
+ assert response.name == 'name_value'
+ assert response.uid == 'uid_value'
+ assert response.description == 'description_value'
+ assert response.display_name == 'display_name_value'
+ assert response.parent_id == 'parent_id_value'
+ assert response.attribute_count == 1628
+ assert response.etag == 'etag_value'
+
+
+@pytest.mark.asyncio
+async def test_get_data_attribute_async_from_dict():
+ await test_get_data_attribute_async(request_type=dict)
+
+def test_get_data_attribute_field_headers():
+ client = DataTaxonomyServiceClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ )
+
+ # Any value that is part of the HTTP/1.1 URI should be sent as
+ # a field header. Set these to a non-empty value.
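+ # (For gRPC this routing information travels in the
+ # 'x-goog-request-params' metadata entry rather than in a URI, which is
+ # what the assertion at the end of this test verifies.)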
+ request = data_taxonomy.GetDataAttributeRequest()
+
+ request.name = 'name_value'
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(
+ type(client.transport.get_data_attribute),
+ '__call__') as call:
+ call.return_value = data_taxonomy.DataAttribute()
+ client.get_data_attribute(request)
+
+ # Establish that the underlying gRPC stub method was called.
+ assert len(call.mock_calls) == 1
+ _, args, _ = call.mock_calls[0]
+ assert args[0] == request
+
+ # Establish that the field header was sent.
+ _, _, kw = call.mock_calls[0]
+ assert (
+ 'x-goog-request-params',
+ 'name=name_value',
+ ) in kw['metadata']
+
+
+@pytest.mark.asyncio
+async def test_get_data_attribute_field_headers_async():
+ client = DataTaxonomyServiceAsyncClient(
+ credentials=async_anonymous_credentials(),
+ )
+
+ # Any value that is part of the HTTP/1.1 URI should be sent as
+ # a field header. Set these to a non-empty value.
+ request = data_taxonomy.GetDataAttributeRequest()
+
+ request.name = 'name_value'
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(
+ type(client.transport.get_data_attribute),
+ '__call__') as call:
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(data_taxonomy.DataAttribute())
+ await client.get_data_attribute(request)
+
+ # Establish that the underlying gRPC stub method was called.
+ assert len(call.mock_calls)
+ _, args, _ = call.mock_calls[0]
+ assert args[0] == request
+
+ # Establish that the field header was sent.
+ _, _, kw = call.mock_calls[0]
+ assert (
+ 'x-goog-request-params',
+ 'name=name_value',
+ ) in kw['metadata']
+
+
+def test_get_data_attribute_flattened():
+ client = DataTaxonomyServiceClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ )
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(
+ type(client.transport.get_data_attribute),
+ '__call__') as call:
+ # Designate an appropriate return value for the call.
+ call.return_value = data_taxonomy.DataAttribute()
+ # Call the method with a truthy value for each flattened field,
+ # using the keyword arguments to the method.
+ client.get_data_attribute(
+ name='name_value',
+ )
+
+ # Establish that the underlying call was made with the expected
+ # request object values.
+ assert len(call.mock_calls) == 1
+ _, args, _ = call.mock_calls[0]
+ arg = args[0].name
+ mock_val = 'name_value'
+ assert arg == mock_val
+
+
+def test_get_data_attribute_flattened_error():
+ client = DataTaxonomyServiceClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ )
+
+ # Attempting to call a method with both a request object and flattened
+ # fields is an error.
+ with pytest.raises(ValueError):
+ client.get_data_attribute(
+ data_taxonomy.GetDataAttributeRequest(),
+ name='name_value',
+ )
+
+@pytest.mark.asyncio
+async def test_get_data_attribute_flattened_async():
+ client = DataTaxonomyServiceAsyncClient(
+ credentials=async_anonymous_credentials(),
+ )
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(
+ type(client.transport.get_data_attribute),
+ '__call__') as call:
+ # Designate an appropriate return value for the call.
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(data_taxonomy.DataAttribute())
+ # Call the method with a truthy value for each flattened field,
+ # using the keyword arguments to the method.
+ response = await client.get_data_attribute( + name='name_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = 'name_value' + assert arg == mock_val + +@pytest.mark.asyncio +async def test_get_data_attribute_flattened_error_async(): + client = DataTaxonomyServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.get_data_attribute( + data_taxonomy.GetDataAttributeRequest(), + name='name_value', + ) + + +def test_credentials_transport_error(): + # It is an error to provide credentials and a transport instance. + transport = transports.DataTaxonomyServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = DataTaxonomyServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # It is an error to provide a credentials file and a transport instance. + transport = transports.DataTaxonomyServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = DataTaxonomyServiceClient( + client_options={"credentials_file": "credentials.json"}, + transport=transport, + ) + + # It is an error to provide an api_key and a transport instance. + transport = transports.DataTaxonomyServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + options = client_options.ClientOptions() + options.api_key = "api_key" + with pytest.raises(ValueError): + client = DataTaxonomyServiceClient( + client_options=options, + transport=transport, + ) + + # It is an error to provide an api_key and a credential. + options = client_options.ClientOptions() + options.api_key = "api_key" + with pytest.raises(ValueError): + client = DataTaxonomyServiceClient( + client_options=options, + credentials=ga_credentials.AnonymousCredentials() + ) + + # It is an error to provide scopes and a transport instance. + transport = transports.DataTaxonomyServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = DataTaxonomyServiceClient( + client_options={"scopes": ["1", "2"]}, + transport=transport, + ) + + +def test_transport_instance(): + # A client may be instantiated with a custom transport instance. + transport = transports.DataTaxonomyServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + client = DataTaxonomyServiceClient(transport=transport) + assert client.transport is transport + +def test_transport_get_channel(): + # A client may be instantiated with a custom transport instance. + transport = transports.DataTaxonomyServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + channel = transport.grpc_channel + assert channel + + transport = transports.DataTaxonomyServiceGrpcAsyncIOTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + channel = transport.grpc_channel + assert channel + +@pytest.mark.parametrize("transport_class", [ + transports.DataTaxonomyServiceGrpcTransport, + transports.DataTaxonomyServiceGrpcAsyncIOTransport, +]) +def test_transport_adc(transport_class): + # Test default credentials are used if not provided. 
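+ # google.auth.default() is the entry point for Application Default
+ # Credentials (ADC); patching it lets the test confirm the transport
+ # falls back to ADC without touching a real credential source.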
+ with mock.patch.object(google.auth, 'default') as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport_class() + adc.assert_called_once() + +def test_transport_kind_grpc(): + transport = DataTaxonomyServiceClient.get_transport_class("grpc")( + credentials=ga_credentials.AnonymousCredentials() + ) + assert transport.kind == "grpc" + + +def test_initialize_client_w_grpc(): + client = DataTaxonomyServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc" + ) + assert client is not None + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_create_data_taxonomy_empty_call_grpc(): + client = DataTaxonomyServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.create_data_taxonomy), + '__call__') as call: + call.return_value = operations_pb2.Operation(name='operations/op') + client.create_data_taxonomy(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = gcd_data_taxonomy.CreateDataTaxonomyRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_update_data_taxonomy_empty_call_grpc(): + client = DataTaxonomyServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.update_data_taxonomy), + '__call__') as call: + call.return_value = operations_pb2.Operation(name='operations/op') + client.update_data_taxonomy(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = gcd_data_taxonomy.UpdateDataTaxonomyRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_delete_data_taxonomy_empty_call_grpc(): + client = DataTaxonomyServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.delete_data_taxonomy), + '__call__') as call: + call.return_value = operations_pb2.Operation(name='operations/op') + client.delete_data_taxonomy(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = data_taxonomy.DeleteDataTaxonomyRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_list_data_taxonomies_empty_call_grpc(): + client = DataTaxonomyServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.list_data_taxonomies), + '__call__') as call: + call.return_value = data_taxonomy.ListDataTaxonomiesResponse() + client.list_data_taxonomies(request=None) + + # Establish that the underlying stub method was called. 
+ call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = data_taxonomy.ListDataTaxonomiesRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_get_data_taxonomy_empty_call_grpc(): + client = DataTaxonomyServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.get_data_taxonomy), + '__call__') as call: + call.return_value = data_taxonomy.DataTaxonomy() + client.get_data_taxonomy(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = data_taxonomy.GetDataTaxonomyRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_create_data_attribute_binding_empty_call_grpc(): + client = DataTaxonomyServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.create_data_attribute_binding), + '__call__') as call: + call.return_value = operations_pb2.Operation(name='operations/op') + client.create_data_attribute_binding(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = data_taxonomy.CreateDataAttributeBindingRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_update_data_attribute_binding_empty_call_grpc(): + client = DataTaxonomyServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.update_data_attribute_binding), + '__call__') as call: + call.return_value = operations_pb2.Operation(name='operations/op') + client.update_data_attribute_binding(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = data_taxonomy.UpdateDataAttributeBindingRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_delete_data_attribute_binding_empty_call_grpc(): + client = DataTaxonomyServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.delete_data_attribute_binding), + '__call__') as call: + call.return_value = operations_pb2.Operation(name='operations/op') + client.delete_data_attribute_binding(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = data_taxonomy.DeleteDataAttributeBindingRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. 
+def test_list_data_attribute_bindings_empty_call_grpc(): + client = DataTaxonomyServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.list_data_attribute_bindings), + '__call__') as call: + call.return_value = data_taxonomy.ListDataAttributeBindingsResponse() + client.list_data_attribute_bindings(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = data_taxonomy.ListDataAttributeBindingsRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_get_data_attribute_binding_empty_call_grpc(): + client = DataTaxonomyServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.get_data_attribute_binding), + '__call__') as call: + call.return_value = data_taxonomy.DataAttributeBinding() + client.get_data_attribute_binding(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = data_taxonomy.GetDataAttributeBindingRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_create_data_attribute_empty_call_grpc(): + client = DataTaxonomyServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.create_data_attribute), + '__call__') as call: + call.return_value = operations_pb2.Operation(name='operations/op') + client.create_data_attribute(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = data_taxonomy.CreateDataAttributeRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_update_data_attribute_empty_call_grpc(): + client = DataTaxonomyServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.update_data_attribute), + '__call__') as call: + call.return_value = operations_pb2.Operation(name='operations/op') + client.update_data_attribute(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = data_taxonomy.UpdateDataAttributeRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_delete_data_attribute_empty_call_grpc(): + client = DataTaxonomyServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. 
+ with mock.patch.object( + type(client.transport.delete_data_attribute), + '__call__') as call: + call.return_value = operations_pb2.Operation(name='operations/op') + client.delete_data_attribute(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = data_taxonomy.DeleteDataAttributeRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_list_data_attributes_empty_call_grpc(): + client = DataTaxonomyServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.list_data_attributes), + '__call__') as call: + call.return_value = data_taxonomy.ListDataAttributesResponse() + client.list_data_attributes(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = data_taxonomy.ListDataAttributesRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_get_data_attribute_empty_call_grpc(): + client = DataTaxonomyServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.get_data_attribute), + '__call__') as call: + call.return_value = data_taxonomy.DataAttribute() + client.get_data_attribute(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = data_taxonomy.GetDataAttributeRequest() + + assert args[0] == request_msg + + +def test_transport_kind_grpc_asyncio(): + transport = DataTaxonomyServiceAsyncClient.get_transport_class("grpc_asyncio")( + credentials=async_anonymous_credentials() + ) + assert transport.kind == "grpc_asyncio" + + +def test_initialize_client_w_grpc_asyncio(): + client = DataTaxonomyServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio" + ) + assert client is not None + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_create_data_taxonomy_empty_call_grpc_asyncio(): + client = DataTaxonomyServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.create_data_taxonomy), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name='operations/spam') + ) + await client.create_data_taxonomy(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = gcd_data_taxonomy.CreateDataTaxonomyRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. 
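+# (As in the other asyncio variants below, the mocked response is wrapped
+# in grpc_helpers_async.FakeUnaryUnaryCall so that awaiting the patched
+# stub behaves like a real aio unary-unary call.)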
+@pytest.mark.asyncio +async def test_update_data_taxonomy_empty_call_grpc_asyncio(): + client = DataTaxonomyServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.update_data_taxonomy), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name='operations/spam') + ) + await client.update_data_taxonomy(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = gcd_data_taxonomy.UpdateDataTaxonomyRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_delete_data_taxonomy_empty_call_grpc_asyncio(): + client = DataTaxonomyServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.delete_data_taxonomy), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name='operations/spam') + ) + await client.delete_data_taxonomy(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = data_taxonomy.DeleteDataTaxonomyRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_list_data_taxonomies_empty_call_grpc_asyncio(): + client = DataTaxonomyServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.list_data_taxonomies), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(data_taxonomy.ListDataTaxonomiesResponse( + next_page_token='next_page_token_value', + unreachable_locations=['unreachable_locations_value'], + )) + await client.list_data_taxonomies(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = data_taxonomy.ListDataTaxonomiesRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_get_data_taxonomy_empty_call_grpc_asyncio(): + client = DataTaxonomyServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.get_data_taxonomy), + '__call__') as call: + # Designate an appropriate return value for the call. 
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(data_taxonomy.DataTaxonomy( + name='name_value', + uid='uid_value', + description='description_value', + display_name='display_name_value', + attribute_count=1628, + etag='etag_value', + class_count=1182, + )) + await client.get_data_taxonomy(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = data_taxonomy.GetDataTaxonomyRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_create_data_attribute_binding_empty_call_grpc_asyncio(): + client = DataTaxonomyServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.create_data_attribute_binding), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name='operations/spam') + ) + await client.create_data_attribute_binding(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = data_taxonomy.CreateDataAttributeBindingRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_update_data_attribute_binding_empty_call_grpc_asyncio(): + client = DataTaxonomyServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.update_data_attribute_binding), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name='operations/spam') + ) + await client.update_data_attribute_binding(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = data_taxonomy.UpdateDataAttributeBindingRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_delete_data_attribute_binding_empty_call_grpc_asyncio(): + client = DataTaxonomyServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.delete_data_attribute_binding), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name='operations/spam') + ) + await client.delete_data_attribute_binding(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = data_taxonomy.DeleteDataAttributeBindingRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. 
+@pytest.mark.asyncio +async def test_list_data_attribute_bindings_empty_call_grpc_asyncio(): + client = DataTaxonomyServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.list_data_attribute_bindings), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(data_taxonomy.ListDataAttributeBindingsResponse( + next_page_token='next_page_token_value', + unreachable_locations=['unreachable_locations_value'], + )) + await client.list_data_attribute_bindings(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = data_taxonomy.ListDataAttributeBindingsRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_get_data_attribute_binding_empty_call_grpc_asyncio(): + client = DataTaxonomyServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.get_data_attribute_binding), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(data_taxonomy.DataAttributeBinding( + name='name_value', + uid='uid_value', + description='description_value', + display_name='display_name_value', + etag='etag_value', + attributes=['attributes_value'], + )) + await client.get_data_attribute_binding(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = data_taxonomy.GetDataAttributeBindingRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_create_data_attribute_empty_call_grpc_asyncio(): + client = DataTaxonomyServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.create_data_attribute), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name='operations/spam') + ) + await client.create_data_attribute(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = data_taxonomy.CreateDataAttributeRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_update_data_attribute_empty_call_grpc_asyncio(): + client = DataTaxonomyServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.update_data_attribute), + '__call__') as call: + # Designate an appropriate return value for the call. 
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name='operations/spam') + ) + await client.update_data_attribute(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = data_taxonomy.UpdateDataAttributeRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_delete_data_attribute_empty_call_grpc_asyncio(): + client = DataTaxonomyServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.delete_data_attribute), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name='operations/spam') + ) + await client.delete_data_attribute(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = data_taxonomy.DeleteDataAttributeRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_list_data_attributes_empty_call_grpc_asyncio(): + client = DataTaxonomyServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.list_data_attributes), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(data_taxonomy.ListDataAttributesResponse( + next_page_token='next_page_token_value', + unreachable_locations=['unreachable_locations_value'], + )) + await client.list_data_attributes(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = data_taxonomy.ListDataAttributesRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_get_data_attribute_empty_call_grpc_asyncio(): + client = DataTaxonomyServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.get_data_attribute), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(data_taxonomy.DataAttribute( + name='name_value', + uid='uid_value', + description='description_value', + display_name='display_name_value', + parent_id='parent_id_value', + attribute_count=1628, + etag='etag_value', + )) + await client.get_data_attribute(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = data_taxonomy.GetDataAttributeRequest() + + assert args[0] == request_msg + + +def test_transport_grpc_default(): + # A client should use the gRPC transport by default. 
+ client = DataTaxonomyServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + assert isinstance( + client.transport, + transports.DataTaxonomyServiceGrpcTransport, + ) + +def test_data_taxonomy_service_base_transport_error(): + # Passing both a credentials object and credentials_file should raise an error + with pytest.raises(core_exceptions.DuplicateCredentialArgs): + transport = transports.DataTaxonomyServiceTransport( + credentials=ga_credentials.AnonymousCredentials(), + credentials_file="credentials.json" + ) + + +def test_data_taxonomy_service_base_transport(): + # Instantiate the base transport. + with mock.patch('google.cloud.dataplex_v1.services.data_taxonomy_service.transports.DataTaxonomyServiceTransport.__init__') as Transport: + Transport.return_value = None + transport = transports.DataTaxonomyServiceTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Every method on the transport should just blindly + # raise NotImplementedError. + methods = ( + 'create_data_taxonomy', + 'update_data_taxonomy', + 'delete_data_taxonomy', + 'list_data_taxonomies', + 'get_data_taxonomy', + 'create_data_attribute_binding', + 'update_data_attribute_binding', + 'delete_data_attribute_binding', + 'list_data_attribute_bindings', + 'get_data_attribute_binding', + 'create_data_attribute', + 'update_data_attribute', + 'delete_data_attribute', + 'list_data_attributes', + 'get_data_attribute', + 'get_location', + 'list_locations', + 'get_operation', + 'cancel_operation', + 'delete_operation', + 'list_operations', + ) + for method in methods: + with pytest.raises(NotImplementedError): + getattr(transport, method)(request=object()) + + with pytest.raises(NotImplementedError): + transport.close() + + # Additionally, the LRO client (a property) should + # also raise NotImplementedError + with pytest.raises(NotImplementedError): + transport.operations_client + + # Catch all for all remaining methods and properties + remainder = [ + 'kind', + ] + for r in remainder: + with pytest.raises(NotImplementedError): + getattr(transport, r)() + + +def test_data_taxonomy_service_base_transport_with_credentials_file(): + # Instantiate the base transport with a credentials file + with mock.patch.object(google.auth, 'load_credentials_from_file', autospec=True) as load_creds, mock.patch('google.cloud.dataplex_v1.services.data_taxonomy_service.transports.DataTaxonomyServiceTransport._prep_wrapped_messages') as Transport: + Transport.return_value = None + load_creds.return_value = (ga_credentials.AnonymousCredentials(), None) + transport = transports.DataTaxonomyServiceTransport( + credentials_file="credentials.json", + quota_project_id="octopus", + ) + load_creds.assert_called_once_with("credentials.json", + scopes=None, + default_scopes=( + 'https://www.googleapis.com/auth/cloud-platform', +), + quota_project_id="octopus", + ) + + +def test_data_taxonomy_service_base_transport_with_adc(): + # Test the default credentials are used if credentials and credentials_file are None. + with mock.patch.object(google.auth, 'default', autospec=True) as adc, mock.patch('google.cloud.dataplex_v1.services.data_taxonomy_service.transports.DataTaxonomyServiceTransport._prep_wrapped_messages') as Transport: + Transport.return_value = None + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport = transports.DataTaxonomyServiceTransport() + adc.assert_called_once() + + +def test_data_taxonomy_service_auth_adc(): + # If no credentials are provided, we should use ADC credentials. 
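+ # (The assertion below also pins the default scope to
+ # 'https://www.googleapis.com/auth/cloud-platform', which appears to be
+ # the only OAuth scope this service requests.)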
+ with mock.patch.object(google.auth, 'default', autospec=True) as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + DataTaxonomyServiceClient() + adc.assert_called_once_with( + scopes=None, + default_scopes=( + 'https://www.googleapis.com/auth/cloud-platform', +), + quota_project_id=None, + ) + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.DataTaxonomyServiceGrpcTransport, + transports.DataTaxonomyServiceGrpcAsyncIOTransport, + ], +) +def test_data_taxonomy_service_transport_auth_adc(transport_class): + # If credentials and host are not provided, the transport class should use + # ADC credentials. + with mock.patch.object(google.auth, 'default', autospec=True) as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport_class(quota_project_id="octopus", scopes=["1", "2"]) + adc.assert_called_once_with( + scopes=["1", "2"], + default_scopes=( 'https://www.googleapis.com/auth/cloud-platform',), + quota_project_id="octopus", + ) + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.DataTaxonomyServiceGrpcTransport, + transports.DataTaxonomyServiceGrpcAsyncIOTransport, + ], +) +def test_data_taxonomy_service_transport_auth_gdch_credentials(transport_class): + host = 'https://language.com' + api_audience_tests = [None, 'https://language2.com'] + api_audience_expect = [host, 'https://language2.com'] + for t, e in zip(api_audience_tests, api_audience_expect): + with mock.patch.object(google.auth, 'default', autospec=True) as adc: + gdch_mock = mock.MagicMock() + type(gdch_mock).with_gdch_audience = mock.PropertyMock(return_value=gdch_mock) + adc.return_value = (gdch_mock, None) + transport_class(host=host, api_audience=t) + gdch_mock.with_gdch_audience.assert_called_once_with( + e + ) + + +@pytest.mark.parametrize( + "transport_class,grpc_helpers", + [ + (transports.DataTaxonomyServiceGrpcTransport, grpc_helpers), + (transports.DataTaxonomyServiceGrpcAsyncIOTransport, grpc_helpers_async) + ], +) +def test_data_taxonomy_service_transport_create_channel(transport_class, grpc_helpers): + # If credentials and host are not provided, the transport class should use + # ADC credentials. + with mock.patch.object(google.auth, "default", autospec=True) as adc, mock.patch.object( + grpc_helpers, "create_channel", autospec=True + ) as create_channel: + creds = ga_credentials.AnonymousCredentials() + adc.return_value = (creds, None) + transport_class( + quota_project_id="octopus", + scopes=["1", "2"] + ) + + create_channel.assert_called_with( + "dataplex.googleapis.com:443", + credentials=creds, + credentials_file=None, + quota_project_id="octopus", + default_scopes=( + 'https://www.googleapis.com/auth/cloud-platform', +), + scopes=["1", "2"], + default_host="dataplex.googleapis.com", + ssl_credentials=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + +@pytest.mark.parametrize("transport_class", [transports.DataTaxonomyServiceGrpcTransport, transports.DataTaxonomyServiceGrpcAsyncIOTransport]) +def test_data_taxonomy_service_grpc_transport_client_cert_source_for_mtls( + transport_class +): + cred = ga_credentials.AnonymousCredentials() + + # Check ssl_channel_credentials is used if provided. 
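+ # Two mutually exclusive mTLS paths are exercised: an explicit
+ # ssl_channel_credentials object, and a client_cert_source_for_mtls
+ # callback from which grpc.ssl_channel_credentials is built.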
+ with mock.patch.object(transport_class, "create_channel") as mock_create_channel:
+ mock_ssl_channel_creds = mock.Mock()
+ transport_class(
+ host="squid.clam.whelk",
+ credentials=cred,
+ ssl_channel_credentials=mock_ssl_channel_creds
+ )
+ mock_create_channel.assert_called_once_with(
+ "squid.clam.whelk:443",
+ credentials=cred,
+ credentials_file=None,
+ scopes=None,
+ ssl_credentials=mock_ssl_channel_creds,
+ quota_project_id=None,
+ options=[
+ ("grpc.max_send_message_length", -1),
+ ("grpc.max_receive_message_length", -1),
+ ],
+ )
+
+ # Check that if ssl_channel_credentials is not provided, then
+ # client_cert_source_for_mtls is used.
+ with mock.patch.object(transport_class, "create_channel", return_value=mock.Mock()):
+ with mock.patch("grpc.ssl_channel_credentials") as mock_ssl_cred:
+ transport_class(
+ credentials=cred,
+ client_cert_source_for_mtls=client_cert_source_callback
+ )
+ expected_cert, expected_key = client_cert_source_callback()
+ mock_ssl_cred.assert_called_once_with(
+ certificate_chain=expected_cert,
+ private_key=expected_key
+ )
+
+
+@pytest.mark.parametrize("transport_name", [
+ "grpc",
+ "grpc_asyncio",
+])
+def test_data_taxonomy_service_host_no_port(transport_name):
+ client = DataTaxonomyServiceClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ client_options=client_options.ClientOptions(api_endpoint='dataplex.googleapis.com'),
+ transport=transport_name,
+ )
+ assert client.transport._host == (
+ 'dataplex.googleapis.com:443'
+ )
+
+@pytest.mark.parametrize("transport_name", [
+ "grpc",
+ "grpc_asyncio",
+])
+def test_data_taxonomy_service_host_with_port(transport_name):
+ client = DataTaxonomyServiceClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ client_options=client_options.ClientOptions(api_endpoint='dataplex.googleapis.com:8000'),
+ transport=transport_name,
+ )
+ assert client.transport._host == (
+ 'dataplex.googleapis.com:8000'
+ )
+
+def test_data_taxonomy_service_grpc_transport_channel():
+ channel = grpc.secure_channel('http://localhost/', grpc.local_channel_credentials())
+
+ # Check that channel is used if provided.
+ transport = transports.DataTaxonomyServiceGrpcTransport(
+ host="squid.clam.whelk",
+ channel=channel,
+ )
+ assert transport.grpc_channel == channel
+ assert transport._host == "squid.clam.whelk:443"
+ assert transport._ssl_channel_credentials is None
+
+
+def test_data_taxonomy_service_grpc_asyncio_transport_channel():
+ channel = aio.secure_channel('http://localhost/', grpc.local_channel_credentials())
+
+ # Check that channel is used if provided.
+ transport = transports.DataTaxonomyServiceGrpcAsyncIOTransport(
+ host="squid.clam.whelk",
+ channel=channel,
+ )
+ assert transport.grpc_channel == channel
+ assert transport._host == "squid.clam.whelk:443"
+ assert transport._ssl_channel_credentials is None
+
+
+# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are
+# removed from grpc/grpc_asyncio transport constructor.
+@pytest.mark.parametrize("transport_class", [transports.DataTaxonomyServiceGrpcTransport, transports.DataTaxonomyServiceGrpcAsyncIOTransport]) +def test_data_taxonomy_service_transport_channel_mtls_with_client_cert_source( + transport_class +): + with mock.patch("grpc.ssl_channel_credentials", autospec=True) as grpc_ssl_channel_cred: + with mock.patch.object(transport_class, "create_channel") as grpc_create_channel: + mock_ssl_cred = mock.Mock() + grpc_ssl_channel_cred.return_value = mock_ssl_cred + + mock_grpc_channel = mock.Mock() + grpc_create_channel.return_value = mock_grpc_channel + + cred = ga_credentials.AnonymousCredentials() + with pytest.warns(DeprecationWarning): + with mock.patch.object(google.auth, 'default') as adc: + adc.return_value = (cred, None) + transport = transport_class( + host="squid.clam.whelk", + api_mtls_endpoint="mtls.squid.clam.whelk", + client_cert_source=client_cert_source_callback, + ) + adc.assert_called_once() + + grpc_ssl_channel_cred.assert_called_once_with( + certificate_chain=b"cert bytes", private_key=b"key bytes" + ) + grpc_create_channel.assert_called_once_with( + "mtls.squid.clam.whelk:443", + credentials=cred, + credentials_file=None, + scopes=None, + ssl_credentials=mock_ssl_cred, + quota_project_id=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + assert transport.grpc_channel == mock_grpc_channel + assert transport._ssl_channel_credentials == mock_ssl_cred + + +# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are +# removed from grpc/grpc_asyncio transport constructor. +@pytest.mark.parametrize("transport_class", [transports.DataTaxonomyServiceGrpcTransport, transports.DataTaxonomyServiceGrpcAsyncIOTransport]) +def test_data_taxonomy_service_transport_channel_mtls_with_adc( + transport_class +): + mock_ssl_cred = mock.Mock() + with mock.patch.multiple( + "google.auth.transport.grpc.SslCredentials", + __init__=mock.Mock(return_value=None), + ssl_credentials=mock.PropertyMock(return_value=mock_ssl_cred), + ): + with mock.patch.object(transport_class, "create_channel") as grpc_create_channel: + mock_grpc_channel = mock.Mock() + grpc_create_channel.return_value = mock_grpc_channel + mock_cred = mock.Mock() + + with pytest.warns(DeprecationWarning): + transport = transport_class( + host="squid.clam.whelk", + credentials=mock_cred, + api_mtls_endpoint="mtls.squid.clam.whelk", + client_cert_source=None, + ) + + grpc_create_channel.assert_called_once_with( + "mtls.squid.clam.whelk:443", + credentials=mock_cred, + credentials_file=None, + scopes=None, + ssl_credentials=mock_ssl_cred, + quota_project_id=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + assert transport.grpc_channel == mock_grpc_channel + + +def test_data_taxonomy_service_grpc_lro_client(): + client = DataTaxonomyServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + transport = client.transport + + # Ensure that we have a api-core operations client. + assert isinstance( + transport.operations_client, + operations_v1.OperationsClient, + ) + + # Ensure that subsequent calls to the property send the exact same object. 
+ assert transport.operations_client is transport.operations_client
+
+
+def test_data_taxonomy_service_grpc_lro_async_client():
+ client = DataTaxonomyServiceAsyncClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ transport='grpc_asyncio',
+ )
+ transport = client.transport
+
+ # Ensure that we have an api-core operations client.
+ assert isinstance(
+ transport.operations_client,
+ operations_v1.OperationsAsyncClient,
+ )
+
+ # Ensure that subsequent calls to the property return the exact same object.
+ assert transport.operations_client is transport.operations_client
+
+
+def test_data_attribute_path():
+ project = "squid"
+ location = "clam"
+ dataTaxonomy = "whelk"
+ data_attribute_id = "octopus"
+ expected = "projects/{project}/locations/{location}/dataTaxonomies/{dataTaxonomy}/attributes/{data_attribute_id}".format(project=project, location=location, dataTaxonomy=dataTaxonomy, data_attribute_id=data_attribute_id, )
+ actual = DataTaxonomyServiceClient.data_attribute_path(project, location, dataTaxonomy, data_attribute_id)
+ assert expected == actual
+
+
+def test_parse_data_attribute_path():
+ expected = {
+ "project": "oyster",
+ "location": "nudibranch",
+ "dataTaxonomy": "cuttlefish",
+ "data_attribute_id": "mussel",
+ }
+ path = DataTaxonomyServiceClient.data_attribute_path(**expected)
+
+ # Check that the path construction is reversible.
+ actual = DataTaxonomyServiceClient.parse_data_attribute_path(path)
+ assert expected == actual
+
+def test_data_attribute_binding_path():
+ project = "winkle"
+ location = "nautilus"
+ data_attribute_binding_id = "scallop"
+ expected = "projects/{project}/locations/{location}/dataAttributeBindings/{data_attribute_binding_id}".format(project=project, location=location, data_attribute_binding_id=data_attribute_binding_id, )
+ actual = DataTaxonomyServiceClient.data_attribute_binding_path(project, location, data_attribute_binding_id)
+ assert expected == actual
+
+
+def test_parse_data_attribute_binding_path():
+ expected = {
+ "project": "abalone",
+ "location": "squid",
+ "data_attribute_binding_id": "clam",
+ }
+ path = DataTaxonomyServiceClient.data_attribute_binding_path(**expected)
+
+ # Check that the path construction is reversible.
+ actual = DataTaxonomyServiceClient.parse_data_attribute_binding_path(path)
+ assert expected == actual
+
+def test_data_taxonomy_path():
+ project = "whelk"
+ location = "octopus"
+ data_taxonomy_id = "oyster"
+ expected = "projects/{project}/locations/{location}/dataTaxonomies/{data_taxonomy_id}".format(project=project, location=location, data_taxonomy_id=data_taxonomy_id, )
+ actual = DataTaxonomyServiceClient.data_taxonomy_path(project, location, data_taxonomy_id)
+ assert expected == actual
+
+
+def test_parse_data_taxonomy_path():
+ expected = {
+ "project": "nudibranch",
+ "location": "cuttlefish",
+ "data_taxonomy_id": "mussel",
+ }
+ path = DataTaxonomyServiceClient.data_taxonomy_path(**expected)
+
+ # Check that the path construction is reversible.
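+ # (The parse_* helper is expected to invert the *_path template exactly,
+ # returning the original component dict.)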
+ actual = DataTaxonomyServiceClient.parse_data_taxonomy_path(path) + assert expected == actual + +def test_common_billing_account_path(): + billing_account = "winkle" + expected = "billingAccounts/{billing_account}".format(billing_account=billing_account, ) + actual = DataTaxonomyServiceClient.common_billing_account_path(billing_account) + assert expected == actual + + +def test_parse_common_billing_account_path(): + expected = { + "billing_account": "nautilus", + } + path = DataTaxonomyServiceClient.common_billing_account_path(**expected) + + # Check that the path construction is reversible. + actual = DataTaxonomyServiceClient.parse_common_billing_account_path(path) + assert expected == actual + +def test_common_folder_path(): + folder = "scallop" + expected = "folders/{folder}".format(folder=folder, ) + actual = DataTaxonomyServiceClient.common_folder_path(folder) + assert expected == actual + + +def test_parse_common_folder_path(): + expected = { + "folder": "abalone", + } + path = DataTaxonomyServiceClient.common_folder_path(**expected) + + # Check that the path construction is reversible. + actual = DataTaxonomyServiceClient.parse_common_folder_path(path) + assert expected == actual + +def test_common_organization_path(): + organization = "squid" + expected = "organizations/{organization}".format(organization=organization, ) + actual = DataTaxonomyServiceClient.common_organization_path(organization) + assert expected == actual + + +def test_parse_common_organization_path(): + expected = { + "organization": "clam", + } + path = DataTaxonomyServiceClient.common_organization_path(**expected) + + # Check that the path construction is reversible. + actual = DataTaxonomyServiceClient.parse_common_organization_path(path) + assert expected == actual + +def test_common_project_path(): + project = "whelk" + expected = "projects/{project}".format(project=project, ) + actual = DataTaxonomyServiceClient.common_project_path(project) + assert expected == actual + + +def test_parse_common_project_path(): + expected = { + "project": "octopus", + } + path = DataTaxonomyServiceClient.common_project_path(**expected) + + # Check that the path construction is reversible. + actual = DataTaxonomyServiceClient.parse_common_project_path(path) + assert expected == actual + +def test_common_location_path(): + project = "oyster" + location = "nudibranch" + expected = "projects/{project}/locations/{location}".format(project=project, location=location, ) + actual = DataTaxonomyServiceClient.common_location_path(project, location) + assert expected == actual + + +def test_parse_common_location_path(): + expected = { + "project": "cuttlefish", + "location": "mussel", + } + path = DataTaxonomyServiceClient.common_location_path(**expected) + + # Check that the path construction is reversible. 
+ actual = DataTaxonomyServiceClient.parse_common_location_path(path) + assert expected == actual + + +def test_client_with_default_client_info(): + client_info = gapic_v1.client_info.ClientInfo() + + with mock.patch.object(transports.DataTaxonomyServiceTransport, '_prep_wrapped_messages') as prep: + client = DataTaxonomyServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + client_info=client_info, + ) + prep.assert_called_once_with(client_info) + + with mock.patch.object(transports.DataTaxonomyServiceTransport, '_prep_wrapped_messages') as prep: + transport_class = DataTaxonomyServiceClient.get_transport_class() + transport = transport_class( + credentials=ga_credentials.AnonymousCredentials(), + client_info=client_info, + ) + prep.assert_called_once_with(client_info) + + +def test_delete_operation(transport: str = "grpc"): + client = DataTaxonomyServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.DeleteOperationRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = None + response = client.delete_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert response is None +@pytest.mark.asyncio +async def test_delete_operation_async(transport: str = "grpc_asyncio"): + client = DataTaxonomyServiceAsyncClient( + credentials=async_anonymous_credentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.DeleteOperationRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + None + ) + response = await client.delete_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert response is None + +def test_delete_operation_field_headers(): + client = DataTaxonomyServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.DeleteOperationRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_operation), "__call__") as call: + call.return_value = None + + client.delete_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. 
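+ # The routing header is read back out of the keyword arguments recorded by the mock.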
+ _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "name=locations",) in kw["metadata"] +@pytest.mark.asyncio +async def test_delete_operation_field_headers_async(): + client = DataTaxonomyServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.DeleteOperationRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_operation), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + None + ) + await client.delete_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "name=locations",) in kw["metadata"] + +def test_delete_operation_from_dict(): + client = DataTaxonomyServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = None + + response = client.delete_operation( + request={ + "name": "locations", + } + ) + call.assert_called() +@pytest.mark.asyncio +async def test_delete_operation_from_dict_async(): + client = DataTaxonomyServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + None + ) + response = await client.delete_operation( + request={ + "name": "locations", + } + ) + call.assert_called() + + +def test_cancel_operation(transport: str = "grpc"): + client = DataTaxonomyServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.CancelOperationRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = None + response = client.cancel_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert response is None +@pytest.mark.asyncio +async def test_cancel_operation_async(transport: str = "grpc_asyncio"): + client = DataTaxonomyServiceAsyncClient( + credentials=async_anonymous_credentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.CancelOperationRequest() + + # Mock the actual call within the gRPC stub, and fake the request. 
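+ # Patching __call__ on the multicallable's type intercepts the RPC before any network I/O happens.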
+ with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + None + ) + response = await client.cancel_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert response is None + +def test_cancel_operation_field_headers(): + client = DataTaxonomyServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.CancelOperationRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: + call.return_value = None + + client.cancel_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "name=locations",) in kw["metadata"] +@pytest.mark.asyncio +async def test_cancel_operation_field_headers_async(): + client = DataTaxonomyServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.CancelOperationRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + None + ) + await client.cancel_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "name=locations",) in kw["metadata"] + +def test_cancel_operation_from_dict(): + client = DataTaxonomyServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = None + + response = client.cancel_operation( + request={ + "name": "locations", + } + ) + call.assert_called() +@pytest.mark.asyncio +async def test_cancel_operation_from_dict_async(): + client = DataTaxonomyServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: + # Designate an appropriate return value for the call. 
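+ # FakeUnaryUnaryCall wraps the value so the mocked result is awaitable, as a real async call would be.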
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + None + ) + response = await client.cancel_operation( + request={ + "name": "locations", + } + ) + call.assert_called() + + +def test_get_operation(transport: str = "grpc"): + client = DataTaxonomyServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.GetOperationRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation() + response = client.get_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, operations_pb2.Operation) +@pytest.mark.asyncio +async def test_get_operation_async(transport: str = "grpc_asyncio"): + client = DataTaxonomyServiceAsyncClient( + credentials=async_anonymous_credentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.GetOperationRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation() + ) + response = await client.get_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, operations_pb2.Operation) + +def test_get_operation_field_headers(): + client = DataTaxonomyServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.GetOperationRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_operation), "__call__") as call: + call.return_value = operations_pb2.Operation() + + client.get_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "name=locations",) in kw["metadata"] +@pytest.mark.asyncio +async def test_get_operation_field_headers_async(): + client = DataTaxonomyServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.GetOperationRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object(type(client.transport.get_operation), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation() + ) + await client.get_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "name=locations",) in kw["metadata"] + +def test_get_operation_from_dict(): + client = DataTaxonomyServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation() + + response = client.get_operation( + request={ + "name": "locations", + } + ) + call.assert_called() +@pytest.mark.asyncio +async def test_get_operation_from_dict_async(): + client = DataTaxonomyServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation() + ) + response = await client.get_operation( + request={ + "name": "locations", + } + ) + call.assert_called() + + +def test_list_operations(transport: str = "grpc"): + client = DataTaxonomyServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.ListOperationsRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_operations), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.ListOperationsResponse() + response = client.list_operations(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, operations_pb2.ListOperationsResponse) +@pytest.mark.asyncio +async def test_list_operations_async(transport: str = "grpc_asyncio"): + client = DataTaxonomyServiceAsyncClient( + credentials=async_anonymous_credentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.ListOperationsRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_operations), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.ListOperationsResponse() + ) + response = await client.list_operations(request) + # Establish that the underlying gRPC stub method was called. 
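+ # Exactly one invocation is expected, carrying the original request object through unchanged.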
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, operations_pb2.ListOperationsResponse) + +def test_list_operations_field_headers(): + client = DataTaxonomyServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.ListOperationsRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_operations), "__call__") as call: + call.return_value = operations_pb2.ListOperationsResponse() + + client.list_operations(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "name=locations",) in kw["metadata"] +@pytest.mark.asyncio +async def test_list_operations_field_headers_async(): + client = DataTaxonomyServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.ListOperationsRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_operations), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.ListOperationsResponse() + ) + await client.list_operations(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "name=locations",) in kw["metadata"] + +def test_list_operations_from_dict(): + client = DataTaxonomyServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_operations), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.ListOperationsResponse() + + response = client.list_operations( + request={ + "name": "locations", + } + ) + call.assert_called() +@pytest.mark.asyncio +async def test_list_operations_from_dict_async(): + client = DataTaxonomyServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_operations), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.ListOperationsResponse() + ) + response = await client.list_operations( + request={ + "name": "locations", + } + ) + call.assert_called() + + +def test_list_locations(transport: str = "grpc"): + client = DataTaxonomyServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. 
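+ # An empty ListLocationsRequest exercises the wiring without needing any server-side state.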
+ request = locations_pb2.ListLocationsRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_locations), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = locations_pb2.ListLocationsResponse() + response = client.list_locations(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, locations_pb2.ListLocationsResponse) +@pytest.mark.asyncio +async def test_list_locations_async(transport: str = "grpc_asyncio"): + client = DataTaxonomyServiceAsyncClient( + credentials=async_anonymous_credentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = locations_pb2.ListLocationsRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_locations), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + locations_pb2.ListLocationsResponse() + ) + response = await client.list_locations(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, locations_pb2.ListLocationsResponse) + +def test_list_locations_field_headers(): + client = DataTaxonomyServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = locations_pb2.ListLocationsRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_locations), "__call__") as call: + call.return_value = locations_pb2.ListLocationsResponse() + + client.list_locations(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "name=locations",) in kw["metadata"] +@pytest.mark.asyncio +async def test_list_locations_field_headers_async(): + client = DataTaxonomyServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = locations_pb2.ListLocationsRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_locations), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + locations_pb2.ListLocationsResponse() + ) + await client.list_locations(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. 
+ _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "name=locations",) in kw["metadata"] + +def test_list_locations_from_dict(): + client = DataTaxonomyServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_locations), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = locations_pb2.ListLocationsResponse() + + response = client.list_locations( + request={ + "name": "locations", + } + ) + call.assert_called() +@pytest.mark.asyncio +async def test_list_locations_from_dict_async(): + client = DataTaxonomyServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_locations), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + locations_pb2.ListLocationsResponse() + ) + response = await client.list_locations( + request={ + "name": "locations", + } + ) + call.assert_called() + + +def test_get_location(transport: str = "grpc"): + client = DataTaxonomyServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = locations_pb2.GetLocationRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_location), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = locations_pb2.Location() + response = client.get_location(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, locations_pb2.Location) +@pytest.mark.asyncio +async def test_get_location_async(transport: str = "grpc_asyncio"): + client = DataTaxonomyServiceAsyncClient( + credentials=async_anonymous_credentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = locations_pb2.GetLocationRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_location), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + locations_pb2.Location() + ) + response = await client.get_location(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, locations_pb2.Location) + +def test_get_location_field_headers(): + client = DataTaxonomyServiceClient( + credentials=ga_credentials.AnonymousCredentials()) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. 
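+ # The name set below should surface verbatim in the x-goog-request-params metadata entry.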
+ request = locations_pb2.GetLocationRequest()
+ request.name = "locations/abc"
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(type(client.transport.get_location), "__call__") as call:
+ call.return_value = locations_pb2.Location()
+
+ client.get_location(request)
+ # Establish that the underlying gRPC stub method was called.
+ assert len(call.mock_calls) == 1
+ _, args, _ = call.mock_calls[0]
+ assert args[0] == request
+
+ # Establish that the field header was sent.
+ _, _, kw = call.mock_calls[0]
+ assert ("x-goog-request-params", "name=locations/abc",) in kw["metadata"]
+@pytest.mark.asyncio
+async def test_get_location_field_headers_async():
+ client = DataTaxonomyServiceAsyncClient(
+ credentials=async_anonymous_credentials()
+ )
+
+ # Any value that is part of the HTTP/1.1 URI should be sent as
+ # a field header. Set these to a non-empty value.
+ request = locations_pb2.GetLocationRequest()
+ request.name = "locations/abc"
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(type(client.transport.get_location), "__call__") as call:
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
+ locations_pb2.Location()
+ )
+ await client.get_location(request)
+ # Establish that the underlying gRPC stub method was called.
+ assert len(call.mock_calls) == 1
+ _, args, _ = call.mock_calls[0]
+ assert args[0] == request
+
+ # Establish that the field header was sent.
+ _, _, kw = call.mock_calls[0]
+ assert ("x-goog-request-params", "name=locations/abc",) in kw["metadata"]
+
+def test_get_location_from_dict():
+ client = DataTaxonomyServiceClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ )
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(type(client.transport.get_location), "__call__") as call:
+ # Designate an appropriate return value for the call.
+ call.return_value = locations_pb2.Location()
+
+ response = client.get_location(
+ request={
+ "name": "locations/abc",
+ }
+ )
+ call.assert_called()
+@pytest.mark.asyncio
+async def test_get_location_from_dict_async():
+ client = DataTaxonomyServiceAsyncClient(
+ credentials=async_anonymous_credentials(),
+ )
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(type(client.transport.get_location), "__call__") as call:
+ # Designate an appropriate return value for the call.
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + locations_pb2.Location() + ) + response = await client.get_location( + request={ + "name": "locations", + } + ) + call.assert_called() + + +def test_transport_close_grpc(): + client = DataTaxonomyServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc" + ) + with mock.patch.object(type(getattr(client.transport, "_grpc_channel")), "close") as close: + with client: + close.assert_not_called() + close.assert_called_once() + + +@pytest.mark.asyncio +async def test_transport_close_grpc_asyncio(): + client = DataTaxonomyServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio" + ) + with mock.patch.object(type(getattr(client.transport, "_grpc_channel")), "close") as close: + async with client: + close.assert_not_called() + close.assert_called_once() + + +def test_client_ctx(): + transports = [ + 'grpc', + ] + for transport in transports: + client = DataTaxonomyServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport + ) + # Test client calls underlying transport. + with mock.patch.object(type(client.transport), "close") as close: + close.assert_not_called() + with client: + pass + close.assert_called() + +@pytest.mark.parametrize("client_class,transport_class", [ + (DataTaxonomyServiceClient, transports.DataTaxonomyServiceGrpcTransport), + (DataTaxonomyServiceAsyncClient, transports.DataTaxonomyServiceGrpcAsyncIOTransport), +]) +def test_api_key_credentials(client_class, transport_class): + with mock.patch.object( + google.auth._default, "get_api_key_credentials", create=True + ) as get_api_key_credentials: + mock_cred = mock.Mock() + get_api_key_credentials.return_value = mock_cred + options = client_options.ClientOptions() + options.api_key = "api_key" + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options) + patched.assert_called_once_with( + credentials=mock_cred, + credentials_file=None, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) diff --git a/owl-bot-staging/google-cloud-dataplex/v1/tests/unit/gapic/dataplex_v1/test_dataplex_service.py b/owl-bot-staging/google-cloud-dataplex/v1/tests/unit/gapic/dataplex_v1/test_dataplex_service.py new file mode 100644 index 000000000000..40bd69b6232e --- /dev/null +++ b/owl-bot-staging/google-cloud-dataplex/v1/tests/unit/gapic/dataplex_v1/test_dataplex_service.py @@ -0,0 +1,16649 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +import os +# try/except added for compatibility with python < 3.8 +try: + from unittest import mock + from unittest.mock import AsyncMock # pragma: NO COVER +except ImportError: # pragma: NO COVER + import mock + +import grpc +from grpc.experimental import aio +import math +import pytest +from google.api_core import api_core_version +from proto.marshal.rules.dates import DurationRule, TimestampRule +from proto.marshal.rules import wrappers + +try: + from google.auth.aio import credentials as ga_credentials_async + HAS_GOOGLE_AUTH_AIO = True +except ImportError: # pragma: NO COVER + HAS_GOOGLE_AUTH_AIO = False + +from google.api_core import client_options +from google.api_core import exceptions as core_exceptions +from google.api_core import future +from google.api_core import gapic_v1 +from google.api_core import grpc_helpers +from google.api_core import grpc_helpers_async +from google.api_core import operation +from google.api_core import operation_async # type: ignore +from google.api_core import operations_v1 +from google.api_core import path_template +from google.api_core import retry as retries +from google.auth import credentials as ga_credentials +from google.auth.exceptions import MutualTLSChannelError +from google.cloud.dataplex_v1.services.dataplex_service import DataplexServiceAsyncClient +from google.cloud.dataplex_v1.services.dataplex_service import DataplexServiceClient +from google.cloud.dataplex_v1.services.dataplex_service import pagers +from google.cloud.dataplex_v1.services.dataplex_service import transports +from google.cloud.dataplex_v1.types import analyze +from google.cloud.dataplex_v1.types import resources +from google.cloud.dataplex_v1.types import service +from google.cloud.dataplex_v1.types import tasks +from google.cloud.location import locations_pb2 +from google.iam.v1 import iam_policy_pb2 # type: ignore +from google.iam.v1 import options_pb2 # type: ignore +from google.iam.v1 import policy_pb2 # type: ignore +from google.longrunning import operations_pb2 # type: ignore +from google.oauth2 import service_account +from google.protobuf import duration_pb2 # type: ignore +from google.protobuf import empty_pb2 # type: ignore +from google.protobuf import field_mask_pb2 # type: ignore +from google.protobuf import timestamp_pb2 # type: ignore +import google.auth + + +async def mock_async_gen(data, chunk_size=1): + for i in range(0, len(data)): # pragma: NO COVER + chunk = data[i : i + chunk_size] + yield chunk.encode("utf-8") + +def client_cert_source_callback(): + return b"cert bytes", b"key bytes" + +# TODO: use async auth anon credentials by default once the minimum version of google-auth is upgraded. +# See related issue: https://github.com/googleapis/gapic-generator-python/issues/2107. +def async_anonymous_credentials(): + if HAS_GOOGLE_AUTH_AIO: + return ga_credentials_async.AnonymousCredentials() + return ga_credentials.AnonymousCredentials() + +# If default endpoint is localhost, then default mtls endpoint will be the same. +# This method modifies the default endpoint so the client can produce a different +# mtls endpoint for endpoint testing purposes. +def modify_default_endpoint(client): + return "foo.googleapis.com" if ("localhost" in client.DEFAULT_ENDPOINT) else client.DEFAULT_ENDPOINT + +# If default endpoint template is localhost, then default mtls endpoint will be the same. +# This method modifies the default endpoint template so the client can produce a different +# mtls endpoint for endpoint testing purposes. 
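+# The substituted template keeps the {UNIVERSE_DOMAIN} placeholder so later .format() calls still work.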
+def modify_default_endpoint_template(client): + return "test.{UNIVERSE_DOMAIN}" if ("localhost" in client._DEFAULT_ENDPOINT_TEMPLATE) else client._DEFAULT_ENDPOINT_TEMPLATE + + +def test__get_default_mtls_endpoint(): + api_endpoint = "example.googleapis.com" + api_mtls_endpoint = "example.mtls.googleapis.com" + sandbox_endpoint = "example.sandbox.googleapis.com" + sandbox_mtls_endpoint = "example.mtls.sandbox.googleapis.com" + non_googleapi = "api.example.com" + + assert DataplexServiceClient._get_default_mtls_endpoint(None) is None + assert DataplexServiceClient._get_default_mtls_endpoint(api_endpoint) == api_mtls_endpoint + assert DataplexServiceClient._get_default_mtls_endpoint(api_mtls_endpoint) == api_mtls_endpoint + assert DataplexServiceClient._get_default_mtls_endpoint(sandbox_endpoint) == sandbox_mtls_endpoint + assert DataplexServiceClient._get_default_mtls_endpoint(sandbox_mtls_endpoint) == sandbox_mtls_endpoint + assert DataplexServiceClient._get_default_mtls_endpoint(non_googleapi) == non_googleapi + +def test__read_environment_variables(): + assert DataplexServiceClient._read_environment_variables() == (False, "auto", None) + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + assert DataplexServiceClient._read_environment_variables() == (True, "auto", None) + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "false"}): + assert DataplexServiceClient._read_environment_variables() == (False, "auto", None) + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"}): + with pytest.raises(ValueError) as excinfo: + DataplexServiceClient._read_environment_variables() + assert str(excinfo.value) == "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + assert DataplexServiceClient._read_environment_variables() == (False, "never", None) + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + assert DataplexServiceClient._read_environment_variables() == (False, "always", None) + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"}): + assert DataplexServiceClient._read_environment_variables() == (False, "auto", None) + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): + with pytest.raises(MutualTLSChannelError) as excinfo: + DataplexServiceClient._read_environment_variables() + assert str(excinfo.value) == "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + + with mock.patch.dict(os.environ, {"GOOGLE_CLOUD_UNIVERSE_DOMAIN": "foo.com"}): + assert DataplexServiceClient._read_environment_variables() == (False, "auto", "foo.com") + +def test__get_client_cert_source(): + mock_provided_cert_source = mock.Mock() + mock_default_cert_source = mock.Mock() + + assert DataplexServiceClient._get_client_cert_source(None, False) is None + assert DataplexServiceClient._get_client_cert_source(mock_provided_cert_source, False) is None + assert DataplexServiceClient._get_client_cert_source(mock_provided_cert_source, True) == mock_provided_cert_source + + with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=True): + with mock.patch('google.auth.transport.mtls.default_client_cert_source', return_value=mock_default_cert_source): + assert DataplexServiceClient._get_client_cert_source(None, True) is mock_default_cert_source + assert 
DataplexServiceClient._get_client_cert_source(mock_provided_cert_source, "true") is mock_provided_cert_source + +@mock.patch.object(DataplexServiceClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(DataplexServiceClient)) +@mock.patch.object(DataplexServiceAsyncClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(DataplexServiceAsyncClient)) +def test__get_api_endpoint(): + api_override = "foo.com" + mock_client_cert_source = mock.Mock() + default_universe = DataplexServiceClient._DEFAULT_UNIVERSE + default_endpoint = DataplexServiceClient._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=default_universe) + mock_universe = "bar.com" + mock_endpoint = DataplexServiceClient._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=mock_universe) + + assert DataplexServiceClient._get_api_endpoint(api_override, mock_client_cert_source, default_universe, "always") == api_override + assert DataplexServiceClient._get_api_endpoint(None, mock_client_cert_source, default_universe, "auto") == DataplexServiceClient.DEFAULT_MTLS_ENDPOINT + assert DataplexServiceClient._get_api_endpoint(None, None, default_universe, "auto") == default_endpoint + assert DataplexServiceClient._get_api_endpoint(None, None, default_universe, "always") == DataplexServiceClient.DEFAULT_MTLS_ENDPOINT + assert DataplexServiceClient._get_api_endpoint(None, mock_client_cert_source, default_universe, "always") == DataplexServiceClient.DEFAULT_MTLS_ENDPOINT + assert DataplexServiceClient._get_api_endpoint(None, None, mock_universe, "never") == mock_endpoint + assert DataplexServiceClient._get_api_endpoint(None, None, default_universe, "never") == default_endpoint + + with pytest.raises(MutualTLSChannelError) as excinfo: + DataplexServiceClient._get_api_endpoint(None, mock_client_cert_source, mock_universe, "auto") + assert str(excinfo.value) == "mTLS is not supported in any universe other than googleapis.com." + + +def test__get_universe_domain(): + client_universe_domain = "foo.com" + universe_domain_env = "bar.com" + + assert DataplexServiceClient._get_universe_domain(client_universe_domain, universe_domain_env) == client_universe_domain + assert DataplexServiceClient._get_universe_domain(None, universe_domain_env) == universe_domain_env + assert DataplexServiceClient._get_universe_domain(None, None) == DataplexServiceClient._DEFAULT_UNIVERSE + + with pytest.raises(ValueError) as excinfo: + DataplexServiceClient._get_universe_domain("", None) + assert str(excinfo.value) == "Universe Domain cannot be an empty string." 
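+ # Resolution precedence verified above: explicit client option, then the environment value, then the library default.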
+ + +@pytest.mark.parametrize("client_class,transport_name", [ + (DataplexServiceClient, "grpc"), + (DataplexServiceAsyncClient, "grpc_asyncio"), +]) +def test_dataplex_service_client_from_service_account_info(client_class, transport_name): + creds = ga_credentials.AnonymousCredentials() + with mock.patch.object(service_account.Credentials, 'from_service_account_info') as factory: + factory.return_value = creds + info = {"valid": True} + client = client_class.from_service_account_info(info, transport=transport_name) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + assert client.transport._host == ( + 'dataplex.googleapis.com:443' + ) + + +@pytest.mark.parametrize("transport_class,transport_name", [ + (transports.DataplexServiceGrpcTransport, "grpc"), + (transports.DataplexServiceGrpcAsyncIOTransport, "grpc_asyncio"), +]) +def test_dataplex_service_client_service_account_always_use_jwt(transport_class, transport_name): + with mock.patch.object(service_account.Credentials, 'with_always_use_jwt_access', create=True) as use_jwt: + creds = service_account.Credentials(None, None, None) + transport = transport_class(credentials=creds, always_use_jwt_access=True) + use_jwt.assert_called_once_with(True) + + with mock.patch.object(service_account.Credentials, 'with_always_use_jwt_access', create=True) as use_jwt: + creds = service_account.Credentials(None, None, None) + transport = transport_class(credentials=creds, always_use_jwt_access=False) + use_jwt.assert_not_called() + + +@pytest.mark.parametrize("client_class,transport_name", [ + (DataplexServiceClient, "grpc"), + (DataplexServiceAsyncClient, "grpc_asyncio"), +]) +def test_dataplex_service_client_from_service_account_file(client_class, transport_name): + creds = ga_credentials.AnonymousCredentials() + with mock.patch.object(service_account.Credentials, 'from_service_account_file') as factory: + factory.return_value = creds + client = client_class.from_service_account_file("dummy/file/path.json", transport=transport_name) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + client = client_class.from_service_account_json("dummy/file/path.json", transport=transport_name) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + assert client.transport._host == ( + 'dataplex.googleapis.com:443' + ) + + +def test_dataplex_service_client_get_transport_class(): + transport = DataplexServiceClient.get_transport_class() + available_transports = [ + transports.DataplexServiceGrpcTransport, + ] + assert transport in available_transports + + transport = DataplexServiceClient.get_transport_class("grpc") + assert transport == transports.DataplexServiceGrpcTransport + + +@pytest.mark.parametrize("client_class,transport_class,transport_name", [ + (DataplexServiceClient, transports.DataplexServiceGrpcTransport, "grpc"), + (DataplexServiceAsyncClient, transports.DataplexServiceGrpcAsyncIOTransport, "grpc_asyncio"), +]) +@mock.patch.object(DataplexServiceClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(DataplexServiceClient)) +@mock.patch.object(DataplexServiceAsyncClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(DataplexServiceAsyncClient)) +def test_dataplex_service_client_client_options(client_class, transport_class, transport_name): + # Check that if channel is provided we won't create a new one. 
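+ # ("Channel" here means a fully constructed transport instance supplied by the caller.)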
+ with mock.patch.object(DataplexServiceClient, 'get_transport_class') as gtc: + transport = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ) + client = client_class(transport=transport) + gtc.assert_not_called() + + # Check that if channel is provided via str we will create a new one. + with mock.patch.object(DataplexServiceClient, 'get_transport_class') as gtc: + client = client_class(transport=transport_name) + gtc.assert_called() + + # Check the case api_endpoint is provided. + options = client_options.ClientOptions(api_endpoint="squid.clam.whelk") + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(transport=transport_name, client_options=options) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host="squid.clam.whelk", + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is + # "never". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is + # "always". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_MTLS_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has + # unsupported value. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): + with pytest.raises(MutualTLSChannelError) as excinfo: + client = client_class(transport=transport_name) + assert str(excinfo.value) == "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + + # Check the case GOOGLE_API_USE_CLIENT_CERTIFICATE has unsupported value. 
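+ # Only the literal strings "true" and "false" are accepted for this variable.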
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"}): + with pytest.raises(ValueError) as excinfo: + client = client_class(transport=transport_name) + assert str(excinfo.value) == "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" + + # Check the case quota_project_id is provided + options = client_options.ClientOptions(quota_project_id="octopus") + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id="octopus", + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + # Check the case api_endpoint is provided + options = client_options.ClientOptions(api_audience="https://language.googleapis.com") + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience="https://language.googleapis.com" + ) + +@pytest.mark.parametrize("client_class,transport_class,transport_name,use_client_cert_env", [ + (DataplexServiceClient, transports.DataplexServiceGrpcTransport, "grpc", "true"), + (DataplexServiceAsyncClient, transports.DataplexServiceGrpcAsyncIOTransport, "grpc_asyncio", "true"), + (DataplexServiceClient, transports.DataplexServiceGrpcTransport, "grpc", "false"), + (DataplexServiceAsyncClient, transports.DataplexServiceGrpcAsyncIOTransport, "grpc_asyncio", "false"), +]) +@mock.patch.object(DataplexServiceClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(DataplexServiceClient)) +@mock.patch.object(DataplexServiceAsyncClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(DataplexServiceAsyncClient)) +@mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"}) +def test_dataplex_service_client_mtls_env_auto(client_class, transport_class, transport_name, use_client_cert_env): + # This tests the endpoint autoswitch behavior. Endpoint is autoswitched to the default + # mtls endpoint, if GOOGLE_API_USE_CLIENT_CERTIFICATE is "true" and client cert exists. + + # Check the case client_cert_source is provided. Whether client cert is used depends on + # GOOGLE_API_USE_CLIENT_CERTIFICATE value. 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}): + options = client_options.ClientOptions(client_cert_source=client_cert_source_callback) + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + + if use_client_cert_env == "false": + expected_client_cert_source = None + expected_host = client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE) + else: + expected_client_cert_source = client_cert_source_callback + expected_host = client.DEFAULT_MTLS_ENDPOINT + + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=expected_host, + scopes=None, + client_cert_source_for_mtls=expected_client_cert_source, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case ADC client cert is provided. Whether client cert is used depends on + # GOOGLE_API_USE_CLIENT_CERTIFICATE value. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}): + with mock.patch.object(transport_class, '__init__') as patched: + with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=True): + with mock.patch('google.auth.transport.mtls.default_client_cert_source', return_value=client_cert_source_callback): + if use_client_cert_env == "false": + expected_host = client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE) + expected_client_cert_source = None + else: + expected_host = client.DEFAULT_MTLS_ENDPOINT + expected_client_cert_source = client_cert_source_callback + + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=expected_host, + scopes=None, + client_cert_source_for_mtls=expected_client_cert_source, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case client_cert_source and ADC client cert are not provided. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}): + with mock.patch.object(transport_class, '__init__') as patched: + with mock.patch("google.auth.transport.mtls.has_default_client_cert_source", return_value=False): + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +@pytest.mark.parametrize("client_class", [ + DataplexServiceClient, DataplexServiceAsyncClient +]) +@mock.patch.object(DataplexServiceClient, "DEFAULT_ENDPOINT", modify_default_endpoint(DataplexServiceClient)) +@mock.patch.object(DataplexServiceAsyncClient, "DEFAULT_ENDPOINT", modify_default_endpoint(DataplexServiceAsyncClient)) +def test_dataplex_service_client_get_mtls_endpoint_and_cert_source(client_class): + mock_client_cert_source = mock.Mock() + + # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "true". 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + mock_api_endpoint = "foo" + options = client_options.ClientOptions(client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint) + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source(options) + assert api_endpoint == mock_api_endpoint + assert cert_source == mock_client_cert_source + + # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "false". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "false"}): + mock_client_cert_source = mock.Mock() + mock_api_endpoint = "foo" + options = client_options.ClientOptions(client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint) + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source(options) + assert api_endpoint == mock_api_endpoint + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "never". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "always". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert doesn't exist. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=False): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert exists. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=True): + with mock.patch('google.auth.transport.mtls.default_client_cert_source', return_value=mock_client_cert_source): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT + assert cert_source == mock_client_cert_source + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has + # unsupported value. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): + with pytest.raises(MutualTLSChannelError) as excinfo: + client_class.get_mtls_endpoint_and_cert_source() + + assert str(excinfo.value) == "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + + # Check the case GOOGLE_API_USE_CLIENT_CERTIFICATE has unsupported value. 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"}): + with pytest.raises(ValueError) as excinfo: + client_class.get_mtls_endpoint_and_cert_source() + + assert str(excinfo.value) == "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" + +@pytest.mark.parametrize("client_class", [ + DataplexServiceClient, DataplexServiceAsyncClient +]) +@mock.patch.object(DataplexServiceClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(DataplexServiceClient)) +@mock.patch.object(DataplexServiceAsyncClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(DataplexServiceAsyncClient)) +def test_dataplex_service_client_client_api_endpoint(client_class): + mock_client_cert_source = client_cert_source_callback + api_override = "foo.com" + default_universe = DataplexServiceClient._DEFAULT_UNIVERSE + default_endpoint = DataplexServiceClient._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=default_universe) + mock_universe = "bar.com" + mock_endpoint = DataplexServiceClient._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=mock_universe) + + # If ClientOptions.api_endpoint is set and GOOGLE_API_USE_CLIENT_CERTIFICATE="true", + # use ClientOptions.api_endpoint as the api endpoint regardless. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + with mock.patch("google.auth.transport.requests.AuthorizedSession.configure_mtls_channel"): + options = client_options.ClientOptions(client_cert_source=mock_client_cert_source, api_endpoint=api_override) + client = client_class(client_options=options, credentials=ga_credentials.AnonymousCredentials()) + assert client.api_endpoint == api_override + + # If ClientOptions.api_endpoint is not set and GOOGLE_API_USE_MTLS_ENDPOINT="never", + # use the _DEFAULT_ENDPOINT_TEMPLATE populated with GDU as the api endpoint. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + client = client_class(credentials=ga_credentials.AnonymousCredentials()) + assert client.api_endpoint == default_endpoint + + # If ClientOptions.api_endpoint is not set and GOOGLE_API_USE_MTLS_ENDPOINT="always", + # use the DEFAULT_MTLS_ENDPOINT as the api endpoint. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + client = client_class(credentials=ga_credentials.AnonymousCredentials()) + assert client.api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT + + # If ClientOptions.api_endpoint is not set, GOOGLE_API_USE_MTLS_ENDPOINT="auto" (default), + # GOOGLE_API_USE_CLIENT_CERTIFICATE="false" (default), default cert source doesn't exist, + # and ClientOptions.universe_domain="bar.com", + # use the _DEFAULT_ENDPOINT_TEMPLATE populated with universe domain as the api endpoint. 
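Among the cases above, the override rule is the one to remember: an explicit `ClientOptions.api_endpoint` wins over the mTLS environment variables. A minimal sketch (the endpoint value is a placeholder):

    from google.api_core import client_options
    from google.auth.credentials import AnonymousCredentials
    from google.cloud.dataplex_v1 import DataplexServiceClient

    options = client_options.ClientOptions(api_endpoint="dataplex.example.com")
    client = DataplexServiceClient(client_options=options,
                                   credentials=AnonymousCredentials())
    assert client.api_endpoint == "dataplex.example.com"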
+ options = client_options.ClientOptions()
+ universe_exists = hasattr(options, "universe_domain")
+ if universe_exists:
+ options = client_options.ClientOptions(universe_domain=mock_universe)
+ client = client_class(client_options=options, credentials=ga_credentials.AnonymousCredentials())
+ assert client.api_endpoint == (mock_endpoint if universe_exists else default_endpoint)
+ assert client.universe_domain == (mock_universe if universe_exists else default_universe)
+
+ # If ClientOptions does not have a universe domain attribute and GOOGLE_API_USE_MTLS_ENDPOINT="never",
+ # use the _DEFAULT_ENDPOINT_TEMPLATE populated with GDU as the api endpoint.
+ options = client_options.ClientOptions()
+ if hasattr(options, "universe_domain"):
+ delattr(options, "universe_domain")
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}):
+ client = client_class(client_options=options, credentials=ga_credentials.AnonymousCredentials())
+ assert client.api_endpoint == default_endpoint
+
+
+@pytest.mark.parametrize("client_class,transport_class,transport_name", [
+ (DataplexServiceClient, transports.DataplexServiceGrpcTransport, "grpc"),
+ (DataplexServiceAsyncClient, transports.DataplexServiceGrpcAsyncIOTransport, "grpc_asyncio"),
+])
+def test_dataplex_service_client_client_options_scopes(client_class, transport_class, transport_name):
+ # Check the case scopes are provided.
+ options = client_options.ClientOptions(
+ scopes=["1", "2"],
+ )
+ with mock.patch.object(transport_class, '__init__') as patched:
+ patched.return_value = None
+ client = client_class(client_options=options, transport=transport_name)
+ patched.assert_called_once_with(
+ credentials=None,
+ credentials_file=None,
+ host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE),
+ scopes=["1", "2"],
+ client_cert_source_for_mtls=None,
+ quota_project_id=None,
+ client_info=transports.base.DEFAULT_CLIENT_INFO,
+ always_use_jwt_access=True,
+ api_audience=None,
+ )
+
+@pytest.mark.parametrize("client_class,transport_class,transport_name,grpc_helpers", [
+ (DataplexServiceClient, transports.DataplexServiceGrpcTransport, "grpc", grpc_helpers),
+ (DataplexServiceAsyncClient, transports.DataplexServiceGrpcAsyncIOTransport, "grpc_asyncio", grpc_helpers_async),
+])
+def test_dataplex_service_client_client_options_credentials_file(client_class, transport_class, transport_name, grpc_helpers):
+ # Check the case credentials file is provided.
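On `google-api-core` versions whose `ClientOptions` supports `universe_domain`, the override feeds the same endpoint template the test above formats by hand. A sketch, with `example-universe.net` as a placeholder and assuming the default `dataplex.{UNIVERSE_DOMAIN}` template:

    from google.api_core import client_options
    from google.auth.credentials import AnonymousCredentials
    from google.cloud.dataplex_v1 import DataplexServiceClient

    options = client_options.ClientOptions(universe_domain="example-universe.net")
    client = DataplexServiceClient(client_options=options,
                                   credentials=AnonymousCredentials())
    assert client.universe_domain == "example-universe.net"
    # The resolved endpoint would then be "dataplex.example-universe.net".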
+ options = client_options.ClientOptions( + credentials_file="credentials.json" + ) + + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file="credentials.json", + host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + +def test_dataplex_service_client_client_options_from_dict(): + with mock.patch('google.cloud.dataplex_v1.services.dataplex_service.transports.DataplexServiceGrpcTransport.__init__') as grpc_transport: + grpc_transport.return_value = None + client = DataplexServiceClient( + client_options={'api_endpoint': 'squid.clam.whelk'} + ) + grpc_transport.assert_called_once_with( + credentials=None, + credentials_file=None, + host="squid.clam.whelk", + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +@pytest.mark.parametrize("client_class,transport_class,transport_name,grpc_helpers", [ + (DataplexServiceClient, transports.DataplexServiceGrpcTransport, "grpc", grpc_helpers), + (DataplexServiceAsyncClient, transports.DataplexServiceGrpcAsyncIOTransport, "grpc_asyncio", grpc_helpers_async), +]) +def test_dataplex_service_client_create_channel_credentials_file(client_class, transport_class, transport_name, grpc_helpers): + # Check the case credentials file is provided. + options = client_options.ClientOptions( + credentials_file="credentials.json" + ) + + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file="credentials.json", + host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # test that the credentials from file are saved and used as the credentials. 
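As `test_dataplex_service_client_client_options_from_dict` above shows, `client_options` may be a plain mapping as well as a `ClientOptions` instance, so the `credentials_file` option can be wired up compactly (a sketch; the path is a placeholder):

    from google.cloud.dataplex_v1 import DataplexServiceClient

    client = DataplexServiceClient(
        client_options={"credentials_file": "credentials.json"},
    )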
+ with mock.patch.object(
+ google.auth, "load_credentials_from_file", autospec=True
+ ) as load_creds, mock.patch.object(
+ google.auth, "default", autospec=True
+ ) as adc, mock.patch.object(
+ grpc_helpers, "create_channel"
+ ) as create_channel:
+ creds = ga_credentials.AnonymousCredentials()
+ file_creds = ga_credentials.AnonymousCredentials()
+ load_creds.return_value = (file_creds, None)
+ adc.return_value = (creds, None)
+ client = client_class(client_options=options, transport=transport_name)
+ create_channel.assert_called_with(
+ "dataplex.googleapis.com:443",
+ credentials=file_creds,
+ credentials_file=None,
+ quota_project_id=None,
+ default_scopes=(
+ 'https://www.googleapis.com/auth/cloud-platform',
+ ),
+ scopes=None,
+ default_host="dataplex.googleapis.com",
+ ssl_credentials=None,
+ options=[
+ ("grpc.max_send_message_length", -1),
+ ("grpc.max_receive_message_length", -1),
+ ],
+ )
+
+
+@pytest.mark.parametrize("request_type", [
+ service.CreateLakeRequest,
+ dict,
+])
+def test_create_lake(request_type, transport: str = 'grpc'):
+ client = DataplexServiceClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ transport=transport,
+ )
+
+ # Everything is optional in proto3 as far as the runtime is concerned,
+ # and we are mocking out the actual API, so just send an empty request.
+ request = request_type()
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(
+ type(client.transport.create_lake),
+ '__call__') as call:
+ # Designate an appropriate return value for the call.
+ call.return_value = operations_pb2.Operation(name='operations/spam')
+ response = client.create_lake(request)
+
+ # Establish that the underlying gRPC stub method was called.
+ assert len(call.mock_calls) == 1
+ _, args, _ = call.mock_calls[0]
+ request = service.CreateLakeRequest()
+ assert args[0] == request
+
+ # Establish that the response is the type that we expect.
+ assert isinstance(response, future.Future)
+
+
+def test_create_lake_non_empty_request_with_auto_populated_field():
+ # This test is a coverage failsafe to make sure that UUID4 fields are
+ # automatically populated, according to AIP-4235, with non-empty requests.
+ client = DataplexServiceClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ transport='grpc',
+ )
+
+ # Populate all string fields in the request which are not UUID4
+ # since we want to check that UUID4 are populated automatically
+ # if they meet the requirements of AIP 4235.
+ request = service.CreateLakeRequest(
+ parent='parent_value',
+ lake_id='lake_id_value',
+ )
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(
+ type(client.transport.create_lake),
+ '__call__') as call:
+ call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string.
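`test_create_lake` above asserts that the RPC surfaces as an operation future rather than a finished `Lake`. Against a live service that reads roughly as follows; a sketch, where project, location, and ids are placeholders:

    from google.cloud import dataplex_v1

    client = dataplex_v1.DataplexServiceClient()
    operation = client.create_lake(
        parent="projects/my-project/locations/us-central1",
        lake_id="my-lake",
        lake=dataplex_v1.Lake(display_name="My lake"),
    )
    lake = operation.result()  # blocks until the long-running operation completes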
+ client.create_lake(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == service.CreateLakeRequest( + parent='parent_value', + lake_id='lake_id_value', + ) + +def test_create_lake_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = DataplexServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.create_lake in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client._transport._wrapped_methods[client._transport.create_lake] = mock_rpc + request = {} + client.create_lake(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.create_lake(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + +@pytest.mark.asyncio +async def test_create_lake_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = DataplexServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._client._transport.create_lake in client._client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[client._client._transport.create_lake] = mock_rpc + + request = {} + await client.create_lake(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + await client.create_lake(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + +@pytest.mark.asyncio +async def test_create_lake_async(transport: str = 'grpc_asyncio', request_type=service.CreateLakeRequest): + client = DataplexServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. 
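The two caching tests above patch `wrap_method` to prove that wrappers are built once in `_prep_wrapped_messages` and then reused from `_wrapped_methods`. What wrapping itself buys, in isolation, is sketched below against `google.api_core` behavior; `bare_rpc` is a hypothetical stand-in:

    from google.api_core import gapic_v1
    from google.api_core import retry as retries

    def bare_rpc(request, **kwargs):
        # Stand-in for a raw transport stub method.
        return "response"

    wrapped = gapic_v1.method.wrap_method(
        bare_rpc,
        default_retry=retries.Retry(),
        default_timeout=60.0,
    )
    wrapped(request={})  # default retry/timeout/metadata now apply per call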
+ with mock.patch.object( + type(client.transport.create_lake), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name='operations/spam') + ) + response = await client.create_lake(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = service.CreateLakeRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +@pytest.mark.asyncio +async def test_create_lake_async_from_dict(): + await test_create_lake_async(request_type=dict) + +def test_create_lake_field_headers(): + client = DataplexServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = service.CreateLakeRequest() + + request.parent = 'parent_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_lake), + '__call__') as call: + call.return_value = operations_pb2.Operation(name='operations/op') + client.create_lake(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'parent=parent_value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_create_lake_field_headers_async(): + client = DataplexServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = service.CreateLakeRequest() + + request.parent = 'parent_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_lake), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(operations_pb2.Operation(name='operations/op')) + await client.create_lake(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'parent=parent_value', + ) in kw['metadata'] + + +def test_create_lake_flattened(): + client = DataplexServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_lake), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name='operations/op') + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.create_lake( + parent='parent_value', + lake=resources.Lake(name='name_value'), + lake_id='lake_id_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. 
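Every field-header test in this file reduces to the same mechanism: routing fields from the request are serialized into a single `x-goog-request-params` metadata entry. The helper the generated client uses can be invoked directly:

    from google.api_core import gapic_v1

    metadata = gapic_v1.routing_header.to_grpc_metadata((("parent", "parent_value"),))
    assert metadata == ("x-goog-request-params", "parent=parent_value")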
+ assert len(call.mock_calls) == 1
+ _, args, _ = call.mock_calls[0]
+ arg = args[0].parent
+ mock_val = 'parent_value'
+ assert arg == mock_val
+ arg = args[0].lake
+ mock_val = resources.Lake(name='name_value')
+ assert arg == mock_val
+ arg = args[0].lake_id
+ mock_val = 'lake_id_value'
+ assert arg == mock_val
+
+
+def test_create_lake_flattened_error():
+ client = DataplexServiceClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ )
+
+ # Attempting to call a method with both a request object and flattened
+ # fields is an error.
+ with pytest.raises(ValueError):
+ client.create_lake(
+ service.CreateLakeRequest(),
+ parent='parent_value',
+ lake=resources.Lake(name='name_value'),
+ lake_id='lake_id_value',
+ )
+
+@pytest.mark.asyncio
+async def test_create_lake_flattened_async():
+ client = DataplexServiceAsyncClient(
+ credentials=async_anonymous_credentials(),
+ )
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(
+ type(client.transport.create_lake),
+ '__call__') as call:
+ # Designate an appropriate return value for the call.
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
+ operations_pb2.Operation(name='operations/spam')
+ )
+ # Call the method with a truthy value for each flattened field,
+ # using the keyword arguments to the method.
+ response = await client.create_lake(
+ parent='parent_value',
+ lake=resources.Lake(name='name_value'),
+ lake_id='lake_id_value',
+ )
+
+ # Establish that the underlying call was made with the expected
+ # request object values.
+ assert len(call.mock_calls)
+ _, args, _ = call.mock_calls[0]
+ arg = args[0].parent
+ mock_val = 'parent_value'
+ assert arg == mock_val
+ arg = args[0].lake
+ mock_val = resources.Lake(name='name_value')
+ assert arg == mock_val
+ arg = args[0].lake_id
+ mock_val = 'lake_id_value'
+ assert arg == mock_val
+
+@pytest.mark.asyncio
+async def test_create_lake_flattened_error_async():
+ client = DataplexServiceAsyncClient(
+ credentials=async_anonymous_credentials(),
+ )
+
+ # Attempting to call a method with both a request object and flattened
+ # fields is an error.
+ with pytest.raises(ValueError):
+ await client.create_lake(
+ service.CreateLakeRequest(),
+ parent='parent_value',
+ lake=resources.Lake(name='name_value'),
+ lake_id='lake_id_value',
+ )
+
+
+@pytest.mark.parametrize("request_type", [
+ service.UpdateLakeRequest,
+ dict,
+])
+def test_update_lake(request_type, transport: str = 'grpc'):
+ client = DataplexServiceClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ transport=transport,
+ )
+
+ # Everything is optional in proto3 as far as the runtime is concerned,
+ # and we are mocking out the actual API, so just send an empty request.
+ request = request_type()
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(
+ type(client.transport.update_lake),
+ '__call__') as call:
+ # Designate an appropriate return value for the call.
+ call.return_value = operations_pb2.Operation(name='operations/spam')
+ response = client.update_lake(request)
+
+ # Establish that the underlying gRPC stub method was called.
+ assert len(call.mock_calls) == 1
+ _, args, _ = call.mock_calls[0]
+ request = service.UpdateLakeRequest()
+ assert args[0] == request
+
+ # Establish that the response is the type that we expect.
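The `*_flattened_error` tests encode the calling convention for every method in this file: pass a request object or flattened keyword fields, never both. A sketch (resource names are placeholders):

    from google.cloud import dataplex_v1

    client = dataplex_v1.DataplexServiceClient()
    parent = "projects/my-project/locations/us-central1"

    # Request-object style:
    client.create_lake(
        request=dataplex_v1.CreateLakeRequest(parent=parent, lake_id="my-lake"),
    )

    # Flattened style (equivalent); mixing the two raises ValueError:
    client.create_lake(parent=parent, lake_id="my-lake", lake=dataplex_v1.Lake())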
+ assert isinstance(response, future.Future) + + +def test_update_lake_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = DataplexServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = service.UpdateLakeRequest( + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_lake), + '__call__') as call: + call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client.update_lake(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == service.UpdateLakeRequest( + ) + +def test_update_lake_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = DataplexServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.update_lake in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client._transport._wrapped_methods[client._transport.update_lake] = mock_rpc + request = {} + client.update_lake(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.update_lake(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + +@pytest.mark.asyncio +async def test_update_lake_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = DataplexServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._client._transport.update_lake in client._client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[client._client._transport.update_lake] = mock_rpc + + request = {} + await client.update_lake(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + await client.update_lake(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + +@pytest.mark.asyncio +async def test_update_lake_async(transport: str = 'grpc_asyncio', request_type=service.UpdateLakeRequest): + client = DataplexServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_lake), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name='operations/spam') + ) + response = await client.update_lake(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = service.UpdateLakeRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +@pytest.mark.asyncio +async def test_update_lake_async_from_dict(): + await test_update_lake_async(request_type=dict) + +def test_update_lake_field_headers(): + client = DataplexServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = service.UpdateLakeRequest() + + request.lake.name = 'name_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_lake), + '__call__') as call: + call.return_value = operations_pb2.Operation(name='operations/op') + client.update_lake(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'lake.name=name_value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_update_lake_field_headers_async(): + client = DataplexServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = service.UpdateLakeRequest() + + request.lake.name = 'name_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_lake), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(operations_pb2.Operation(name='operations/op')) + await client.update_lake(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. 
+ _, _, kw = call.mock_calls[0]
+ assert (
+ 'x-goog-request-params',
+ 'lake.name=name_value',
+ ) in kw['metadata']
+
+
+def test_update_lake_flattened():
+ client = DataplexServiceClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ )
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(
+ type(client.transport.update_lake),
+ '__call__') as call:
+ # Designate an appropriate return value for the call.
+ call.return_value = operations_pb2.Operation(name='operations/op')
+ # Call the method with a truthy value for each flattened field,
+ # using the keyword arguments to the method.
+ client.update_lake(
+ lake=resources.Lake(name='name_value'),
+ update_mask=field_mask_pb2.FieldMask(paths=['paths_value']),
+ )
+
+ # Establish that the underlying call was made with the expected
+ # request object values.
+ assert len(call.mock_calls) == 1
+ _, args, _ = call.mock_calls[0]
+ arg = args[0].lake
+ mock_val = resources.Lake(name='name_value')
+ assert arg == mock_val
+ arg = args[0].update_mask
+ mock_val = field_mask_pb2.FieldMask(paths=['paths_value'])
+ assert arg == mock_val
+
+
+def test_update_lake_flattened_error():
+ client = DataplexServiceClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ )
+
+ # Attempting to call a method with both a request object and flattened
+ # fields is an error.
+ with pytest.raises(ValueError):
+ client.update_lake(
+ service.UpdateLakeRequest(),
+ lake=resources.Lake(name='name_value'),
+ update_mask=field_mask_pb2.FieldMask(paths=['paths_value']),
+ )
+
+@pytest.mark.asyncio
+async def test_update_lake_flattened_async():
+ client = DataplexServiceAsyncClient(
+ credentials=async_anonymous_credentials(),
+ )
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(
+ type(client.transport.update_lake),
+ '__call__') as call:
+ # Designate an appropriate return value for the call.
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
+ operations_pb2.Operation(name='operations/spam')
+ )
+ # Call the method with a truthy value for each flattened field,
+ # using the keyword arguments to the method.
+ response = await client.update_lake(
+ lake=resources.Lake(name='name_value'),
+ update_mask=field_mask_pb2.FieldMask(paths=['paths_value']),
+ )
+
+ # Establish that the underlying call was made with the expected
+ # request object values.
+ assert len(call.mock_calls)
+ _, args, _ = call.mock_calls[0]
+ arg = args[0].lake
+ mock_val = resources.Lake(name='name_value')
+ assert arg == mock_val
+ arg = args[0].update_mask
+ mock_val = field_mask_pb2.FieldMask(paths=['paths_value'])
+ assert arg == mock_val
+
+@pytest.mark.asyncio
+async def test_update_lake_flattened_error_async():
+ client = DataplexServiceAsyncClient(
+ credentials=async_anonymous_credentials(),
+ )
+
+ # Attempting to call a method with both a request object and flattened
+ # fields is an error.
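The flattened update signature pairs the new resource state with a `FieldMask` naming the fields to change, and `lake.name` doubles as the routing key, as the header tests above check. In user code (placeholders throughout):

    from google.protobuf import field_mask_pb2

    from google.cloud import dataplex_v1

    client = dataplex_v1.DataplexServiceClient()
    operation = client.update_lake(
        lake=dataplex_v1.Lake(
            name="projects/my-project/locations/us-central1/lakes/my-lake",
            display_name="Renamed lake",
        ),
        update_mask=field_mask_pb2.FieldMask(paths=["display_name"]),
    )
    updated = operation.result()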
+ with pytest.raises(ValueError): + await client.update_lake( + service.UpdateLakeRequest(), + lake=resources.Lake(name='name_value'), + update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), + ) + + +@pytest.mark.parametrize("request_type", [ + service.DeleteLakeRequest, + dict, +]) +def test_delete_lake(request_type, transport: str = 'grpc'): + client = DataplexServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_lake), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name='operations/spam') + response = client.delete_lake(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = service.DeleteLakeRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +def test_delete_lake_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = DataplexServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = service.DeleteLakeRequest( + name='name_value', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_lake), + '__call__') as call: + call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client.delete_lake(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == service.DeleteLakeRequest( + name='name_value', + ) + +def test_delete_lake_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = DataplexServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.delete_lake in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client._transport._wrapped_methods[client._transport.delete_lake] = mock_rpc + request = {} + client.delete_lake(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.delete_lake(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + +@pytest.mark.asyncio +async def test_delete_lake_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = DataplexServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._client._transport.delete_lake in client._client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[client._client._transport.delete_lake] = mock_rpc + + request = {} + await client.delete_lake(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + await client.delete_lake(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + +@pytest.mark.asyncio +async def test_delete_lake_async(transport: str = 'grpc_asyncio', request_type=service.DeleteLakeRequest): + client = DataplexServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_lake), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name='operations/spam') + ) + response = await client.delete_lake(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = service.DeleteLakeRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +@pytest.mark.asyncio +async def test_delete_lake_async_from_dict(): + await test_delete_lake_async(request_type=dict) + +def test_delete_lake_field_headers(): + client = DataplexServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = service.DeleteLakeRequest() + + request.name = 'name_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_lake), + '__call__') as call: + call.return_value = operations_pb2.Operation(name='operations/op') + client.delete_lake(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert len(call.mock_calls) == 1
+ _, args, _ = call.mock_calls[0]
+ assert args[0] == request
+
+ # Establish that the field header was sent.
+ _, _, kw = call.mock_calls[0]
+ assert (
+ 'x-goog-request-params',
+ 'name=name_value',
+ ) in kw['metadata']
+
+
+@pytest.mark.asyncio
+async def test_delete_lake_field_headers_async():
+ client = DataplexServiceAsyncClient(
+ credentials=async_anonymous_credentials(),
+ )
+
+ # Any value that is part of the HTTP/1.1 URI should be sent as
+ # a field header. Set these to a non-empty value.
+ request = service.DeleteLakeRequest()
+
+ request.name = 'name_value'
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(
+ type(client.transport.delete_lake),
+ '__call__') as call:
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(operations_pb2.Operation(name='operations/op'))
+ await client.delete_lake(request)
+
+ # Establish that the underlying gRPC stub method was called.
+ assert len(call.mock_calls)
+ _, args, _ = call.mock_calls[0]
+ assert args[0] == request
+
+ # Establish that the field header was sent.
+ _, _, kw = call.mock_calls[0]
+ assert (
+ 'x-goog-request-params',
+ 'name=name_value',
+ ) in kw['metadata']
+
+
+def test_delete_lake_flattened():
+ client = DataplexServiceClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ )
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(
+ type(client.transport.delete_lake),
+ '__call__') as call:
+ # Designate an appropriate return value for the call.
+ call.return_value = operations_pb2.Operation(name='operations/op')
+ # Call the method with a truthy value for each flattened field,
+ # using the keyword arguments to the method.
+ client.delete_lake(
+ name='name_value',
+ )
+
+ # Establish that the underlying call was made with the expected
+ # request object values.
+ assert len(call.mock_calls) == 1
+ _, args, _ = call.mock_calls[0]
+ arg = args[0].name
+ mock_val = 'name_value'
+ assert arg == mock_val
+
+
+def test_delete_lake_flattened_error():
+ client = DataplexServiceClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ )
+
+ # Attempting to call a method with both a request object and flattened
+ # fields is an error.
+ with pytest.raises(ValueError):
+ client.delete_lake(
+ service.DeleteLakeRequest(),
+ name='name_value',
+ )
+
+@pytest.mark.asyncio
+async def test_delete_lake_flattened_async():
+ client = DataplexServiceAsyncClient(
+ credentials=async_anonymous_credentials(),
+ )
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(
+ type(client.transport.delete_lake),
+ '__call__') as call:
+ # Designate an appropriate return value for the call.
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
+ operations_pb2.Operation(name='operations/spam')
+ )
+ # Call the method with a truthy value for each flattened field,
+ # using the keyword arguments to the method.
+ response = await client.delete_lake(
+ name='name_value',
+ )
+
+ # Establish that the underlying call was made with the expected
+ # request object values.
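Deletion takes only the resource `name` and, like the other mutations here, returns an operation future. A sketch; the name is a placeholder, and the note that a delete LRO resolves to an empty payload is the usual convention rather than something this file asserts:

    from google.cloud import dataplex_v1

    client = dataplex_v1.DataplexServiceClient()
    operation = client.delete_lake(
        name="projects/my-project/locations/us-central1/lakes/my-lake",
    )
    operation.result()  # resolves once the lake is gone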
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = 'name_value' + assert arg == mock_val + +@pytest.mark.asyncio +async def test_delete_lake_flattened_error_async(): + client = DataplexServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.delete_lake( + service.DeleteLakeRequest(), + name='name_value', + ) + + +@pytest.mark.parametrize("request_type", [ + service.ListLakesRequest, + dict, +]) +def test_list_lakes(request_type, transport: str = 'grpc'): + client = DataplexServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_lakes), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = service.ListLakesResponse( + next_page_token='next_page_token_value', + unreachable_locations=['unreachable_locations_value'], + ) + response = client.list_lakes(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = service.ListLakesRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListLakesPager) + assert response.next_page_token == 'next_page_token_value' + assert response.unreachable_locations == ['unreachable_locations_value'] + + +def test_list_lakes_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = DataplexServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = service.ListLakesRequest( + parent='parent_value', + page_token='page_token_value', + filter='filter_value', + order_by='order_by_value', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_lakes), + '__call__') as call: + call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. 
+ client.list_lakes(request=request)
+ call.assert_called()
+ _, args, _ = call.mock_calls[0]
+ assert args[0] == service.ListLakesRequest(
+ parent='parent_value',
+ page_token='page_token_value',
+ filter='filter_value',
+ order_by='order_by_value',
+ )
+
+def test_list_lakes_use_cached_wrapped_rpc():
+ # Clients should use _prep_wrapped_messages to create cached wrapped rpcs,
+ # instead of constructing them on each call
+ with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn:
+ client = DataplexServiceClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ transport="grpc",
+ )
+
+ # Should wrap all calls on client creation
+ assert wrapper_fn.call_count > 0
+ wrapper_fn.reset_mock()
+
+ # Ensure method has been cached
+ assert client._transport.list_lakes in client._transport._wrapped_methods
+
+ # Replace cached wrapped function with mock
+ mock_rpc = mock.Mock()
+ mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string.
+ client._transport._wrapped_methods[client._transport.list_lakes] = mock_rpc
+ request = {}
+ client.list_lakes(request)
+
+ # Establish that the underlying gRPC stub method was called.
+ assert mock_rpc.call_count == 1
+
+ client.list_lakes(request)
+
+ # Establish that a new wrapper was not created for this call
+ assert wrapper_fn.call_count == 0
+ assert mock_rpc.call_count == 2
+
+@pytest.mark.asyncio
+async def test_list_lakes_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"):
+ # Clients should use _prep_wrapped_messages to create cached wrapped rpcs,
+ # instead of constructing them on each call
+ with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn:
+ client = DataplexServiceAsyncClient(
+ credentials=async_anonymous_credentials(),
+ transport=transport,
+ )
+
+ # Should wrap all calls on client creation
+ assert wrapper_fn.call_count > 0
+ wrapper_fn.reset_mock()
+
+ # Ensure method has been cached
+ assert client._client._transport.list_lakes in client._client._transport._wrapped_methods
+
+ # Replace cached wrapped function with mock
+ mock_rpc = mock.AsyncMock()
+ mock_rpc.return_value = mock.Mock()
+ client._client._transport._wrapped_methods[client._client._transport.list_lakes] = mock_rpc
+
+ request = {}
+ await client.list_lakes(request)
+
+ # Establish that the underlying gRPC stub method was called.
+ assert mock_rpc.call_count == 1
+
+ await client.list_lakes(request)
+
+ # Establish that a new wrapper was not created for this call
+ assert wrapper_fn.call_count == 0
+ assert mock_rpc.call_count == 2
+
+@pytest.mark.asyncio
+async def test_list_lakes_async(transport: str = 'grpc_asyncio', request_type=service.ListLakesRequest):
+ client = DataplexServiceAsyncClient(
+ credentials=async_anonymous_credentials(),
+ transport=transport,
+ )
+
+ # Everything is optional in proto3 as far as the runtime is concerned,
+ # and we are mocking out the actual API, so just send an empty request.
+ request = request_type()
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(
+ type(client.transport.list_lakes),
+ '__call__') as call:
+ # Designate an appropriate return value for the call.
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(service.ListLakesResponse(
+ next_page_token='next_page_token_value',
+ unreachable_locations=['unreachable_locations_value'],
+ ))
+ response = await client.list_lakes(request)
+
+ # Establish that the underlying gRPC stub method was called.
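Beyond `parent`, the list request carries paging and server-side `filter`/`order_by` fields, and unreachable locations are reported rather than silently dropped. A sketch; the filter expression is hypothetical:

    from google.cloud import dataplex_v1

    client = dataplex_v1.DataplexServiceClient()
    response = client.list_lakes(
        request=dataplex_v1.ListLakesRequest(
            parent="projects/my-project/locations/us-central1",
            filter='labels.env="prod"',  # hypothetical filter expression
            order_by="name",
        )
    )
    print(response.unreachable_locations)  # proxied from the first page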
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = service.ListLakesRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListLakesAsyncPager) + assert response.next_page_token == 'next_page_token_value' + assert response.unreachable_locations == ['unreachable_locations_value'] + + +@pytest.mark.asyncio +async def test_list_lakes_async_from_dict(): + await test_list_lakes_async(request_type=dict) + +def test_list_lakes_field_headers(): + client = DataplexServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = service.ListLakesRequest() + + request.parent = 'parent_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_lakes), + '__call__') as call: + call.return_value = service.ListLakesResponse() + client.list_lakes(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'parent=parent_value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_list_lakes_field_headers_async(): + client = DataplexServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = service.ListLakesRequest() + + request.parent = 'parent_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_lakes), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(service.ListLakesResponse()) + await client.list_lakes(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'parent=parent_value', + ) in kw['metadata'] + + +def test_list_lakes_flattened(): + client = DataplexServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_lakes), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = service.ListLakesResponse() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.list_lakes( + parent='parent_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = 'parent_value' + assert arg == mock_val + + +def test_list_lakes_flattened_error(): + client = DataplexServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError):
+ client.list_lakes(
+ service.ListLakesRequest(),
+ parent='parent_value',
+ )
+
+@pytest.mark.asyncio
+async def test_list_lakes_flattened_async():
+ client = DataplexServiceAsyncClient(
+ credentials=async_anonymous_credentials(),
+ )
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(
+ type(client.transport.list_lakes),
+ '__call__') as call:
+ # Designate an appropriate return value for the call.
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(service.ListLakesResponse())
+ # Call the method with a truthy value for each flattened field,
+ # using the keyword arguments to the method.
+ response = await client.list_lakes(
+ parent='parent_value',
+ )
+
+ # Establish that the underlying call was made with the expected
+ # request object values.
+ assert len(call.mock_calls)
+ _, args, _ = call.mock_calls[0]
+ arg = args[0].parent
+ mock_val = 'parent_value'
+ assert arg == mock_val
+
+@pytest.mark.asyncio
+async def test_list_lakes_flattened_error_async():
+ client = DataplexServiceAsyncClient(
+ credentials=async_anonymous_credentials(),
+ )
+
+ # Attempting to call a method with both a request object and flattened
+ # fields is an error.
+ with pytest.raises(ValueError):
+ await client.list_lakes(
+ service.ListLakesRequest(),
+ parent='parent_value',
+ )
+
+
+def test_list_lakes_pager(transport_name: str = "grpc"):
+ client = DataplexServiceClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ transport=transport_name,
+ )
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(
+ type(client.transport.list_lakes),
+ '__call__') as call:
+ # Set the response to a series of pages.
+ call.side_effect = (
+ service.ListLakesResponse(
+ lakes=[
+ resources.Lake(),
+ resources.Lake(),
+ resources.Lake(),
+ ],
+ next_page_token='abc',
+ ),
+ service.ListLakesResponse(
+ lakes=[],
+ next_page_token='def',
+ ),
+ service.ListLakesResponse(
+ lakes=[
+ resources.Lake(),
+ ],
+ next_page_token='ghi',
+ ),
+ service.ListLakesResponse(
+ lakes=[
+ resources.Lake(),
+ resources.Lake(),
+ ],
+ ),
+ RuntimeError,
+ )
+
+ expected_metadata = ()
+ retry = retries.Retry()
+ timeout = 5
+ expected_metadata = tuple(expected_metadata) + (
+ gapic_v1.routing_header.to_grpc_metadata((
+ ('parent', ''),
+ )),
+ )
+ pager = client.list_lakes(request={}, retry=retry, timeout=timeout)
+
+ assert pager._metadata == expected_metadata
+ assert pager._retry == retry
+ assert pager._timeout == timeout
+
+ results = list(pager)
+ assert len(results) == 6
+ assert all(isinstance(i, resources.Lake)
+ for i in results)
+
+def test_list_lakes_pages(transport_name: str = "grpc"):
+ client = DataplexServiceClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ transport=transport_name,
+ )
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(
+ type(client.transport.list_lakes),
+ '__call__') as call:
+ # Set the response to a series of pages.
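`test_list_lakes_pager` and `test_list_lakes_pages` above pin down the two iteration modes the returned pager supports; in user code (placeholder parent):

    from google.cloud import dataplex_v1

    client = dataplex_v1.DataplexServiceClient()
    parent = "projects/my-project/locations/us-central1"

    # Item-at-a-time: follow-up pages are fetched transparently.
    for lake in client.list_lakes(parent=parent):
        print(lake.name)

    # Page-at-a-time, mirroring the raw ListLakesResponse messages.
    for page in client.list_lakes(parent=parent).pages:
        print(page.next_page_token)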
+ call.side_effect = ( + service.ListLakesResponse( + lakes=[ + resources.Lake(), + resources.Lake(), + resources.Lake(), + ], + next_page_token='abc', + ), + service.ListLakesResponse( + lakes=[], + next_page_token='def', + ), + service.ListLakesResponse( + lakes=[ + resources.Lake(), + ], + next_page_token='ghi', + ), + service.ListLakesResponse( + lakes=[ + resources.Lake(), + resources.Lake(), + ], + ), + RuntimeError, + ) + pages = list(client.list_lakes(request={}).pages) + for page_, token in zip(pages, ['abc','def','ghi', '']): + assert page_.raw_page.next_page_token == token + +@pytest.mark.asyncio +async def test_list_lakes_async_pager(): + client = DataplexServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_lakes), + '__call__', new_callable=mock.AsyncMock) as call: + # Set the response to a series of pages. + call.side_effect = ( + service.ListLakesResponse( + lakes=[ + resources.Lake(), + resources.Lake(), + resources.Lake(), + ], + next_page_token='abc', + ), + service.ListLakesResponse( + lakes=[], + next_page_token='def', + ), + service.ListLakesResponse( + lakes=[ + resources.Lake(), + ], + next_page_token='ghi', + ), + service.ListLakesResponse( + lakes=[ + resources.Lake(), + resources.Lake(), + ], + ), + RuntimeError, + ) + async_pager = await client.list_lakes(request={},) + assert async_pager.next_page_token == 'abc' + responses = [] + async for response in async_pager: # pragma: no branch + responses.append(response) + + assert len(responses) == 6 + assert all(isinstance(i, resources.Lake) + for i in responses) + + +@pytest.mark.asyncio +async def test_list_lakes_async_pages(): + client = DataplexServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_lakes), + '__call__', new_callable=mock.AsyncMock) as call: + # Set the response to a series of pages. + call.side_effect = ( + service.ListLakesResponse( + lakes=[ + resources.Lake(), + resources.Lake(), + resources.Lake(), + ], + next_page_token='abc', + ), + service.ListLakesResponse( + lakes=[], + next_page_token='def', + ), + service.ListLakesResponse( + lakes=[ + resources.Lake(), + ], + next_page_token='ghi', + ), + service.ListLakesResponse( + lakes=[ + resources.Lake(), + resources.Lake(), + ], + ), + RuntimeError, + ) + pages = [] + # Workaround issue in python 3.9 related to code coverage by adding `# pragma: no branch` + # See https://github.com/googleapis/gapic-generator-python/pull/1174#issuecomment-1025132372 + async for page_ in ( # pragma: no branch + await client.list_lakes(request={}) + ).pages: + pages.append(page_) + for page_, token in zip(pages, ['abc','def','ghi', '']): + assert page_.raw_page.next_page_token == token + +@pytest.mark.parametrize("request_type", [ + service.GetLakeRequest, + dict, +]) +def test_get_lake(request_type, transport: str = 'grpc'): + client = DataplexServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. 
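The async variants above make the same guarantees with `async for`; note that the async client's `list_lakes` must itself be awaited before iterating. A sketch:

    import asyncio

    from google.cloud import dataplex_v1

    async def main():
        client = dataplex_v1.DataplexServiceAsyncClient()
        pager = await client.list_lakes(
            parent="projects/my-project/locations/us-central1",
        )
        async for lake in pager:  # pages are fetched on demand
            print(lake.name)

    asyncio.run(main())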
+ with mock.patch.object( + type(client.transport.get_lake), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = resources.Lake( + name='name_value', + display_name='display_name_value', + uid='uid_value', + description='description_value', + state=resources.State.ACTIVE, + service_account='service_account_value', + ) + response = client.get_lake(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = service.GetLakeRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, resources.Lake) + assert response.name == 'name_value' + assert response.display_name == 'display_name_value' + assert response.uid == 'uid_value' + assert response.description == 'description_value' + assert response.state == resources.State.ACTIVE + assert response.service_account == 'service_account_value' + + +def test_get_lake_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = DataplexServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = service.GetLakeRequest( + name='name_value', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_lake), + '__call__') as call: + call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client.get_lake(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == service.GetLakeRequest( + name='name_value', + ) + +def test_get_lake_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = DataplexServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.get_lake in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client._transport._wrapped_methods[client._transport.get_lake] = mock_rpc + request = {} + client.get_lake(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1
+
+ client.get_lake(request)
+
+ # Establish that a new wrapper was not created for this call
+ assert wrapper_fn.call_count == 0
+ assert mock_rpc.call_count == 2
+
+@pytest.mark.asyncio
+async def test_get_lake_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"):
+ # Clients should use _prep_wrapped_messages to create cached wrapped rpcs,
+ # instead of constructing them on each call
+ with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn:
+ client = DataplexServiceAsyncClient(
+ credentials=async_anonymous_credentials(),
+ transport=transport,
+ )
+
+ # Should wrap all calls on client creation
+ assert wrapper_fn.call_count > 0
+ wrapper_fn.reset_mock()
+
+ # Ensure method has been cached
+ assert client._client._transport.get_lake in client._client._transport._wrapped_methods
+
+ # Replace cached wrapped function with mock
+ mock_rpc = mock.AsyncMock()
+ mock_rpc.return_value = mock.Mock()
+ client._client._transport._wrapped_methods[client._client._transport.get_lake] = mock_rpc
+
+ request = {}
+ await client.get_lake(request)
+
+ # Establish that the underlying gRPC stub method was called.
+ assert mock_rpc.call_count == 1
+
+ await client.get_lake(request)
+
+ # Establish that a new wrapper was not created for this call
+ assert wrapper_fn.call_count == 0
+ assert mock_rpc.call_count == 2
+
+@pytest.mark.asyncio
+async def test_get_lake_async(transport: str = 'grpc_asyncio', request_type=service.GetLakeRequest):
+ client = DataplexServiceAsyncClient(
+ credentials=async_anonymous_credentials(),
+ transport=transport,
+ )
+
+ # Everything is optional in proto3 as far as the runtime is concerned,
+ # and we are mocking out the actual API, so just send an empty request.
+ request = request_type()
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(
+ type(client.transport.get_lake),
+ '__call__') as call:
+ # Designate an appropriate return value for the call.
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(resources.Lake(
+ name='name_value',
+ display_name='display_name_value',
+ uid='uid_value',
+ description='description_value',
+ state=resources.State.ACTIVE,
+ service_account='service_account_value',
+ ))
+ response = await client.get_lake(request)
+
+ # Establish that the underlying gRPC stub method was called.
+ assert len(call.mock_calls)
+ _, args, _ = call.mock_calls[0]
+ request = service.GetLakeRequest()
+ assert args[0] == request
+
+ # Establish that the response is the type that we expect.
+ assert isinstance(response, resources.Lake)
+ assert response.name == 'name_value'
+ assert response.display_name == 'display_name_value'
+ assert response.uid == 'uid_value'
+ assert response.description == 'description_value'
+ assert response.state == resources.State.ACTIVE
+ assert response.service_account == 'service_account_value'
+
+
+@pytest.mark.asyncio
+async def test_get_lake_async_from_dict():
+ await test_get_lake_async(request_type=dict)
+
+def test_get_lake_field_headers():
+ client = DataplexServiceClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ )
+
+ # Any value that is part of the HTTP/1.1 URI should be sent as
+ # a field header. Set these to a non-empty value.
+ request = service.GetLakeRequest()
+
+ request.name = 'name_value'
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(
+ type(client.transport.get_lake),
+ '__call__') as call:
+ call.return_value = resources.Lake()
+ client.get_lake(request)
+
+ # Establish that the underlying gRPC stub method was called.
+ assert len(call.mock_calls) == 1
+ _, args, _ = call.mock_calls[0]
+ assert args[0] == request
+
+ # Establish that the field header was sent.
+ _, _, kw = call.mock_calls[0]
+ assert (
+ 'x-goog-request-params',
+ 'name=name_value',
+ ) in kw['metadata']
+
+
+@pytest.mark.asyncio
+async def test_get_lake_field_headers_async():
+ client = DataplexServiceAsyncClient(
+ credentials=async_anonymous_credentials(),
+ )
+
+ # Any value that is part of the HTTP/1.1 URI should be sent as
+ # a field header. Set these to a non-empty value.
+ request = service.GetLakeRequest()
+
+ request.name = 'name_value'
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(
+ type(client.transport.get_lake),
+ '__call__') as call:
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(resources.Lake())
+ await client.get_lake(request)
+
+ # Establish that the underlying gRPC stub method was called.
+ assert len(call.mock_calls)
+ _, args, _ = call.mock_calls[0]
+ assert args[0] == request
+
+ # Establish that the field header was sent.
+ _, _, kw = call.mock_calls[0]
+ assert (
+ 'x-goog-request-params',
+ 'name=name_value',
+ ) in kw['metadata']
+
+
+def test_get_lake_flattened():
+ client = DataplexServiceClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ )
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(
+ type(client.transport.get_lake),
+ '__call__') as call:
+ # Designate an appropriate return value for the call.
+ call.return_value = resources.Lake()
+ # Call the method with a truthy value for each flattened field,
+ # using the keyword arguments to the method.
+ client.get_lake(
+ name='name_value',
+ )
+
+ # Establish that the underlying call was made with the expected
+ # request object values.
+ assert len(call.mock_calls) == 1
+ _, args, _ = call.mock_calls[0]
+ arg = args[0].name
+ mock_val = 'name_value'
+ assert arg == mock_val
+
+
+def test_get_lake_flattened_error():
+ client = DataplexServiceClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ )
+
+ # Attempting to call a method with both a request object and flattened
+ # fields is an error.
+ with pytest.raises(ValueError):
+ client.get_lake(
+ service.GetLakeRequest(),
+ name='name_value',
+ )
+
+@pytest.mark.asyncio
+async def test_get_lake_flattened_async():
+ client = DataplexServiceAsyncClient(
+ credentials=async_anonymous_credentials(),
+ )
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(
+ type(client.transport.get_lake),
+ '__call__') as call:
+ # Designate an appropriate return value for the call.
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(resources.Lake())
+ # Call the method with a truthy value for each flattened field,
+ # using the keyword arguments to the method.
+ response = await client.get_lake(
+ name='name_value',
+ )
+
+ # Establish that the underlying call was made with the expected
+ # request object values.
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = 'name_value' + assert arg == mock_val + +@pytest.mark.asyncio +async def test_get_lake_flattened_error_async(): + client = DataplexServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.get_lake( + service.GetLakeRequest(), + name='name_value', + ) + + +@pytest.mark.parametrize("request_type", [ + service.ListLakeActionsRequest, + dict, +]) +def test_list_lake_actions(request_type, transport: str = 'grpc'): + client = DataplexServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_lake_actions), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = service.ListActionsResponse( + next_page_token='next_page_token_value', + ) + response = client.list_lake_actions(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = service.ListLakeActionsRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListLakeActionsPager) + assert response.next_page_token == 'next_page_token_value' + + +def test_list_lake_actions_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = DataplexServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = service.ListLakeActionsRequest( + parent='parent_value', + page_token='page_token_value', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_lake_actions), + '__call__') as call: + call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. 
+ client.list_lake_actions(request=request)
+ call.assert_called()
+ _, args, _ = call.mock_calls[0]
+ assert args[0] == service.ListLakeActionsRequest(
+ parent='parent_value',
+ page_token='page_token_value',
+ )
+
+def test_list_lake_actions_use_cached_wrapped_rpc():
+ # Clients should use _prep_wrapped_messages to create cached wrapped rpcs,
+ # instead of constructing them on each call
+ with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn:
+ client = DataplexServiceClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ transport="grpc",
+ )
+
+ # Should wrap all calls on client creation
+ assert wrapper_fn.call_count > 0
+ wrapper_fn.reset_mock()
+
+ # Ensure method has been cached
+ assert client._transport.list_lake_actions in client._transport._wrapped_methods
+
+ # Replace cached wrapped function with mock
+ mock_rpc = mock.Mock()
+ mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string.
+ client._transport._wrapped_methods[client._transport.list_lake_actions] = mock_rpc
+ request = {}
+ client.list_lake_actions(request)
+
+ # Establish that the underlying gRPC stub method was called.
+ assert mock_rpc.call_count == 1
+
+ client.list_lake_actions(request)
+
+ # Establish that a new wrapper was not created for this call
+ assert wrapper_fn.call_count == 0
+ assert mock_rpc.call_count == 2
+
+@pytest.mark.asyncio
+async def test_list_lake_actions_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"):
+ # Clients should use _prep_wrapped_messages to create cached wrapped rpcs,
+ # instead of constructing them on each call
+ with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn:
+ client = DataplexServiceAsyncClient(
+ credentials=async_anonymous_credentials(),
+ transport=transport,
+ )
+
+ # Should wrap all calls on client creation
+ assert wrapper_fn.call_count > 0
+ wrapper_fn.reset_mock()
+
+ # Ensure method has been cached
+ assert client._client._transport.list_lake_actions in client._client._transport._wrapped_methods
+
+ # Replace cached wrapped function with mock
+ mock_rpc = mock.AsyncMock()
+ mock_rpc.return_value = mock.Mock()
+ client._client._transport._wrapped_methods[client._client._transport.list_lake_actions] = mock_rpc
+
+ request = {}
+ await client.list_lake_actions(request)
+
+ # Establish that the underlying gRPC stub method was called.
+ assert mock_rpc.call_count == 1
+
+ await client.list_lake_actions(request)
+
+ # Establish that a new wrapper was not created for this call
+ assert wrapper_fn.call_count == 0
+ assert mock_rpc.call_count == 2
+
+@pytest.mark.asyncio
+async def test_list_lake_actions_async(transport: str = 'grpc_asyncio', request_type=service.ListLakeActionsRequest):
+ client = DataplexServiceAsyncClient(
+ credentials=async_anonymous_credentials(),
+ transport=transport,
+ )
+
+ # Everything is optional in proto3 as far as the runtime is concerned,
+ # and we are mocking out the actual API, so just send an empty request.
+ request = request_type()
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(
+ type(client.transport.list_lake_actions),
+ '__call__') as call:
+ # Designate an appropriate return value for the call.
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(service.ListActionsResponse(
+ next_page_token='next_page_token_value',
+ ))
+ response = await client.list_lake_actions(request)
+
+ # Establish that the underlying gRPC stub method was called.
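+ # For the async client, assert only that at least one call was recorded.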
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = service.ListLakeActionsRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListLakeActionsAsyncPager) + assert response.next_page_token == 'next_page_token_value' + + +@pytest.mark.asyncio +async def test_list_lake_actions_async_from_dict(): + await test_list_lake_actions_async(request_type=dict) + +def test_list_lake_actions_field_headers(): + client = DataplexServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = service.ListLakeActionsRequest() + + request.parent = 'parent_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_lake_actions), + '__call__') as call: + call.return_value = service.ListActionsResponse() + client.list_lake_actions(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'parent=parent_value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_list_lake_actions_field_headers_async(): + client = DataplexServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = service.ListLakeActionsRequest() + + request.parent = 'parent_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_lake_actions), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(service.ListActionsResponse()) + await client.list_lake_actions(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'parent=parent_value', + ) in kw['metadata'] + + +def test_list_lake_actions_flattened(): + client = DataplexServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_lake_actions), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = service.ListActionsResponse() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.list_lake_actions( + parent='parent_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = 'parent_value' + assert arg == mock_val + + +def test_list_lake_actions_flattened_error(): + client = DataplexServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
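+ # The generated client enforces mutual exclusion between the request
+ # object and the flattened keyword arguments.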
+ with pytest.raises(ValueError):
+ client.list_lake_actions(
+ service.ListLakeActionsRequest(),
+ parent='parent_value',
+ )
+
+@pytest.mark.asyncio
+async def test_list_lake_actions_flattened_async():
+ client = DataplexServiceAsyncClient(
+ credentials=async_anonymous_credentials(),
+ )
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(
+ type(client.transport.list_lake_actions),
+ '__call__') as call:
+ # Designate an appropriate return value for the call.
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(service.ListActionsResponse())
+ # Call the method with a truthy value for each flattened field,
+ # using the keyword arguments to the method.
+ response = await client.list_lake_actions(
+ parent='parent_value',
+ )
+
+ # Establish that the underlying call was made with the expected
+ # request object values.
+ assert len(call.mock_calls)
+ _, args, _ = call.mock_calls[0]
+ arg = args[0].parent
+ mock_val = 'parent_value'
+ assert arg == mock_val
+
+@pytest.mark.asyncio
+async def test_list_lake_actions_flattened_error_async():
+ client = DataplexServiceAsyncClient(
+ credentials=async_anonymous_credentials(),
+ )
+
+ # Attempting to call a method with both a request object and flattened
+ # fields is an error.
+ with pytest.raises(ValueError):
+ await client.list_lake_actions(
+ service.ListLakeActionsRequest(),
+ parent='parent_value',
+ )
+
+
+def test_list_lake_actions_pager(transport_name: str = "grpc"):
+ client = DataplexServiceClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ transport=transport_name,
+ )
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(
+ type(client.transport.list_lake_actions),
+ '__call__') as call:
+ # Set the response to a series of pages.
+ call.side_effect = (
+ service.ListActionsResponse(
+ actions=[
+ resources.Action(),
+ resources.Action(),
+ resources.Action(),
+ ],
+ next_page_token='abc',
+ ),
+ service.ListActionsResponse(
+ actions=[],
+ next_page_token='def',
+ ),
+ service.ListActionsResponse(
+ actions=[
+ resources.Action(),
+ ],
+ next_page_token='ghi',
+ ),
+ service.ListActionsResponse(
+ actions=[
+ resources.Action(),
+ resources.Action(),
+ ],
+ ),
+ RuntimeError,
+ )
+
+ expected_metadata = ()
+ retry = retries.Retry()
+ timeout = 5
+ expected_metadata = tuple(expected_metadata) + (
+ gapic_v1.routing_header.to_grpc_metadata((
+ ('parent', ''),
+ )),
+ )
+ pager = client.list_lake_actions(request={}, retry=retry, timeout=timeout)
+
+ assert pager._metadata == expected_metadata
+ assert pager._retry == retry
+ assert pager._timeout == timeout
+
+ results = list(pager)
+ assert len(results) == 6
+ assert all(isinstance(i, resources.Action)
+ for i in results)
+
+
+def test_list_lake_actions_pages(transport_name: str = "grpc"):
+ client = DataplexServiceClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ transport=transport_name,
+ )
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(
+ type(client.transport.list_lake_actions),
+ '__call__') as call:
+ # Set the response to a series of pages.
+ call.side_effect = ( + service.ListActionsResponse( + actions=[ + resources.Action(), + resources.Action(), + resources.Action(), + ], + next_page_token='abc', + ), + service.ListActionsResponse( + actions=[], + next_page_token='def', + ), + service.ListActionsResponse( + actions=[ + resources.Action(), + ], + next_page_token='ghi', + ), + service.ListActionsResponse( + actions=[ + resources.Action(), + resources.Action(), + ], + ), + RuntimeError, + ) + pages = list(client.list_lake_actions(request={}).pages) + for page_, token in zip(pages, ['abc','def','ghi', '']): + assert page_.raw_page.next_page_token == token + +@pytest.mark.asyncio +async def test_list_lake_actions_async_pager(): + client = DataplexServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_lake_actions), + '__call__', new_callable=mock.AsyncMock) as call: + # Set the response to a series of pages. + call.side_effect = ( + service.ListActionsResponse( + actions=[ + resources.Action(), + resources.Action(), + resources.Action(), + ], + next_page_token='abc', + ), + service.ListActionsResponse( + actions=[], + next_page_token='def', + ), + service.ListActionsResponse( + actions=[ + resources.Action(), + ], + next_page_token='ghi', + ), + service.ListActionsResponse( + actions=[ + resources.Action(), + resources.Action(), + ], + ), + RuntimeError, + ) + async_pager = await client.list_lake_actions(request={},) + assert async_pager.next_page_token == 'abc' + responses = [] + async for response in async_pager: # pragma: no branch + responses.append(response) + + assert len(responses) == 6 + assert all(isinstance(i, resources.Action) + for i in responses) + + +@pytest.mark.asyncio +async def test_list_lake_actions_async_pages(): + client = DataplexServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_lake_actions), + '__call__', new_callable=mock.AsyncMock) as call: + # Set the response to a series of pages. + call.side_effect = ( + service.ListActionsResponse( + actions=[ + resources.Action(), + resources.Action(), + resources.Action(), + ], + next_page_token='abc', + ), + service.ListActionsResponse( + actions=[], + next_page_token='def', + ), + service.ListActionsResponse( + actions=[ + resources.Action(), + ], + next_page_token='ghi', + ), + service.ListActionsResponse( + actions=[ + resources.Action(), + resources.Action(), + ], + ), + RuntimeError, + ) + pages = [] + # Workaround issue in python 3.9 related to code coverage by adding `# pragma: no branch` + # See https://github.com/googleapis/gapic-generator-python/pull/1174#issuecomment-1025132372 + async for page_ in ( # pragma: no branch + await client.list_lake_actions(request={}) + ).pages: + pages.append(page_) + for page_, token in zip(pages, ['abc','def','ghi', '']): + assert page_.raw_page.next_page_token == token + +@pytest.mark.parametrize("request_type", [ + service.CreateZoneRequest, + dict, +]) +def test_create_zone(request_type, transport: str = 'grpc'): + client = DataplexServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. 
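+ # request_type is parametrized above as either the proto message class
+ # or dict, so both request styles are exercised.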
+ request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_zone), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name='operations/spam') + response = client.create_zone(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = service.CreateZoneRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +def test_create_zone_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = DataplexServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = service.CreateZoneRequest( + parent='parent_value', + zone_id='zone_id_value', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_zone), + '__call__') as call: + call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client.create_zone(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == service.CreateZoneRequest( + parent='parent_value', + zone_id='zone_id_value', + ) + +def test_create_zone_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = DataplexServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.create_zone in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client._transport._wrapped_methods[client._transport.create_zone] = mock_rpc + request = {} + client.create_zone(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.create_zone(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + +@pytest.mark.asyncio +async def test_create_zone_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = DataplexServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._client._transport.create_zone in client._client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[client._client._transport.create_zone] = mock_rpc + + request = {} + await client.create_zone(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + await client.create_zone(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + +@pytest.mark.asyncio +async def test_create_zone_async(transport: str = 'grpc_asyncio', request_type=service.CreateZoneRequest): + client = DataplexServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_zone), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name='operations/spam') + ) + response = await client.create_zone(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = service.CreateZoneRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +@pytest.mark.asyncio +async def test_create_zone_async_from_dict(): + await test_create_zone_async(request_type=dict) + +def test_create_zone_field_headers(): + client = DataplexServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = service.CreateZoneRequest() + + request.parent = 'parent_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_zone), + '__call__') as call: + call.return_value = operations_pb2.Operation(name='operations/op') + client.create_zone(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert len(call.mock_calls) == 1
+ _, args, _ = call.mock_calls[0]
+ assert args[0] == request
+
+ # Establish that the field header was sent.
+ _, _, kw = call.mock_calls[0]
+ assert (
+ 'x-goog-request-params',
+ 'parent=parent_value',
+ ) in kw['metadata']
+
+
+@pytest.mark.asyncio
+async def test_create_zone_field_headers_async():
+ client = DataplexServiceAsyncClient(
+ credentials=async_anonymous_credentials(),
+ )
+
+ # Any value that is part of the HTTP/1.1 URI should be sent as
+ # a field header. Set these to a non-empty value.
+ request = service.CreateZoneRequest()
+
+ request.parent = 'parent_value'
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(
+ type(client.transport.create_zone),
+ '__call__') as call:
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(operations_pb2.Operation(name='operations/op'))
+ await client.create_zone(request)
+
+ # Establish that the underlying gRPC stub method was called.
+ assert len(call.mock_calls)
+ _, args, _ = call.mock_calls[0]
+ assert args[0] == request
+
+ # Establish that the field header was sent.
+ _, _, kw = call.mock_calls[0]
+ assert (
+ 'x-goog-request-params',
+ 'parent=parent_value',
+ ) in kw['metadata']
+
+
+def test_create_zone_flattened():
+ client = DataplexServiceClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ )
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(
+ type(client.transport.create_zone),
+ '__call__') as call:
+ # Designate an appropriate return value for the call.
+ call.return_value = operations_pb2.Operation(name='operations/op')
+ # Call the method with a truthy value for each flattened field,
+ # using the keyword arguments to the method.
+ client.create_zone(
+ parent='parent_value',
+ zone=resources.Zone(name='name_value'),
+ zone_id='zone_id_value',
+ )
+
+ # Establish that the underlying call was made with the expected
+ # request object values.
+ assert len(call.mock_calls) == 1
+ _, args, _ = call.mock_calls[0]
+ arg = args[0].parent
+ mock_val = 'parent_value'
+ assert arg == mock_val
+ arg = args[0].zone
+ mock_val = resources.Zone(name='name_value')
+ assert arg == mock_val
+ arg = args[0].zone_id
+ mock_val = 'zone_id_value'
+ assert arg == mock_val
+
+
+def test_create_zone_flattened_error():
+ client = DataplexServiceClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ )
+
+ # Attempting to call a method with both a request object and flattened
+ # fields is an error.
+ with pytest.raises(ValueError):
+ client.create_zone(
+ service.CreateZoneRequest(),
+ parent='parent_value',
+ zone=resources.Zone(name='name_value'),
+ zone_id='zone_id_value',
+ )
+
+@pytest.mark.asyncio
+async def test_create_zone_flattened_async():
+ client = DataplexServiceAsyncClient(
+ credentials=async_anonymous_credentials(),
+ )
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(
+ type(client.transport.create_zone),
+ '__call__') as call:
+ # Designate an appropriate return value for the call.
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
+ operations_pb2.Operation(name='operations/spam')
+ )
+ # Call the method with a truthy value for each flattened field,
+ # using the keyword arguments to the method.
+ response = await client.create_zone( + parent='parent_value', + zone=resources.Zone(name='name_value'), + zone_id='zone_id_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = 'parent_value' + assert arg == mock_val + arg = args[0].zone + mock_val = resources.Zone(name='name_value') + assert arg == mock_val + arg = args[0].zone_id + mock_val = 'zone_id_value' + assert arg == mock_val + +@pytest.mark.asyncio +async def test_create_zone_flattened_error_async(): + client = DataplexServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.create_zone( + service.CreateZoneRequest(), + parent='parent_value', + zone=resources.Zone(name='name_value'), + zone_id='zone_id_value', + ) + + +@pytest.mark.parametrize("request_type", [ + service.UpdateZoneRequest, + dict, +]) +def test_update_zone(request_type, transport: str = 'grpc'): + client = DataplexServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_zone), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name='operations/spam') + response = client.update_zone(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = service.UpdateZoneRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +def test_update_zone_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = DataplexServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = service.UpdateZoneRequest( + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_zone), + '__call__') as call: + call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. 
+ client.update_zone(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == service.UpdateZoneRequest( + ) + +def test_update_zone_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = DataplexServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.update_zone in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client._transport._wrapped_methods[client._transport.update_zone] = mock_rpc + request = {} + client.update_zone(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.update_zone(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + +@pytest.mark.asyncio +async def test_update_zone_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = DataplexServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._client._transport.update_zone in client._client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[client._client._transport.update_zone] = mock_rpc + + request = {} + await client.update_zone(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + await client.update_zone(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + +@pytest.mark.asyncio +async def test_update_zone_async(transport: str = 'grpc_asyncio', request_type=service.UpdateZoneRequest): + client = DataplexServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_zone), + '__call__') as call: + # Designate an appropriate return value for the call. 
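+ # FakeUnaryUnaryCall wraps the response in an awaitable, mimicking a
+ # real async gRPC unary-unary call.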
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name='operations/spam') + ) + response = await client.update_zone(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = service.UpdateZoneRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +@pytest.mark.asyncio +async def test_update_zone_async_from_dict(): + await test_update_zone_async(request_type=dict) + +def test_update_zone_field_headers(): + client = DataplexServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = service.UpdateZoneRequest() + + request.zone.name = 'name_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_zone), + '__call__') as call: + call.return_value = operations_pb2.Operation(name='operations/op') + client.update_zone(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'zone.name=name_value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_update_zone_field_headers_async(): + client = DataplexServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = service.UpdateZoneRequest() + + request.zone.name = 'name_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_zone), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(operations_pb2.Operation(name='operations/op')) + await client.update_zone(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'zone.name=name_value', + ) in kw['metadata'] + + +def test_update_zone_flattened(): + client = DataplexServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_zone), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name='operations/op') + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.update_zone( + zone=resources.Zone(name='name_value'), + update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), + ) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(call.mock_calls) == 1
+ _, args, _ = call.mock_calls[0]
+ arg = args[0].zone
+ mock_val = resources.Zone(name='name_value')
+ assert arg == mock_val
+ arg = args[0].update_mask
+ mock_val = field_mask_pb2.FieldMask(paths=['paths_value'])
+ assert arg == mock_val
+
+
+def test_update_zone_flattened_error():
+ client = DataplexServiceClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ )
+
+ # Attempting to call a method with both a request object and flattened
+ # fields is an error.
+ with pytest.raises(ValueError):
+ client.update_zone(
+ service.UpdateZoneRequest(),
+ zone=resources.Zone(name='name_value'),
+ update_mask=field_mask_pb2.FieldMask(paths=['paths_value']),
+ )
+
+@pytest.mark.asyncio
+async def test_update_zone_flattened_async():
+ client = DataplexServiceAsyncClient(
+ credentials=async_anonymous_credentials(),
+ )
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(
+ type(client.transport.update_zone),
+ '__call__') as call:
+ # Designate an appropriate return value for the call.
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
+ operations_pb2.Operation(name='operations/spam')
+ )
+ # Call the method with a truthy value for each flattened field,
+ # using the keyword arguments to the method.
+ response = await client.update_zone(
+ zone=resources.Zone(name='name_value'),
+ update_mask=field_mask_pb2.FieldMask(paths=['paths_value']),
+ )
+
+ # Establish that the underlying call was made with the expected
+ # request object values.
+ assert len(call.mock_calls)
+ _, args, _ = call.mock_calls[0]
+ arg = args[0].zone
+ mock_val = resources.Zone(name='name_value')
+ assert arg == mock_val
+ arg = args[0].update_mask
+ mock_val = field_mask_pb2.FieldMask(paths=['paths_value'])
+ assert arg == mock_val
+
+@pytest.mark.asyncio
+async def test_update_zone_flattened_error_async():
+ client = DataplexServiceAsyncClient(
+ credentials=async_anonymous_credentials(),
+ )
+
+ # Attempting to call a method with both a request object and flattened
+ # fields is an error.
+ with pytest.raises(ValueError):
+ await client.update_zone(
+ service.UpdateZoneRequest(),
+ zone=resources.Zone(name='name_value'),
+ update_mask=field_mask_pb2.FieldMask(paths=['paths_value']),
+ )
+
+
+@pytest.mark.parametrize("request_type", [
+ service.DeleteZoneRequest,
+ dict,
+])
+def test_delete_zone(request_type, transport: str = 'grpc'):
+ client = DataplexServiceClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ transport=transport,
+ )
+
+ # Everything is optional in proto3 as far as the runtime is concerned,
+ # and we are mocking out the actual API, so just send an empty request.
+ request = request_type()
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(
+ type(client.transport.delete_zone),
+ '__call__') as call:
+ # Designate an appropriate return value for the call.
+ call.return_value = operations_pb2.Operation(name='operations/spam')
+ response = client.delete_zone(request)
+
+ # Establish that the underlying gRPC stub method was called.
+ assert len(call.mock_calls) == 1
+ _, args, _ = call.mock_calls[0]
+ request = service.DeleteZoneRequest()
+ assert args[0] == request
+
+ # Establish that the response is the type that we expect.
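+ # Long-running methods return an operation future, not the resource itself.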
+ assert isinstance(response, future.Future) + + +def test_delete_zone_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = DataplexServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = service.DeleteZoneRequest( + name='name_value', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_zone), + '__call__') as call: + call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client.delete_zone(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == service.DeleteZoneRequest( + name='name_value', + ) + +def test_delete_zone_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = DataplexServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.delete_zone in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client._transport._wrapped_methods[client._transport.delete_zone] = mock_rpc + request = {} + client.delete_zone(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.delete_zone(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + +@pytest.mark.asyncio +async def test_delete_zone_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = DataplexServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._client._transport.delete_zone in client._client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[client._client._transport.delete_zone] = mock_rpc + + request = {} + await client.delete_zone(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + await client.delete_zone(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + +@pytest.mark.asyncio +async def test_delete_zone_async(transport: str = 'grpc_asyncio', request_type=service.DeleteZoneRequest): + client = DataplexServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_zone), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name='operations/spam') + ) + response = await client.delete_zone(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = service.DeleteZoneRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +@pytest.mark.asyncio +async def test_delete_zone_async_from_dict(): + await test_delete_zone_async(request_type=dict) + +def test_delete_zone_field_headers(): + client = DataplexServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = service.DeleteZoneRequest() + + request.name = 'name_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_zone), + '__call__') as call: + call.return_value = operations_pb2.Operation(name='operations/op') + client.delete_zone(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'name=name_value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_delete_zone_field_headers_async(): + client = DataplexServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = service.DeleteZoneRequest() + + request.name = 'name_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_zone), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(operations_pb2.Operation(name='operations/op')) + await client.delete_zone(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. 
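+ # Routing information travels in the x-goog-request-params metadata entry.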
+ _, _, kw = call.mock_calls[0]
+ assert (
+ 'x-goog-request-params',
+ 'name=name_value',
+ ) in kw['metadata']
+
+
+def test_delete_zone_flattened():
+ client = DataplexServiceClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ )
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(
+ type(client.transport.delete_zone),
+ '__call__') as call:
+ # Designate an appropriate return value for the call.
+ call.return_value = operations_pb2.Operation(name='operations/op')
+ # Call the method with a truthy value for each flattened field,
+ # using the keyword arguments to the method.
+ client.delete_zone(
+ name='name_value',
+ )
+
+ # Establish that the underlying call was made with the expected
+ # request object values.
+ assert len(call.mock_calls) == 1
+ _, args, _ = call.mock_calls[0]
+ arg = args[0].name
+ mock_val = 'name_value'
+ assert arg == mock_val
+
+
+def test_delete_zone_flattened_error():
+ client = DataplexServiceClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ )
+
+ # Attempting to call a method with both a request object and flattened
+ # fields is an error.
+ with pytest.raises(ValueError):
+ client.delete_zone(
+ service.DeleteZoneRequest(),
+ name='name_value',
+ )
+
+@pytest.mark.asyncio
+async def test_delete_zone_flattened_async():
+ client = DataplexServiceAsyncClient(
+ credentials=async_anonymous_credentials(),
+ )
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(
+ type(client.transport.delete_zone),
+ '__call__') as call:
+ # Designate an appropriate return value for the call.
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
+ operations_pb2.Operation(name='operations/spam')
+ )
+ # Call the method with a truthy value for each flattened field,
+ # using the keyword arguments to the method.
+ response = await client.delete_zone(
+ name='name_value',
+ )
+
+ # Establish that the underlying call was made with the expected
+ # request object values.
+ assert len(call.mock_calls)
+ _, args, _ = call.mock_calls[0]
+ arg = args[0].name
+ mock_val = 'name_value'
+ assert arg == mock_val
+
+@pytest.mark.asyncio
+async def test_delete_zone_flattened_error_async():
+ client = DataplexServiceAsyncClient(
+ credentials=async_anonymous_credentials(),
+ )
+
+ # Attempting to call a method with both a request object and flattened
+ # fields is an error.
+ with pytest.raises(ValueError):
+ await client.delete_zone(
+ service.DeleteZoneRequest(),
+ name='name_value',
+ )
+
+
+@pytest.mark.parametrize("request_type", [
+ service.ListZonesRequest,
+ dict,
+])
+def test_list_zones(request_type, transport: str = 'grpc'):
+ client = DataplexServiceClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ transport=transport,
+ )
+
+ # Everything is optional in proto3 as far as the runtime is concerned,
+ # and we are mocking out the actual API, so just send an empty request.
+ request = request_type()
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(
+ type(client.transport.list_zones),
+ '__call__') as call:
+ # Designate an appropriate return value for the call.
+ call.return_value = service.ListZonesResponse(
+ next_page_token='next_page_token_value',
+ )
+ response = client.list_zones(request)
+
+ # Establish that the underlying gRPC stub method was called.
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = service.ListZonesRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListZonesPager) + assert response.next_page_token == 'next_page_token_value' + + +def test_list_zones_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = DataplexServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = service.ListZonesRequest( + parent='parent_value', + page_token='page_token_value', + filter='filter_value', + order_by='order_by_value', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_zones), + '__call__') as call: + call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client.list_zones(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == service.ListZonesRequest( + parent='parent_value', + page_token='page_token_value', + filter='filter_value', + order_by='order_by_value', + ) + +def test_list_zones_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = DataplexServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.list_zones in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client._transport._wrapped_methods[client._transport.list_zones] = mock_rpc + request = {} + client.list_zones(request) + + # Establish that the underlying gRPC stub method was called. 
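+        # (The transport keeps one wrapped callable per RPC in its
+        # _wrapped_methods dict, keyed by the bare stub method; a sketch of
+        # the lookup the client performs on every call, assuming the
+        # generated transport layout:
+        #
+        #     wrapped = client._transport._wrapped_methods[client._transport.list_zones]
+        #     wrapped(request)  # retry/timeout policy applied, no re-wrapping
+        #
+        # which is why swapping the dict entry above intercepts the call.)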
+        assert mock_rpc.call_count == 1
+
+        client.list_zones(request)
+
+        # Establish that a new wrapper was not created for this call
+        assert wrapper_fn.call_count == 0
+        assert mock_rpc.call_count == 2
+
+@pytest.mark.asyncio
+async def test_list_zones_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"):
+    # Clients should use _prep_wrapped_messages to create cached wrapped rpcs,
+    # instead of constructing them on each call
+    with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn:
+        client = DataplexServiceAsyncClient(
+            credentials=async_anonymous_credentials(),
+            transport=transport,
+        )
+
+        # Should wrap all calls on client creation
+        assert wrapper_fn.call_count > 0
+        wrapper_fn.reset_mock()
+
+        # Ensure method has been cached
+        assert client._client._transport.list_zones in client._client._transport._wrapped_methods
+
+        # Replace cached wrapped function with mock
+        mock_rpc = mock.AsyncMock()
+        mock_rpc.return_value = mock.Mock()
+        client._client._transport._wrapped_methods[client._client._transport.list_zones] = mock_rpc
+
+        request = {}
+        await client.list_zones(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert mock_rpc.call_count == 1
+
+        await client.list_zones(request)
+
+        # Establish that a new wrapper was not created for this call
+        assert wrapper_fn.call_count == 0
+        assert mock_rpc.call_count == 2
+
+@pytest.mark.asyncio
+async def test_list_zones_async(transport: str = 'grpc_asyncio', request_type=service.ListZonesRequest):
+    client = DataplexServiceAsyncClient(
+        credentials=async_anonymous_credentials(),
+        transport=transport,
+    )
+
+    # Everything is optional in proto3 as far as the runtime is concerned,
+    # and we are mocking out the actual API, so just send an empty request.
+    request = request_type()
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.list_zones),
+            '__call__') as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(service.ListZonesResponse(
+            next_page_token='next_page_token_value',
+        ))
+        response = await client.list_zones(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        request = service.ListZonesRequest()
+        assert args[0] == request
+
+    # Establish that the response is the type that we expect.
+    assert isinstance(response, pagers.ListZonesAsyncPager)
+    assert response.next_page_token == 'next_page_token_value'
+
+
+@pytest.mark.asyncio
+async def test_list_zones_async_from_dict():
+    await test_list_zones_async(request_type=dict)
+
+def test_list_zones_field_headers():
+    client = DataplexServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Any value that is part of the HTTP/1.1 URI should be sent as
+    # a field header. Set these to a non-empty value.
+    request = service.ListZonesRequest()
+
+    request.parent = 'parent_value'
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.list_zones),
+            '__call__') as call:
+        call.return_value = service.ListZonesResponse()
+        client.list_zones(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == request
+
+    # Establish that the field header was sent.
+    _, _, kw = call.mock_calls[0]
+    assert (
+        'x-goog-request-params',
+        'parent=parent_value',
+    ) in kw['metadata']
+
+
+@pytest.mark.asyncio
+async def test_list_zones_field_headers_async():
+    client = DataplexServiceAsyncClient(
+        credentials=async_anonymous_credentials(),
+    )
+
+    # Any value that is part of the HTTP/1.1 URI should be sent as
+    # a field header. Set these to a non-empty value.
+    request = service.ListZonesRequest()
+
+    request.parent = 'parent_value'
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.list_zones),
+            '__call__') as call:
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(service.ListZonesResponse())
+        await client.list_zones(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == request
+
+    # Establish that the field header was sent.
+    _, _, kw = call.mock_calls[0]
+    assert (
+        'x-goog-request-params',
+        'parent=parent_value',
+    ) in kw['metadata']
+
+
+def test_list_zones_flattened():
+    client = DataplexServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.list_zones),
+            '__call__') as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = service.ListZonesResponse()
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        client.list_zones(
+            parent='parent_value',
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        arg = args[0].parent
+        mock_val = 'parent_value'
+        assert arg == mock_val
+
+
+def test_list_zones_flattened_error():
+    client = DataplexServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        client.list_zones(
+            service.ListZonesRequest(),
+            parent='parent_value',
+        )
+
+@pytest.mark.asyncio
+async def test_list_zones_flattened_async():
+    client = DataplexServiceAsyncClient(
+        credentials=async_anonymous_credentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.list_zones),
+            '__call__') as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(service.ListZonesResponse())
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        response = await client.list_zones(
+            parent='parent_value',
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        arg = args[0].parent
+        mock_val = 'parent_value'
+        assert arg == mock_val
+
+@pytest.mark.asyncio
+async def test_list_zones_flattened_error_async():
+    client = DataplexServiceAsyncClient(
+        credentials=async_anonymous_credentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
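+    # (A sketch of the mutual-exclusion guard the generated client applies,
+    # simplified for illustration; the real check is generated per method:
+    #
+    #     has_flattened_params = any([parent])
+    #     if request is not None and has_flattened_params:
+    #         raise ValueError('If the `request` argument is set, then none of '
+    #                          'the individual field arguments should be set.')
+    # )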
+ with pytest.raises(ValueError): + await client.list_zones( + service.ListZonesRequest(), + parent='parent_value', + ) + + +def test_list_zones_pager(transport_name: str = "grpc"): + client = DataplexServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport_name, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_zones), + '__call__') as call: + # Set the response to a series of pages. + call.side_effect = ( + service.ListZonesResponse( + zones=[ + resources.Zone(), + resources.Zone(), + resources.Zone(), + ], + next_page_token='abc', + ), + service.ListZonesResponse( + zones=[], + next_page_token='def', + ), + service.ListZonesResponse( + zones=[ + resources.Zone(), + ], + next_page_token='ghi', + ), + service.ListZonesResponse( + zones=[ + resources.Zone(), + resources.Zone(), + ], + ), + RuntimeError, + ) + + expected_metadata = () + retry = retries.Retry() + timeout = 5 + expected_metadata = tuple(expected_metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ('parent', ''), + )), + ) + pager = client.list_zones(request={}, retry=retry, timeout=timeout) + + assert pager._metadata == expected_metadata + assert pager._retry == retry + assert pager._timeout == timeout + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, resources.Zone) + for i in results) +def test_list_zones_pages(transport_name: str = "grpc"): + client = DataplexServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport_name, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_zones), + '__call__') as call: + # Set the response to a series of pages. + call.side_effect = ( + service.ListZonesResponse( + zones=[ + resources.Zone(), + resources.Zone(), + resources.Zone(), + ], + next_page_token='abc', + ), + service.ListZonesResponse( + zones=[], + next_page_token='def', + ), + service.ListZonesResponse( + zones=[ + resources.Zone(), + ], + next_page_token='ghi', + ), + service.ListZonesResponse( + zones=[ + resources.Zone(), + resources.Zone(), + ], + ), + RuntimeError, + ) + pages = list(client.list_zones(request={}).pages) + for page_, token in zip(pages, ['abc','def','ghi', '']): + assert page_.raw_page.next_page_token == token + +@pytest.mark.asyncio +async def test_list_zones_async_pager(): + client = DataplexServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_zones), + '__call__', new_callable=mock.AsyncMock) as call: + # Set the response to a series of pages. 
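+        # (side_effect yields one response per stub call, so the pager walks
+        # pages 'abc' -> 'def' -> 'ghi' -> last; the trailing RuntimeError is
+        # a sentinel that would only surface if a fifth page were requested,
+        # since an empty next_page_token stops iteration. A sketch of the
+        # equivalent manual walk, assuming the pager contract:
+        #
+        #     async for page_ in (await client.list_zones(request={})).pages:
+        #         for zone in page_.zones:
+        #             ...
+        # )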
+ call.side_effect = ( + service.ListZonesResponse( + zones=[ + resources.Zone(), + resources.Zone(), + resources.Zone(), + ], + next_page_token='abc', + ), + service.ListZonesResponse( + zones=[], + next_page_token='def', + ), + service.ListZonesResponse( + zones=[ + resources.Zone(), + ], + next_page_token='ghi', + ), + service.ListZonesResponse( + zones=[ + resources.Zone(), + resources.Zone(), + ], + ), + RuntimeError, + ) + async_pager = await client.list_zones(request={},) + assert async_pager.next_page_token == 'abc' + responses = [] + async for response in async_pager: # pragma: no branch + responses.append(response) + + assert len(responses) == 6 + assert all(isinstance(i, resources.Zone) + for i in responses) + + +@pytest.mark.asyncio +async def test_list_zones_async_pages(): + client = DataplexServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_zones), + '__call__', new_callable=mock.AsyncMock) as call: + # Set the response to a series of pages. + call.side_effect = ( + service.ListZonesResponse( + zones=[ + resources.Zone(), + resources.Zone(), + resources.Zone(), + ], + next_page_token='abc', + ), + service.ListZonesResponse( + zones=[], + next_page_token='def', + ), + service.ListZonesResponse( + zones=[ + resources.Zone(), + ], + next_page_token='ghi', + ), + service.ListZonesResponse( + zones=[ + resources.Zone(), + resources.Zone(), + ], + ), + RuntimeError, + ) + pages = [] + # Workaround issue in python 3.9 related to code coverage by adding `# pragma: no branch` + # See https://github.com/googleapis/gapic-generator-python/pull/1174#issuecomment-1025132372 + async for page_ in ( # pragma: no branch + await client.list_zones(request={}) + ).pages: + pages.append(page_) + for page_, token in zip(pages, ['abc','def','ghi', '']): + assert page_.raw_page.next_page_token == token + +@pytest.mark.parametrize("request_type", [ + service.GetZoneRequest, + dict, +]) +def test_get_zone(request_type, transport: str = 'grpc'): + client = DataplexServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_zone), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = resources.Zone( + name='name_value', + display_name='display_name_value', + uid='uid_value', + description='description_value', + state=resources.State.ACTIVE, + type_=resources.Zone.Type.RAW, + ) + response = client.get_zone(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = service.GetZoneRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. 
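+    # (resources.Zone is a proto-plus message: fields read back as native
+    # Python values and enum fields compare directly against enum members.
+    # A minimal sketch with hand-picked values:
+    #
+    #     zone = resources.Zone(name='name_value', type_=resources.Zone.Type.RAW)
+    #     assert zone.name == 'name_value'
+    #     assert zone.type_ == resources.Zone.Type.RAW
+    # )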
+ assert isinstance(response, resources.Zone) + assert response.name == 'name_value' + assert response.display_name == 'display_name_value' + assert response.uid == 'uid_value' + assert response.description == 'description_value' + assert response.state == resources.State.ACTIVE + assert response.type_ == resources.Zone.Type.RAW + + +def test_get_zone_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = DataplexServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = service.GetZoneRequest( + name='name_value', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_zone), + '__call__') as call: + call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client.get_zone(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == service.GetZoneRequest( + name='name_value', + ) + +def test_get_zone_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = DataplexServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.get_zone in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client._transport._wrapped_methods[client._transport.get_zone] = mock_rpc + request = {} + client.get_zone(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.get_zone(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + +@pytest.mark.asyncio +async def test_get_zone_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = DataplexServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._client._transport.get_zone in client._client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[client._client._transport.get_zone] = mock_rpc + + request = {} + await client.get_zone(request) + + # Establish that the underlying gRPC stub method was called. 
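+        # (mock.AsyncMock is awaitable and counts each await as a call; a
+        # minimal sketch of the accounting relied on below:
+        #
+        #     mock_rpc = mock.AsyncMock()
+        #     await mock_rpc(request)
+        #     assert mock_rpc.call_count == 1
+        # )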
+        assert mock_rpc.call_count == 1
+
+        await client.get_zone(request)
+
+        # Establish that a new wrapper was not created for this call
+        assert wrapper_fn.call_count == 0
+        assert mock_rpc.call_count == 2
+
+@pytest.mark.asyncio
+async def test_get_zone_async(transport: str = 'grpc_asyncio', request_type=service.GetZoneRequest):
+    client = DataplexServiceAsyncClient(
+        credentials=async_anonymous_credentials(),
+        transport=transport,
+    )
+
+    # Everything is optional in proto3 as far as the runtime is concerned,
+    # and we are mocking out the actual API, so just send an empty request.
+    request = request_type()
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.get_zone),
+            '__call__') as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(resources.Zone(
+            name='name_value',
+            display_name='display_name_value',
+            uid='uid_value',
+            description='description_value',
+            state=resources.State.ACTIVE,
+            type_=resources.Zone.Type.RAW,
+        ))
+        response = await client.get_zone(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        request = service.GetZoneRequest()
+        assert args[0] == request
+
+    # Establish that the response is the type that we expect.
+    assert isinstance(response, resources.Zone)
+    assert response.name == 'name_value'
+    assert response.display_name == 'display_name_value'
+    assert response.uid == 'uid_value'
+    assert response.description == 'description_value'
+    assert response.state == resources.State.ACTIVE
+    assert response.type_ == resources.Zone.Type.RAW
+
+
+@pytest.mark.asyncio
+async def test_get_zone_async_from_dict():
+    await test_get_zone_async(request_type=dict)
+
+def test_get_zone_field_headers():
+    client = DataplexServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Any value that is part of the HTTP/1.1 URI should be sent as
+    # a field header. Set these to a non-empty value.
+    request = service.GetZoneRequest()
+
+    request.name = 'name_value'
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.get_zone),
+            '__call__') as call:
+        call.return_value = resources.Zone()
+        client.get_zone(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == request
+
+    # Establish that the field header was sent.
+    _, _, kw = call.mock_calls[0]
+    assert (
+        'x-goog-request-params',
+        'name=name_value',
+    ) in kw['metadata']
+
+
+@pytest.mark.asyncio
+async def test_get_zone_field_headers_async():
+    client = DataplexServiceAsyncClient(
+        credentials=async_anonymous_credentials(),
+    )
+
+    # Any value that is part of the HTTP/1.1 URI should be sent as
+    # a field header. Set these to a non-empty value.
+    request = service.GetZoneRequest()
+
+    request.name = 'name_value'
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.get_zone),
+            '__call__') as call:
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(resources.Zone())
+        await client.get_zone(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == request
+
+    # Establish that the field header was sent.
+    _, _, kw = call.mock_calls[0]
+    assert (
+        'x-goog-request-params',
+        'name=name_value',
+    ) in kw['metadata']
+
+
+def test_get_zone_flattened():
+    client = DataplexServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.get_zone),
+            '__call__') as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = resources.Zone()
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        client.get_zone(
+            name='name_value',
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        arg = args[0].name
+        mock_val = 'name_value'
+        assert arg == mock_val
+
+
+def test_get_zone_flattened_error():
+    client = DataplexServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        client.get_zone(
+            service.GetZoneRequest(),
+            name='name_value',
+        )
+
+@pytest.mark.asyncio
+async def test_get_zone_flattened_async():
+    client = DataplexServiceAsyncClient(
+        credentials=async_anonymous_credentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.get_zone),
+            '__call__') as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(resources.Zone())
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        response = await client.get_zone(
+            name='name_value',
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        arg = args[0].name
+        mock_val = 'name_value'
+        assert arg == mock_val
+
+@pytest.mark.asyncio
+async def test_get_zone_flattened_error_async():
+    client = DataplexServiceAsyncClient(
+        credentials=async_anonymous_credentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        await client.get_zone(
+            service.GetZoneRequest(),
+            name='name_value',
+        )
+
+
+@pytest.mark.parametrize("request_type", [
+  service.ListZoneActionsRequest,
+  dict,
+])
+def test_list_zone_actions(request_type, transport: str = 'grpc'):
+    client = DataplexServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport,
+    )
+
+    # Everything is optional in proto3 as far as the runtime is concerned,
+    # and we are mocking out the actual API, so just send an empty request.
+    request = request_type()
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.list_zone_actions),
+            '__call__') as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = service.ListActionsResponse(
+            next_page_token='next_page_token_value',
+        )
+        response = client.list_zone_actions(request)
+
+        # Establish that the underlying gRPC stub method was called.
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = service.ListZoneActionsRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListZoneActionsPager) + assert response.next_page_token == 'next_page_token_value' + + +def test_list_zone_actions_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = DataplexServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = service.ListZoneActionsRequest( + parent='parent_value', + page_token='page_token_value', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_zone_actions), + '__call__') as call: + call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client.list_zone_actions(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == service.ListZoneActionsRequest( + parent='parent_value', + page_token='page_token_value', + ) + +def test_list_zone_actions_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = DataplexServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.list_zone_actions in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client._transport._wrapped_methods[client._transport.list_zone_actions] = mock_rpc + request = {} + client.list_zone_actions(request) + + # Establish that the underlying gRPC stub method was called. 
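+        # (A plain mock.Mock() returns auto-created child mocks for any
+        # attribute, so the test pins .name to a real string; sketch:
+        #
+        #     mock_rpc = mock.Mock()
+        #     mock_rpc.return_value.name = "foo"  # reads back as the str "foo"
+        #
+        # which keeps any code path that stringifies operation names happy.)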
+        assert mock_rpc.call_count == 1
+
+        client.list_zone_actions(request)
+
+        # Establish that a new wrapper was not created for this call
+        assert wrapper_fn.call_count == 0
+        assert mock_rpc.call_count == 2
+
+@pytest.mark.asyncio
+async def test_list_zone_actions_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"):
+    # Clients should use _prep_wrapped_messages to create cached wrapped rpcs,
+    # instead of constructing them on each call
+    with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn:
+        client = DataplexServiceAsyncClient(
+            credentials=async_anonymous_credentials(),
+            transport=transport,
+        )
+
+        # Should wrap all calls on client creation
+        assert wrapper_fn.call_count > 0
+        wrapper_fn.reset_mock()
+
+        # Ensure method has been cached
+        assert client._client._transport.list_zone_actions in client._client._transport._wrapped_methods
+
+        # Replace cached wrapped function with mock
+        mock_rpc = mock.AsyncMock()
+        mock_rpc.return_value = mock.Mock()
+        client._client._transport._wrapped_methods[client._client._transport.list_zone_actions] = mock_rpc
+
+        request = {}
+        await client.list_zone_actions(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert mock_rpc.call_count == 1
+
+        await client.list_zone_actions(request)
+
+        # Establish that a new wrapper was not created for this call
+        assert wrapper_fn.call_count == 0
+        assert mock_rpc.call_count == 2
+
+@pytest.mark.asyncio
+async def test_list_zone_actions_async(transport: str = 'grpc_asyncio', request_type=service.ListZoneActionsRequest):
+    client = DataplexServiceAsyncClient(
+        credentials=async_anonymous_credentials(),
+        transport=transport,
+    )
+
+    # Everything is optional in proto3 as far as the runtime is concerned,
+    # and we are mocking out the actual API, so just send an empty request.
+    request = request_type()
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.list_zone_actions),
+            '__call__') as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(service.ListActionsResponse(
+            next_page_token='next_page_token_value',
+        ))
+        response = await client.list_zone_actions(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        request = service.ListZoneActionsRequest()
+        assert args[0] == request
+
+    # Establish that the response is the type that we expect.
+    assert isinstance(response, pagers.ListZoneActionsAsyncPager)
+    assert response.next_page_token == 'next_page_token_value'
+
+
+@pytest.mark.asyncio
+async def test_list_zone_actions_async_from_dict():
+    await test_list_zone_actions_async(request_type=dict)
+
+def test_list_zone_actions_field_headers():
+    client = DataplexServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Any value that is part of the HTTP/1.1 URI should be sent as
+    # a field header. Set these to a non-empty value.
+    request = service.ListZoneActionsRequest()
+
+    request.parent = 'parent_value'
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.list_zone_actions),
+            '__call__') as call:
+        call.return_value = service.ListActionsResponse()
+        client.list_zone_actions(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == request
+
+    # Establish that the field header was sent.
+    _, _, kw = call.mock_calls[0]
+    assert (
+        'x-goog-request-params',
+        'parent=parent_value',
+    ) in kw['metadata']
+
+
+@pytest.mark.asyncio
+async def test_list_zone_actions_field_headers_async():
+    client = DataplexServiceAsyncClient(
+        credentials=async_anonymous_credentials(),
+    )
+
+    # Any value that is part of the HTTP/1.1 URI should be sent as
+    # a field header. Set these to a non-empty value.
+    request = service.ListZoneActionsRequest()
+
+    request.parent = 'parent_value'
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.list_zone_actions),
+            '__call__') as call:
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(service.ListActionsResponse())
+        await client.list_zone_actions(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == request
+
+    # Establish that the field header was sent.
+    _, _, kw = call.mock_calls[0]
+    assert (
+        'x-goog-request-params',
+        'parent=parent_value',
+    ) in kw['metadata']
+
+
+def test_list_zone_actions_flattened():
+    client = DataplexServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.list_zone_actions),
+            '__call__') as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = service.ListActionsResponse()
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        client.list_zone_actions(
+            parent='parent_value',
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        arg = args[0].parent
+        mock_val = 'parent_value'
+        assert arg == mock_val
+
+
+def test_list_zone_actions_flattened_error():
+    client = DataplexServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        client.list_zone_actions(
+            service.ListZoneActionsRequest(),
+            parent='parent_value',
+        )
+
+@pytest.mark.asyncio
+async def test_list_zone_actions_flattened_async():
+    client = DataplexServiceAsyncClient(
+        credentials=async_anonymous_credentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.list_zone_actions),
+            '__call__') as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(service.ListActionsResponse())
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        response = await client.list_zone_actions(
+            parent='parent_value',
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = 'parent_value' + assert arg == mock_val + +@pytest.mark.asyncio +async def test_list_zone_actions_flattened_error_async(): + client = DataplexServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.list_zone_actions( + service.ListZoneActionsRequest(), + parent='parent_value', + ) + + +def test_list_zone_actions_pager(transport_name: str = "grpc"): + client = DataplexServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport_name, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_zone_actions), + '__call__') as call: + # Set the response to a series of pages. + call.side_effect = ( + service.ListActionsResponse( + actions=[ + resources.Action(), + resources.Action(), + resources.Action(), + ], + next_page_token='abc', + ), + service.ListActionsResponse( + actions=[], + next_page_token='def', + ), + service.ListActionsResponse( + actions=[ + resources.Action(), + ], + next_page_token='ghi', + ), + service.ListActionsResponse( + actions=[ + resources.Action(), + resources.Action(), + ], + ), + RuntimeError, + ) + + expected_metadata = () + retry = retries.Retry() + timeout = 5 + expected_metadata = tuple(expected_metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ('parent', ''), + )), + ) + pager = client.list_zone_actions(request={}, retry=retry, timeout=timeout) + + assert pager._metadata == expected_metadata + assert pager._retry == retry + assert pager._timeout == timeout + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, resources.Action) + for i in results) +def test_list_zone_actions_pages(transport_name: str = "grpc"): + client = DataplexServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport_name, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_zone_actions), + '__call__') as call: + # Set the response to a series of pages. + call.side_effect = ( + service.ListActionsResponse( + actions=[ + resources.Action(), + resources.Action(), + resources.Action(), + ], + next_page_token='abc', + ), + service.ListActionsResponse( + actions=[], + next_page_token='def', + ), + service.ListActionsResponse( + actions=[ + resources.Action(), + ], + next_page_token='ghi', + ), + service.ListActionsResponse( + actions=[ + resources.Action(), + resources.Action(), + ], + ), + RuntimeError, + ) + pages = list(client.list_zone_actions(request={}).pages) + for page_, token in zip(pages, ['abc','def','ghi', '']): + assert page_.raw_page.next_page_token == token + +@pytest.mark.asyncio +async def test_list_zone_actions_async_pager(): + client = DataplexServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_zone_actions), + '__call__', new_callable=mock.AsyncMock) as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + service.ListActionsResponse( + actions=[ + resources.Action(), + resources.Action(), + resources.Action(), + ], + next_page_token='abc', + ), + service.ListActionsResponse( + actions=[], + next_page_token='def', + ), + service.ListActionsResponse( + actions=[ + resources.Action(), + ], + next_page_token='ghi', + ), + service.ListActionsResponse( + actions=[ + resources.Action(), + resources.Action(), + ], + ), + RuntimeError, + ) + async_pager = await client.list_zone_actions(request={},) + assert async_pager.next_page_token == 'abc' + responses = [] + async for response in async_pager: # pragma: no branch + responses.append(response) + + assert len(responses) == 6 + assert all(isinstance(i, resources.Action) + for i in responses) + + +@pytest.mark.asyncio +async def test_list_zone_actions_async_pages(): + client = DataplexServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_zone_actions), + '__call__', new_callable=mock.AsyncMock) as call: + # Set the response to a series of pages. + call.side_effect = ( + service.ListActionsResponse( + actions=[ + resources.Action(), + resources.Action(), + resources.Action(), + ], + next_page_token='abc', + ), + service.ListActionsResponse( + actions=[], + next_page_token='def', + ), + service.ListActionsResponse( + actions=[ + resources.Action(), + ], + next_page_token='ghi', + ), + service.ListActionsResponse( + actions=[ + resources.Action(), + resources.Action(), + ], + ), + RuntimeError, + ) + pages = [] + # Workaround issue in python 3.9 related to code coverage by adding `# pragma: no branch` + # See https://github.com/googleapis/gapic-generator-python/pull/1174#issuecomment-1025132372 + async for page_ in ( # pragma: no branch + await client.list_zone_actions(request={}) + ).pages: + pages.append(page_) + for page_, token in zip(pages, ['abc','def','ghi', '']): + assert page_.raw_page.next_page_token == token + +@pytest.mark.parametrize("request_type", [ + service.CreateAssetRequest, + dict, +]) +def test_create_asset(request_type, transport: str = 'grpc'): + client = DataplexServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_asset), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name='operations/spam') + response = client.create_asset(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = service.CreateAssetRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +def test_create_asset_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. 
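+    # (Sketch of the AIP-4235 behaviour being guarded, with a hypothetical
+    # request_id-style field for illustration only:
+    #
+    #     request = service.CreateAssetRequest()   # uuid field left unset
+    #     client.create_asset(request=request)
+    #     # the wrapped method would fill it with something like str(uuid.uuid4())
+    # )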
+ client = DataplexServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = service.CreateAssetRequest( + parent='parent_value', + asset_id='asset_id_value', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_asset), + '__call__') as call: + call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client.create_asset(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == service.CreateAssetRequest( + parent='parent_value', + asset_id='asset_id_value', + ) + +def test_create_asset_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = DataplexServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.create_asset in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client._transport._wrapped_methods[client._transport.create_asset] = mock_rpc + request = {} + client.create_asset(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.create_asset(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + +@pytest.mark.asyncio +async def test_create_asset_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = DataplexServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._client._transport.create_asset in client._client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[client._client._transport.create_asset] = mock_rpc + + request = {} + await client.create_asset(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + await client.create_asset(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + +@pytest.mark.asyncio +async def test_create_asset_async(transport: str = 'grpc_asyncio', request_type=service.CreateAssetRequest): + client = DataplexServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_asset), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name='operations/spam') + ) + response = await client.create_asset(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = service.CreateAssetRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +@pytest.mark.asyncio +async def test_create_asset_async_from_dict(): + await test_create_asset_async(request_type=dict) + +def test_create_asset_field_headers(): + client = DataplexServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = service.CreateAssetRequest() + + request.parent = 'parent_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_asset), + '__call__') as call: + call.return_value = operations_pb2.Operation(name='operations/op') + client.create_asset(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'parent=parent_value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_create_asset_field_headers_async(): + client = DataplexServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = service.CreateAssetRequest() + + request.parent = 'parent_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_asset), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(operations_pb2.Operation(name='operations/op')) + await client.create_asset(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. 
+    _, _, kw = call.mock_calls[0]
+    assert (
+        'x-goog-request-params',
+        'parent=parent_value',
+    ) in kw['metadata']
+
+
+def test_create_asset_flattened():
+    client = DataplexServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.create_asset),
+            '__call__') as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = operations_pb2.Operation(name='operations/op')
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        client.create_asset(
+            parent='parent_value',
+            asset=resources.Asset(name='name_value'),
+            asset_id='asset_id_value',
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        arg = args[0].parent
+        mock_val = 'parent_value'
+        assert arg == mock_val
+        arg = args[0].asset
+        mock_val = resources.Asset(name='name_value')
+        assert arg == mock_val
+        arg = args[0].asset_id
+        mock_val = 'asset_id_value'
+        assert arg == mock_val
+
+
+def test_create_asset_flattened_error():
+    client = DataplexServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        client.create_asset(
+            service.CreateAssetRequest(),
+            parent='parent_value',
+            asset=resources.Asset(name='name_value'),
+            asset_id='asset_id_value',
+        )
+
+@pytest.mark.asyncio
+async def test_create_asset_flattened_async():
+    client = DataplexServiceAsyncClient(
+        credentials=async_anonymous_credentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.create_asset),
+            '__call__') as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
+            operations_pb2.Operation(name='operations/spam')
+        )
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        response = await client.create_asset(
+            parent='parent_value',
+            asset=resources.Asset(name='name_value'),
+            asset_id='asset_id_value',
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        arg = args[0].parent
+        mock_val = 'parent_value'
+        assert arg == mock_val
+        arg = args[0].asset
+        mock_val = resources.Asset(name='name_value')
+        assert arg == mock_val
+        arg = args[0].asset_id
+        mock_val = 'asset_id_value'
+        assert arg == mock_val
+
+@pytest.mark.asyncio
+async def test_create_asset_flattened_error_async():
+    client = DataplexServiceAsyncClient(
+        credentials=async_anonymous_credentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+ with pytest.raises(ValueError): + await client.create_asset( + service.CreateAssetRequest(), + parent='parent_value', + asset=resources.Asset(name='name_value'), + asset_id='asset_id_value', + ) + + +@pytest.mark.parametrize("request_type", [ + service.UpdateAssetRequest, + dict, +]) +def test_update_asset(request_type, transport: str = 'grpc'): + client = DataplexServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_asset), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name='operations/spam') + response = client.update_asset(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = service.UpdateAssetRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +def test_update_asset_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = DataplexServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = service.UpdateAssetRequest( + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_asset), + '__call__') as call: + call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client.update_asset(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == service.UpdateAssetRequest( + ) + +def test_update_asset_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = DataplexServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.update_asset in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client._transport._wrapped_methods[client._transport.update_asset] = mock_rpc + request = {} + client.update_asset(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.update_asset(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + +@pytest.mark.asyncio +async def test_update_asset_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = DataplexServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._client._transport.update_asset in client._client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[client._client._transport.update_asset] = mock_rpc + + request = {} + await client.update_asset(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + await client.update_asset(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + +@pytest.mark.asyncio +async def test_update_asset_async(transport: str = 'grpc_asyncio', request_type=service.UpdateAssetRequest): + client = DataplexServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_asset), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name='operations/spam') + ) + response = await client.update_asset(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = service.UpdateAssetRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +@pytest.mark.asyncio +async def test_update_asset_async_from_dict(): + await test_update_asset_async(request_type=dict) + +def test_update_asset_field_headers(): + client = DataplexServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = service.UpdateAssetRequest() + + request.asset.name = 'name_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_asset), + '__call__') as call: + call.return_value = operations_pb2.Operation(name='operations/op') + client.update_asset(request) + + # Establish that the underlying gRPC stub method was called. 
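+        # (Because the routing param lives on a nested field, the header key
+        # uses the dotted path 'asset.name'; a sketch, assuming the standard
+        # api_core helper:
+        #
+        #     hdr = gapic_v1.routing_header.to_grpc_metadata([('asset.name', 'name_value')])
+        #     # hdr == ('x-goog-request-params', 'asset.name=name_value')
+        # )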
+ assert len(call.mock_calls) == 1
+ _, args, _ = call.mock_calls[0]
+ assert args[0] == request
+
+ # Establish that the field header was sent.
+ _, _, kw = call.mock_calls[0]
+ assert (
+ 'x-goog-request-params',
+ 'asset.name=name_value',
+ ) in kw['metadata']
+
+
+@pytest.mark.asyncio
+async def test_update_asset_field_headers_async():
+ client = DataplexServiceAsyncClient(
+ credentials=async_anonymous_credentials(),
+ )
+
+ # Any value that is part of the HTTP/1.1 URI should be sent as
+ # a field header. Set these to a non-empty value.
+ request = service.UpdateAssetRequest()
+
+ request.asset.name = 'name_value'
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(
+ type(client.transport.update_asset),
+ '__call__') as call:
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(operations_pb2.Operation(name='operations/op'))
+ await client.update_asset(request)
+
+ # Establish that the underlying gRPC stub method was called.
+ assert len(call.mock_calls)
+ _, args, _ = call.mock_calls[0]
+ assert args[0] == request
+
+ # Establish that the field header was sent.
+ _, _, kw = call.mock_calls[0]
+ assert (
+ 'x-goog-request-params',
+ 'asset.name=name_value',
+ ) in kw['metadata']
+
+
+def test_update_asset_flattened():
+ client = DataplexServiceClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ )
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(
+ type(client.transport.update_asset),
+ '__call__') as call:
+ # Designate an appropriate return value for the call.
+ call.return_value = operations_pb2.Operation(name='operations/op')
+ # Call the method with a truthy value for each flattened field,
+ # using the keyword arguments to the method.
+ client.update_asset(
+ asset=resources.Asset(name='name_value'),
+ update_mask=field_mask_pb2.FieldMask(paths=['paths_value']),
+ )
+
+ # Establish that the underlying call was made with the expected
+ # request object values.
+ assert len(call.mock_calls) == 1
+ _, args, _ = call.mock_calls[0]
+ arg = args[0].asset
+ mock_val = resources.Asset(name='name_value')
+ assert arg == mock_val
+ arg = args[0].update_mask
+ mock_val = field_mask_pb2.FieldMask(paths=['paths_value'])
+ assert arg == mock_val
+
+
+def test_update_asset_flattened_error():
+ client = DataplexServiceClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ )
+
+ # Attempting to call a method with both a request object and flattened
+ # fields is an error.
+ with pytest.raises(ValueError):
+ client.update_asset(
+ service.UpdateAssetRequest(),
+ asset=resources.Asset(name='name_value'),
+ update_mask=field_mask_pb2.FieldMask(paths=['paths_value']),
+ )
+
+@pytest.mark.asyncio
+async def test_update_asset_flattened_async():
+ client = DataplexServiceAsyncClient(
+ credentials=async_anonymous_credentials(),
+ )
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(
+ type(client.transport.update_asset),
+ '__call__') as call:
+ # Designate an appropriate return value for the call.
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
+ operations_pb2.Operation(name='operations/spam')
+ )
+ # Call the method with a truthy value for each flattened field,
+ # using the keyword arguments to the method.
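+ # (The flattened keyword arguments are folded into a fresh
+ # service.UpdateAssetRequest before the transport is invoked, which is
+ # what the attribute checks on args[0] below rely on.)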
+ response = await client.update_asset( + asset=resources.Asset(name='name_value'), + update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].asset + mock_val = resources.Asset(name='name_value') + assert arg == mock_val + arg = args[0].update_mask + mock_val = field_mask_pb2.FieldMask(paths=['paths_value']) + assert arg == mock_val + +@pytest.mark.asyncio +async def test_update_asset_flattened_error_async(): + client = DataplexServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.update_asset( + service.UpdateAssetRequest(), + asset=resources.Asset(name='name_value'), + update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), + ) + + +@pytest.mark.parametrize("request_type", [ + service.DeleteAssetRequest, + dict, +]) +def test_delete_asset(request_type, transport: str = 'grpc'): + client = DataplexServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_asset), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name='operations/spam') + response = client.delete_asset(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = service.DeleteAssetRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +def test_delete_asset_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = DataplexServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = service.DeleteAssetRequest( + name='name_value', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_asset), + '__call__') as call: + call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. 
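+ # (Pinning .name to a plain string matters here: an unconfigured
+ # MagicMock would return another mock for attribute access, which trips
+ # clients that expect to read an operation name off the response.)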
+ client.delete_asset(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == service.DeleteAssetRequest( + name='name_value', + ) + +def test_delete_asset_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = DataplexServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.delete_asset in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client._transport._wrapped_methods[client._transport.delete_asset] = mock_rpc + request = {} + client.delete_asset(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.delete_asset(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + +@pytest.mark.asyncio +async def test_delete_asset_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = DataplexServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._client._transport.delete_asset in client._client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[client._client._transport.delete_asset] = mock_rpc + + request = {} + await client.delete_asset(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + await client.delete_asset(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + +@pytest.mark.asyncio +async def test_delete_asset_async(transport: str = 'grpc_asyncio', request_type=service.DeleteAssetRequest): + client = DataplexServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. 
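+ # Patching __call__ on the type of the bound stub attribute intercepts
+ # the RPC at the gRPC multicallable layer, so no channel I/O occurs.
+ # The same technique works for any unary-unary method; a minimal
+ # sketch, with `frobnicate` standing in as a hypothetical transport
+ # attribute:
+ #
+ # with mock.patch.object(type(client.transport.frobnicate), '__call__') as call:
+ # call.return_value = operations_pb2.Operation(name='operations/op')
+ # client.frobnicate(request={})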
+ with mock.patch.object( + type(client.transport.delete_asset), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name='operations/spam') + ) + response = await client.delete_asset(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = service.DeleteAssetRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +@pytest.mark.asyncio +async def test_delete_asset_async_from_dict(): + await test_delete_asset_async(request_type=dict) + +def test_delete_asset_field_headers(): + client = DataplexServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = service.DeleteAssetRequest() + + request.name = 'name_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_asset), + '__call__') as call: + call.return_value = operations_pb2.Operation(name='operations/op') + client.delete_asset(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'name=name_value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_delete_asset_field_headers_async(): + client = DataplexServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = service.DeleteAssetRequest() + + request.name = 'name_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_asset), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(operations_pb2.Operation(name='operations/op')) + await client.delete_asset(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'name=name_value', + ) in kw['metadata'] + + +def test_delete_asset_flattened(): + client = DataplexServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_asset), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name='operations/op') + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.delete_asset( + name='name_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(call.mock_calls) == 1
+ _, args, _ = call.mock_calls[0]
+ arg = args[0].name
+ mock_val = 'name_value'
+ assert arg == mock_val
+
+
+def test_delete_asset_flattened_error():
+ client = DataplexServiceClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ )
+
+ # Attempting to call a method with both a request object and flattened
+ # fields is an error.
+ with pytest.raises(ValueError):
+ client.delete_asset(
+ service.DeleteAssetRequest(),
+ name='name_value',
+ )
+
+@pytest.mark.asyncio
+async def test_delete_asset_flattened_async():
+ client = DataplexServiceAsyncClient(
+ credentials=async_anonymous_credentials(),
+ )
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(
+ type(client.transport.delete_asset),
+ '__call__') as call:
+ # Designate an appropriate return value for the call.
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
+ operations_pb2.Operation(name='operations/spam')
+ )
+ # Call the method with a truthy value for each flattened field,
+ # using the keyword arguments to the method.
+ response = await client.delete_asset(
+ name='name_value',
+ )
+
+ # Establish that the underlying call was made with the expected
+ # request object values.
+ assert len(call.mock_calls)
+ _, args, _ = call.mock_calls[0]
+ arg = args[0].name
+ mock_val = 'name_value'
+ assert arg == mock_val
+
+@pytest.mark.asyncio
+async def test_delete_asset_flattened_error_async():
+ client = DataplexServiceAsyncClient(
+ credentials=async_anonymous_credentials(),
+ )
+
+ # Attempting to call a method with both a request object and flattened
+ # fields is an error.
+ with pytest.raises(ValueError):
+ await client.delete_asset(
+ service.DeleteAssetRequest(),
+ name='name_value',
+ )
+
+
+@pytest.mark.parametrize("request_type", [
+ service.ListAssetsRequest,
+ dict,
+])
+def test_list_assets(request_type, transport: str = 'grpc'):
+ client = DataplexServiceClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ transport=transport,
+ )
+
+ # Everything is optional in proto3 as far as the runtime is concerned,
+ # and we are mocking out the actual API, so just send an empty request.
+ request = request_type()
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(
+ type(client.transport.list_assets),
+ '__call__') as call:
+ # Designate an appropriate return value for the call.
+ call.return_value = service.ListAssetsResponse(
+ next_page_token='next_page_token_value',
+ )
+ response = client.list_assets(request)
+
+ # Establish that the underlying gRPC stub method was called.
+ assert len(call.mock_calls) == 1
+ _, args, _ = call.mock_calls[0]
+ request = service.ListAssetsRequest()
+ assert args[0] == request
+
+ # Establish that the response is the type that we expect.
+ assert isinstance(response, pagers.ListAssetsPager)
+ assert response.next_page_token == 'next_page_token_value'
+
+
+def test_list_assets_non_empty_request_with_auto_populated_field():
+ # This test is a coverage failsafe to make sure that UUID4 fields are
+ # automatically populated, according to AIP-4235, with non-empty requests.
+ client = DataplexServiceClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ transport='grpc',
+ )
+
+ # Populate all string fields in the request which are not UUID4
+ # since we want to check that UUID4 are populated automatically
+ # if they meet the requirements of AIP 4235.
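+ # (AIP-4235 covers auto-populated request IDs: fields annotated for
+ # auto-population are presumably filled with a UUID4 by the client when
+ # left unset, so the equality check at the end of this test would fail
+ # if any other field were silently mutated along the way.)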
+ request = service.ListAssetsRequest( + parent='parent_value', + page_token='page_token_value', + filter='filter_value', + order_by='order_by_value', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_assets), + '__call__') as call: + call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client.list_assets(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == service.ListAssetsRequest( + parent='parent_value', + page_token='page_token_value', + filter='filter_value', + order_by='order_by_value', + ) + +def test_list_assets_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = DataplexServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.list_assets in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client._transport._wrapped_methods[client._transport.list_assets] = mock_rpc + request = {} + client.list_assets(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.list_assets(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + +@pytest.mark.asyncio +async def test_list_assets_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = DataplexServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._client._transport.list_assets in client._client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[client._client._transport.list_assets] = mock_rpc + + request = {} + await client.list_assets(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + await client.list_assets(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + +@pytest.mark.asyncio +async def test_list_assets_async(transport: str = 'grpc_asyncio', request_type=service.ListAssetsRequest): + client = DataplexServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. 
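+ # The async transport must hand back an awaitable, so the stub below
+ # returns a grpc_helpers_async.FakeUnaryUnaryCall wrapping the response
+ # rather than the bare proto used in the equivalent sync test.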
+ with mock.patch.object(
+ type(client.transport.list_assets),
+ '__call__') as call:
+ # Designate an appropriate return value for the call.
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(service.ListAssetsResponse(
+ next_page_token='next_page_token_value',
+ ))
+ response = await client.list_assets(request)
+
+ # Establish that the underlying gRPC stub method was called.
+ assert len(call.mock_calls)
+ _, args, _ = call.mock_calls[0]
+ request = service.ListAssetsRequest()
+ assert args[0] == request
+
+ # Establish that the response is the type that we expect.
+ assert isinstance(response, pagers.ListAssetsAsyncPager)
+ assert response.next_page_token == 'next_page_token_value'
+
+
+@pytest.mark.asyncio
+async def test_list_assets_async_from_dict():
+ await test_list_assets_async(request_type=dict)
+
+def test_list_assets_field_headers():
+ client = DataplexServiceClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ )
+
+ # Any value that is part of the HTTP/1.1 URI should be sent as
+ # a field header. Set these to a non-empty value.
+ request = service.ListAssetsRequest()
+
+ request.parent = 'parent_value'
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(
+ type(client.transport.list_assets),
+ '__call__') as call:
+ call.return_value = service.ListAssetsResponse()
+ client.list_assets(request)
+
+ # Establish that the underlying gRPC stub method was called.
+ assert len(call.mock_calls) == 1
+ _, args, _ = call.mock_calls[0]
+ assert args[0] == request
+
+ # Establish that the field header was sent.
+ _, _, kw = call.mock_calls[0]
+ assert (
+ 'x-goog-request-params',
+ 'parent=parent_value',
+ ) in kw['metadata']
+
+
+@pytest.mark.asyncio
+async def test_list_assets_field_headers_async():
+ client = DataplexServiceAsyncClient(
+ credentials=async_anonymous_credentials(),
+ )
+
+ # Any value that is part of the HTTP/1.1 URI should be sent as
+ # a field header. Set these to a non-empty value.
+ request = service.ListAssetsRequest()
+
+ request.parent = 'parent_value'
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(
+ type(client.transport.list_assets),
+ '__call__') as call:
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(service.ListAssetsResponse())
+ await client.list_assets(request)
+
+ # Establish that the underlying gRPC stub method was called.
+ assert len(call.mock_calls)
+ _, args, _ = call.mock_calls[0]
+ assert args[0] == request
+
+ # Establish that the field header was sent.
+ _, _, kw = call.mock_calls[0]
+ assert (
+ 'x-goog-request-params',
+ 'parent=parent_value',
+ ) in kw['metadata']
+
+
+def test_list_assets_flattened():
+ client = DataplexServiceClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ )
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(
+ type(client.transport.list_assets),
+ '__call__') as call:
+ # Designate an appropriate return value for the call.
+ call.return_value = service.ListAssetsResponse()
+ # Call the method with a truthy value for each flattened field,
+ # using the keyword arguments to the method.
+ client.list_assets(
+ parent='parent_value',
+ )
+
+ # Establish that the underlying call was made with the expected
+ # request object values.
+ assert len(call.mock_calls) == 1
+ _, args, _ = call.mock_calls[0]
+ arg = args[0].parent
+ mock_val = 'parent_value'
+ assert arg == mock_val
+
+
+def test_list_assets_flattened_error():
+ client = DataplexServiceClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ )
+
+ # Attempting to call a method with both a request object and flattened
+ # fields is an error.
+ with pytest.raises(ValueError):
+ client.list_assets(
+ service.ListAssetsRequest(),
+ parent='parent_value',
+ )
+
+@pytest.mark.asyncio
+async def test_list_assets_flattened_async():
+ client = DataplexServiceAsyncClient(
+ credentials=async_anonymous_credentials(),
+ )
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(
+ type(client.transport.list_assets),
+ '__call__') as call:
+ # Designate an appropriate return value for the call.
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(service.ListAssetsResponse())
+ # Call the method with a truthy value for each flattened field,
+ # using the keyword arguments to the method.
+ response = await client.list_assets(
+ parent='parent_value',
+ )
+
+ # Establish that the underlying call was made with the expected
+ # request object values.
+ assert len(call.mock_calls)
+ _, args, _ = call.mock_calls[0]
+ arg = args[0].parent
+ mock_val = 'parent_value'
+ assert arg == mock_val
+
+@pytest.mark.asyncio
+async def test_list_assets_flattened_error_async():
+ client = DataplexServiceAsyncClient(
+ credentials=async_anonymous_credentials(),
+ )
+
+ # Attempting to call a method with both a request object and flattened
+ # fields is an error.
+ with pytest.raises(ValueError):
+ await client.list_assets(
+ service.ListAssetsRequest(),
+ parent='parent_value',
+ )
+
+
+def test_list_assets_pager(transport_name: str = "grpc"):
+ client = DataplexServiceClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ transport=transport_name,
+ )
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(
+ type(client.transport.list_assets),
+ '__call__') as call:
+ # Set the response to a series of pages.
+ call.side_effect = (
+ service.ListAssetsResponse(
+ assets=[
+ resources.Asset(),
+ resources.Asset(),
+ resources.Asset(),
+ ],
+ next_page_token='abc',
+ ),
+ service.ListAssetsResponse(
+ assets=[],
+ next_page_token='def',
+ ),
+ service.ListAssetsResponse(
+ assets=[
+ resources.Asset(),
+ ],
+ next_page_token='ghi',
+ ),
+ service.ListAssetsResponse(
+ assets=[
+ resources.Asset(),
+ resources.Asset(),
+ ],
+ ),
+ RuntimeError,
+ )
+
+ expected_metadata = ()
+ retry = retries.Retry()
+ timeout = 5
+ expected_metadata = tuple(expected_metadata) + (
+ gapic_v1.routing_header.to_grpc_metadata((
+ ('parent', ''),
+ )),
+ )
+ pager = client.list_assets(request={}, retry=retry, timeout=timeout)
+
+ assert pager._metadata == expected_metadata
+ assert pager._retry == retry
+ assert pager._timeout == timeout
+
+ results = list(pager)
+ assert len(results) == 6
+ assert all(isinstance(i, resources.Asset)
+ for i in results)
+
+
+def test_list_assets_pages(transport_name: str = "grpc"):
+ client = DataplexServiceClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ transport=transport_name,
+ )
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(
+ type(client.transport.list_assets),
+ '__call__') as call:
+ # Set the response to a series of pages.
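+ # With an iterable side_effect, each stub invocation consumes the next
+ # item in order, and the trailing RuntimeError is raised if the pager
+ # ever asks for a page past the last one.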
+ call.side_effect = ( + service.ListAssetsResponse( + assets=[ + resources.Asset(), + resources.Asset(), + resources.Asset(), + ], + next_page_token='abc', + ), + service.ListAssetsResponse( + assets=[], + next_page_token='def', + ), + service.ListAssetsResponse( + assets=[ + resources.Asset(), + ], + next_page_token='ghi', + ), + service.ListAssetsResponse( + assets=[ + resources.Asset(), + resources.Asset(), + ], + ), + RuntimeError, + ) + pages = list(client.list_assets(request={}).pages) + for page_, token in zip(pages, ['abc','def','ghi', '']): + assert page_.raw_page.next_page_token == token + +@pytest.mark.asyncio +async def test_list_assets_async_pager(): + client = DataplexServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_assets), + '__call__', new_callable=mock.AsyncMock) as call: + # Set the response to a series of pages. + call.side_effect = ( + service.ListAssetsResponse( + assets=[ + resources.Asset(), + resources.Asset(), + resources.Asset(), + ], + next_page_token='abc', + ), + service.ListAssetsResponse( + assets=[], + next_page_token='def', + ), + service.ListAssetsResponse( + assets=[ + resources.Asset(), + ], + next_page_token='ghi', + ), + service.ListAssetsResponse( + assets=[ + resources.Asset(), + resources.Asset(), + ], + ), + RuntimeError, + ) + async_pager = await client.list_assets(request={},) + assert async_pager.next_page_token == 'abc' + responses = [] + async for response in async_pager: # pragma: no branch + responses.append(response) + + assert len(responses) == 6 + assert all(isinstance(i, resources.Asset) + for i in responses) + + +@pytest.mark.asyncio +async def test_list_assets_async_pages(): + client = DataplexServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_assets), + '__call__', new_callable=mock.AsyncMock) as call: + # Set the response to a series of pages. + call.side_effect = ( + service.ListAssetsResponse( + assets=[ + resources.Asset(), + resources.Asset(), + resources.Asset(), + ], + next_page_token='abc', + ), + service.ListAssetsResponse( + assets=[], + next_page_token='def', + ), + service.ListAssetsResponse( + assets=[ + resources.Asset(), + ], + next_page_token='ghi', + ), + service.ListAssetsResponse( + assets=[ + resources.Asset(), + resources.Asset(), + ], + ), + RuntimeError, + ) + pages = [] + # Workaround issue in python 3.9 related to code coverage by adding `# pragma: no branch` + # See https://github.com/googleapis/gapic-generator-python/pull/1174#issuecomment-1025132372 + async for page_ in ( # pragma: no branch + await client.list_assets(request={}) + ).pages: + pages.append(page_) + for page_, token in zip(pages, ['abc','def','ghi', '']): + assert page_.raw_page.next_page_token == token + +@pytest.mark.parametrize("request_type", [ + service.GetAssetRequest, + dict, +]) +def test_get_asset(request_type, transport: str = 'grpc'): + client = DataplexServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. 
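+ # (When request_type is dict, the client coerces the mapping into a
+ # service.GetAssetRequest before it reaches the transport, so both
+ # parametrizations below exercise the same stubbed code path.)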
+ with mock.patch.object( + type(client.transport.get_asset), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = resources.Asset( + name='name_value', + display_name='display_name_value', + uid='uid_value', + description='description_value', + state=resources.State.ACTIVE, + ) + response = client.get_asset(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = service.GetAssetRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, resources.Asset) + assert response.name == 'name_value' + assert response.display_name == 'display_name_value' + assert response.uid == 'uid_value' + assert response.description == 'description_value' + assert response.state == resources.State.ACTIVE + + +def test_get_asset_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = DataplexServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = service.GetAssetRequest( + name='name_value', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_asset), + '__call__') as call: + call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client.get_asset(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == service.GetAssetRequest( + name='name_value', + ) + +def test_get_asset_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = DataplexServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.get_asset in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client._transport._wrapped_methods[client._transport.get_asset] = mock_rpc + request = {} + client.get_asset(request) + + # Establish that the underlying gRPC stub method was called. 
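+ # (_wrapped_methods maps each raw transport callable to its
+ # retry/timeout-wrapped form, so swapping the dict entry above routes
+ # every invocation in this test straight into mock_rpc.)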
+ assert mock_rpc.call_count == 1
+
+ client.get_asset(request)
+
+ # Establish that a new wrapper was not created for this call
+ assert wrapper_fn.call_count == 0
+ assert mock_rpc.call_count == 2
+
+@pytest.mark.asyncio
+async def test_get_asset_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"):
+ # Clients should use _prep_wrapped_messages to create cached wrapped rpcs,
+ # instead of constructing them on each call
+ with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn:
+ client = DataplexServiceAsyncClient(
+ credentials=async_anonymous_credentials(),
+ transport=transport,
+ )
+
+ # Should wrap all calls on client creation
+ assert wrapper_fn.call_count > 0
+ wrapper_fn.reset_mock()
+
+ # Ensure method has been cached
+ assert client._client._transport.get_asset in client._client._transport._wrapped_methods
+
+ # Replace cached wrapped function with mock
+ mock_rpc = mock.AsyncMock()
+ mock_rpc.return_value = mock.Mock()
+ client._client._transport._wrapped_methods[client._client._transport.get_asset] = mock_rpc
+
+ request = {}
+ await client.get_asset(request)
+
+ # Establish that the underlying gRPC stub method was called.
+ assert mock_rpc.call_count == 1
+
+ await client.get_asset(request)
+
+ # Establish that a new wrapper was not created for this call
+ assert wrapper_fn.call_count == 0
+ assert mock_rpc.call_count == 2
+
+@pytest.mark.asyncio
+async def test_get_asset_async(transport: str = 'grpc_asyncio', request_type=service.GetAssetRequest):
+ client = DataplexServiceAsyncClient(
+ credentials=async_anonymous_credentials(),
+ transport=transport,
+ )
+
+ # Everything is optional in proto3 as far as the runtime is concerned,
+ # and we are mocking out the actual API, so just send an empty request.
+ request = request_type()
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(
+ type(client.transport.get_asset),
+ '__call__') as call:
+ # Designate an appropriate return value for the call.
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(resources.Asset(
+ name='name_value',
+ display_name='display_name_value',
+ uid='uid_value',
+ description='description_value',
+ state=resources.State.ACTIVE,
+ ))
+ response = await client.get_asset(request)
+
+ # Establish that the underlying gRPC stub method was called.
+ assert len(call.mock_calls)
+ _, args, _ = call.mock_calls[0]
+ request = service.GetAssetRequest()
+ assert args[0] == request
+
+ # Establish that the response is the type that we expect.
+ assert isinstance(response, resources.Asset)
+ assert response.name == 'name_value'
+ assert response.display_name == 'display_name_value'
+ assert response.uid == 'uid_value'
+ assert response.description == 'description_value'
+ assert response.state == resources.State.ACTIVE
+
+
+@pytest.mark.asyncio
+async def test_get_asset_async_from_dict():
+ await test_get_asset_async(request_type=dict)
+
+def test_get_asset_field_headers():
+ client = DataplexServiceClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ )
+
+ # Any value that is part of the HTTP/1.1 URI should be sent as
+ # a field header. Set these to a non-empty value.
+ request = service.GetAssetRequest()
+
+ request.name = 'name_value'
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(
+ type(client.transport.get_asset),
+ '__call__') as call:
+ call.return_value = resources.Asset()
+ client.get_asset(request)
+
+ # Establish that the underlying gRPC stub method was called.
+ assert len(call.mock_calls) == 1
+ _, args, _ = call.mock_calls[0]
+ assert args[0] == request
+
+ # Establish that the field header was sent.
+ _, _, kw = call.mock_calls[0]
+ assert (
+ 'x-goog-request-params',
+ 'name=name_value',
+ ) in kw['metadata']
+
+
+@pytest.mark.asyncio
+async def test_get_asset_field_headers_async():
+ client = DataplexServiceAsyncClient(
+ credentials=async_anonymous_credentials(),
+ )
+
+ # Any value that is part of the HTTP/1.1 URI should be sent as
+ # a field header. Set these to a non-empty value.
+ request = service.GetAssetRequest()
+
+ request.name = 'name_value'
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(
+ type(client.transport.get_asset),
+ '__call__') as call:
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(resources.Asset())
+ await client.get_asset(request)
+
+ # Establish that the underlying gRPC stub method was called.
+ assert len(call.mock_calls)
+ _, args, _ = call.mock_calls[0]
+ assert args[0] == request
+
+ # Establish that the field header was sent.
+ _, _, kw = call.mock_calls[0]
+ assert (
+ 'x-goog-request-params',
+ 'name=name_value',
+ ) in kw['metadata']
+
+
+def test_get_asset_flattened():
+ client = DataplexServiceClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ )
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(
+ type(client.transport.get_asset),
+ '__call__') as call:
+ # Designate an appropriate return value for the call.
+ call.return_value = resources.Asset()
+ # Call the method with a truthy value for each flattened field,
+ # using the keyword arguments to the method.
+ client.get_asset(
+ name='name_value',
+ )
+
+ # Establish that the underlying call was made with the expected
+ # request object values.
+ assert len(call.mock_calls) == 1
+ _, args, _ = call.mock_calls[0]
+ arg = args[0].name
+ mock_val = 'name_value'
+ assert arg == mock_val
+
+
+def test_get_asset_flattened_error():
+ client = DataplexServiceClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ )
+
+ # Attempting to call a method with both a request object and flattened
+ # fields is an error.
+ with pytest.raises(ValueError):
+ client.get_asset(
+ service.GetAssetRequest(),
+ name='name_value',
+ )
+
+@pytest.mark.asyncio
+async def test_get_asset_flattened_async():
+ client = DataplexServiceAsyncClient(
+ credentials=async_anonymous_credentials(),
+ )
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(
+ type(client.transport.get_asset),
+ '__call__') as call:
+ # Designate an appropriate return value for the call.
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(resources.Asset())
+ # Call the method with a truthy value for each flattened field,
+ # using the keyword arguments to the method.
+ response = await client.get_asset(
+ name='name_value',
+ )
+
+ # Establish that the underlying call was made with the expected
+ # request object values.
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = 'name_value' + assert arg == mock_val + +@pytest.mark.asyncio +async def test_get_asset_flattened_error_async(): + client = DataplexServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.get_asset( + service.GetAssetRequest(), + name='name_value', + ) + + +@pytest.mark.parametrize("request_type", [ + service.ListAssetActionsRequest, + dict, +]) +def test_list_asset_actions(request_type, transport: str = 'grpc'): + client = DataplexServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_asset_actions), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = service.ListActionsResponse( + next_page_token='next_page_token_value', + ) + response = client.list_asset_actions(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = service.ListAssetActionsRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListAssetActionsPager) + assert response.next_page_token == 'next_page_token_value' + + +def test_list_asset_actions_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = DataplexServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = service.ListAssetActionsRequest( + parent='parent_value', + page_token='page_token_value', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_asset_actions), + '__call__') as call: + call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. 
+ client.list_asset_actions(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == service.ListAssetActionsRequest( + parent='parent_value', + page_token='page_token_value', + ) + +def test_list_asset_actions_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = DataplexServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.list_asset_actions in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client._transport._wrapped_methods[client._transport.list_asset_actions] = mock_rpc + request = {} + client.list_asset_actions(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.list_asset_actions(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + +@pytest.mark.asyncio +async def test_list_asset_actions_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = DataplexServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._client._transport.list_asset_actions in client._client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[client._client._transport.list_asset_actions] = mock_rpc + + request = {} + await client.list_asset_actions(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + await client.list_asset_actions(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + +@pytest.mark.asyncio +async def test_list_asset_actions_async(transport: str = 'grpc_asyncio', request_type=service.ListAssetActionsRequest): + client = DataplexServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_asset_actions), + '__call__') as call: + # Designate an appropriate return value for the call. 
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(service.ListActionsResponse(
+ next_page_token='next_page_token_value',
+ ))
+ response = await client.list_asset_actions(request)
+
+ # Establish that the underlying gRPC stub method was called.
+ assert len(call.mock_calls)
+ _, args, _ = call.mock_calls[0]
+ request = service.ListAssetActionsRequest()
+ assert args[0] == request
+
+ # Establish that the response is the type that we expect.
+ assert isinstance(response, pagers.ListAssetActionsAsyncPager)
+ assert response.next_page_token == 'next_page_token_value'
+
+
+@pytest.mark.asyncio
+async def test_list_asset_actions_async_from_dict():
+ await test_list_asset_actions_async(request_type=dict)
+
+def test_list_asset_actions_field_headers():
+ client = DataplexServiceClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ )
+
+ # Any value that is part of the HTTP/1.1 URI should be sent as
+ # a field header. Set these to a non-empty value.
+ request = service.ListAssetActionsRequest()
+
+ request.parent = 'parent_value'
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(
+ type(client.transport.list_asset_actions),
+ '__call__') as call:
+ call.return_value = service.ListActionsResponse()
+ client.list_asset_actions(request)
+
+ # Establish that the underlying gRPC stub method was called.
+ assert len(call.mock_calls) == 1
+ _, args, _ = call.mock_calls[0]
+ assert args[0] == request
+
+ # Establish that the field header was sent.
+ _, _, kw = call.mock_calls[0]
+ assert (
+ 'x-goog-request-params',
+ 'parent=parent_value',
+ ) in kw['metadata']
+
+
+@pytest.mark.asyncio
+async def test_list_asset_actions_field_headers_async():
+ client = DataplexServiceAsyncClient(
+ credentials=async_anonymous_credentials(),
+ )
+
+ # Any value that is part of the HTTP/1.1 URI should be sent as
+ # a field header. Set these to a non-empty value.
+ request = service.ListAssetActionsRequest()
+
+ request.parent = 'parent_value'
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(
+ type(client.transport.list_asset_actions),
+ '__call__') as call:
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(service.ListActionsResponse())
+ await client.list_asset_actions(request)
+
+ # Establish that the underlying gRPC stub method was called.
+ assert len(call.mock_calls)
+ _, args, _ = call.mock_calls[0]
+ assert args[0] == request
+
+ # Establish that the field header was sent.
+ _, _, kw = call.mock_calls[0]
+ assert (
+ 'x-goog-request-params',
+ 'parent=parent_value',
+ ) in kw['metadata']
+
+
+def test_list_asset_actions_flattened():
+ client = DataplexServiceClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ )
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(
+ type(client.transport.list_asset_actions),
+ '__call__') as call:
+ # Designate an appropriate return value for the call.
+ call.return_value = service.ListActionsResponse()
+ # Call the method with a truthy value for each flattened field,
+ # using the keyword arguments to the method.
+ client.list_asset_actions(
+ parent='parent_value',
+ )
+
+ # Establish that the underlying call was made with the expected
+ # request object values.
+ assert len(call.mock_calls) == 1
+ _, args, _ = call.mock_calls[0]
+ arg = args[0].parent
+ mock_val = 'parent_value'
+ assert arg == mock_val
+
+
+def test_list_asset_actions_flattened_error():
+ client = DataplexServiceClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ )
+
+ # Attempting to call a method with both a request object and flattened
+ # fields is an error.
+ with pytest.raises(ValueError):
+ client.list_asset_actions(
+ service.ListAssetActionsRequest(),
+ parent='parent_value',
+ )
+
+@pytest.mark.asyncio
+async def test_list_asset_actions_flattened_async():
+ client = DataplexServiceAsyncClient(
+ credentials=async_anonymous_credentials(),
+ )
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(
+ type(client.transport.list_asset_actions),
+ '__call__') as call:
+ # Designate an appropriate return value for the call.
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(service.ListActionsResponse())
+ # Call the method with a truthy value for each flattened field,
+ # using the keyword arguments to the method.
+ response = await client.list_asset_actions(
+ parent='parent_value',
+ )
+
+ # Establish that the underlying call was made with the expected
+ # request object values.
+ assert len(call.mock_calls)
+ _, args, _ = call.mock_calls[0]
+ arg = args[0].parent
+ mock_val = 'parent_value'
+ assert arg == mock_val
+
+@pytest.mark.asyncio
+async def test_list_asset_actions_flattened_error_async():
+ client = DataplexServiceAsyncClient(
+ credentials=async_anonymous_credentials(),
+ )
+
+ # Attempting to call a method with both a request object and flattened
+ # fields is an error.
+ with pytest.raises(ValueError):
+ await client.list_asset_actions(
+ service.ListAssetActionsRequest(),
+ parent='parent_value',
+ )
+
+
+def test_list_asset_actions_pager(transport_name: str = "grpc"):
+ client = DataplexServiceClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ transport=transport_name,
+ )
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(
+ type(client.transport.list_asset_actions),
+ '__call__') as call:
+ # Set the response to a series of pages.
+ call.side_effect = (
+ service.ListActionsResponse(
+ actions=[
+ resources.Action(),
+ resources.Action(),
+ resources.Action(),
+ ],
+ next_page_token='abc',
+ ),
+ service.ListActionsResponse(
+ actions=[],
+ next_page_token='def',
+ ),
+ service.ListActionsResponse(
+ actions=[
+ resources.Action(),
+ ],
+ next_page_token='ghi',
+ ),
+ service.ListActionsResponse(
+ actions=[
+ resources.Action(),
+ resources.Action(),
+ ],
+ ),
+ RuntimeError,
+ )
+
+ expected_metadata = ()
+ retry = retries.Retry()
+ timeout = 5
+ expected_metadata = tuple(expected_metadata) + (
+ gapic_v1.routing_header.to_grpc_metadata((
+ ('parent', ''),
+ )),
+ )
+ pager = client.list_asset_actions(request={}, retry=retry, timeout=timeout)
+
+ assert pager._metadata == expected_metadata
+ assert pager._retry == retry
+ assert pager._timeout == timeout
+
+ results = list(pager)
+ assert len(results) == 6
+ assert all(isinstance(i, resources.Action)
+ for i in results)
+
+
+def test_list_asset_actions_pages(transport_name: str = "grpc"):
+ client = DataplexServiceClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ transport=transport_name,
+ )
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object( + type(client.transport.list_asset_actions), + '__call__') as call: + # Set the response to a series of pages. + call.side_effect = ( + service.ListActionsResponse( + actions=[ + resources.Action(), + resources.Action(), + resources.Action(), + ], + next_page_token='abc', + ), + service.ListActionsResponse( + actions=[], + next_page_token='def', + ), + service.ListActionsResponse( + actions=[ + resources.Action(), + ], + next_page_token='ghi', + ), + service.ListActionsResponse( + actions=[ + resources.Action(), + resources.Action(), + ], + ), + RuntimeError, + ) + pages = list(client.list_asset_actions(request={}).pages) + for page_, token in zip(pages, ['abc','def','ghi', '']): + assert page_.raw_page.next_page_token == token + +@pytest.mark.asyncio +async def test_list_asset_actions_async_pager(): + client = DataplexServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_asset_actions), + '__call__', new_callable=mock.AsyncMock) as call: + # Set the response to a series of pages. + call.side_effect = ( + service.ListActionsResponse( + actions=[ + resources.Action(), + resources.Action(), + resources.Action(), + ], + next_page_token='abc', + ), + service.ListActionsResponse( + actions=[], + next_page_token='def', + ), + service.ListActionsResponse( + actions=[ + resources.Action(), + ], + next_page_token='ghi', + ), + service.ListActionsResponse( + actions=[ + resources.Action(), + resources.Action(), + ], + ), + RuntimeError, + ) + async_pager = await client.list_asset_actions(request={},) + assert async_pager.next_page_token == 'abc' + responses = [] + async for response in async_pager: # pragma: no branch + responses.append(response) + + assert len(responses) == 6 + assert all(isinstance(i, resources.Action) + for i in responses) + + +@pytest.mark.asyncio +async def test_list_asset_actions_async_pages(): + client = DataplexServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_asset_actions), + '__call__', new_callable=mock.AsyncMock) as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + service.ListActionsResponse( + actions=[ + resources.Action(), + resources.Action(), + resources.Action(), + ], + next_page_token='abc', + ), + service.ListActionsResponse( + actions=[], + next_page_token='def', + ), + service.ListActionsResponse( + actions=[ + resources.Action(), + ], + next_page_token='ghi', + ), + service.ListActionsResponse( + actions=[ + resources.Action(), + resources.Action(), + ], + ), + RuntimeError, + ) + pages = [] + # Workaround issue in python 3.9 related to code coverage by adding `# pragma: no branch` + # See https://github.com/googleapis/gapic-generator-python/pull/1174#issuecomment-1025132372 + async for page_ in ( # pragma: no branch + await client.list_asset_actions(request={}) + ).pages: + pages.append(page_) + for page_, token in zip(pages, ['abc','def','ghi', '']): + assert page_.raw_page.next_page_token == token + +@pytest.mark.parametrize("request_type", [ + service.CreateTaskRequest, + dict, +]) +def test_create_task(request_type, transport: str = 'grpc'): + client = DataplexServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_task), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name='operations/spam') + response = client.create_task(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = service.CreateTaskRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +def test_create_task_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = DataplexServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = service.CreateTaskRequest( + parent='parent_value', + task_id='task_id_value', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_task), + '__call__') as call: + call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. 
+ client.create_task(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == service.CreateTaskRequest( + parent='parent_value', + task_id='task_id_value', + ) + +def test_create_task_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = DataplexServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.create_task in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client._transport._wrapped_methods[client._transport.create_task] = mock_rpc + request = {} + client.create_task(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.create_task(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + +@pytest.mark.asyncio +async def test_create_task_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = DataplexServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._client._transport.create_task in client._client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[client._client._transport.create_task] = mock_rpc + + request = {} + await client.create_task(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + await client.create_task(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + +@pytest.mark.asyncio +async def test_create_task_async(transport: str = 'grpc_asyncio', request_type=service.CreateTaskRequest): + client = DataplexServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. 
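+    # The async stub must return an awaitable, so the canned Operation is
+    # wrapped in grpc_helpers_async.FakeUnaryUnaryCall below.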
+ with mock.patch.object( + type(client.transport.create_task), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name='operations/spam') + ) + response = await client.create_task(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = service.CreateTaskRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +@pytest.mark.asyncio +async def test_create_task_async_from_dict(): + await test_create_task_async(request_type=dict) + +def test_create_task_field_headers(): + client = DataplexServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = service.CreateTaskRequest() + + request.parent = 'parent_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_task), + '__call__') as call: + call.return_value = operations_pb2.Operation(name='operations/op') + client.create_task(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'parent=parent_value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_create_task_field_headers_async(): + client = DataplexServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = service.CreateTaskRequest() + + request.parent = 'parent_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_task), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(operations_pb2.Operation(name='operations/op')) + await client.create_task(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'parent=parent_value', + ) in kw['metadata'] + + +def test_create_task_flattened(): + client = DataplexServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_task), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name='operations/op') + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.create_task( + parent='parent_value', + task=tasks.Task(name='name_value'), + task_id='task_id_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. 
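+        # Each flattened keyword argument must map onto the matching field
+        # of the CreateTaskRequest that was actually sent.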
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        arg = args[0].parent
+        mock_val = 'parent_value'
+        assert arg == mock_val
+        arg = args[0].task
+        mock_val = tasks.Task(name='name_value')
+        assert arg == mock_val
+        arg = args[0].task_id
+        mock_val = 'task_id_value'
+        assert arg == mock_val
+
+
+def test_create_task_flattened_error():
+    client = DataplexServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        client.create_task(
+            service.CreateTaskRequest(),
+            parent='parent_value',
+            task=tasks.Task(name='name_value'),
+            task_id='task_id_value',
+        )
+
+@pytest.mark.asyncio
+async def test_create_task_flattened_async():
+    client = DataplexServiceAsyncClient(
+        credentials=async_anonymous_credentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.create_task),
+            '__call__') as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
+            operations_pb2.Operation(name='operations/spam')
+        )
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        response = await client.create_task(
+            parent='parent_value',
+            task=tasks.Task(name='name_value'),
+            task_id='task_id_value',
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        arg = args[0].parent
+        mock_val = 'parent_value'
+        assert arg == mock_val
+        arg = args[0].task
+        mock_val = tasks.Task(name='name_value')
+        assert arg == mock_val
+        arg = args[0].task_id
+        mock_val = 'task_id_value'
+        assert arg == mock_val
+
+@pytest.mark.asyncio
+async def test_create_task_flattened_error_async():
+    client = DataplexServiceAsyncClient(
+        credentials=async_anonymous_credentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        await client.create_task(
+            service.CreateTaskRequest(),
+            parent='parent_value',
+            task=tasks.Task(name='name_value'),
+            task_id='task_id_value',
+        )
+
+
+@pytest.mark.parametrize("request_type", [
+  service.UpdateTaskRequest,
+  dict,
+])
+def test_update_task(request_type, transport: str = 'grpc'):
+    client = DataplexServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport,
+    )
+
+    # Everything is optional in proto3 as far as the runtime is concerned,
+    # and we are mocking out the actual API, so just send an empty request.
+    request = request_type()
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.update_task),
+            '__call__') as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = operations_pb2.Operation(name='operations/spam')
+        response = client.update_task(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        request = service.UpdateTaskRequest()
+        assert args[0] == request
+
+        # Establish that the response is the type that we expect.
+ assert isinstance(response, future.Future) + + +def test_update_task_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = DataplexServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = service.UpdateTaskRequest( + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_task), + '__call__') as call: + call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client.update_task(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == service.UpdateTaskRequest( + ) + +def test_update_task_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = DataplexServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.update_task in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client._transport._wrapped_methods[client._transport.update_task] = mock_rpc + request = {} + client.update_task(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.update_task(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + +@pytest.mark.asyncio +async def test_update_task_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = DataplexServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._client._transport.update_task in client._client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[client._client._transport.update_task] = mock_rpc + + request = {} + await client.update_task(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + await client.update_task(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + +@pytest.mark.asyncio +async def test_update_task_async(transport: str = 'grpc_asyncio', request_type=service.UpdateTaskRequest): + client = DataplexServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_task), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name='operations/spam') + ) + response = await client.update_task(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = service.UpdateTaskRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +@pytest.mark.asyncio +async def test_update_task_async_from_dict(): + await test_update_task_async(request_type=dict) + +def test_update_task_field_headers(): + client = DataplexServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = service.UpdateTaskRequest() + + request.task.name = 'name_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_task), + '__call__') as call: + call.return_value = operations_pb2.Operation(name='operations/op') + client.update_task(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'task.name=name_value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_update_task_field_headers_async(): + client = DataplexServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = service.UpdateTaskRequest() + + request.task.name = 'name_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_task), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(operations_pb2.Operation(name='operations/op')) + await client.update_task(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. 
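+    # The x-goog-request-params header lets the backend route the request;
+    # for UpdateTask the routing key is the task's resource name.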
+    _, _, kw = call.mock_calls[0]
+    assert (
+        'x-goog-request-params',
+        'task.name=name_value',
+    ) in kw['metadata']
+
+
+def test_update_task_flattened():
+    client = DataplexServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.update_task),
+            '__call__') as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = operations_pb2.Operation(name='operations/op')
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        client.update_task(
+            task=tasks.Task(name='name_value'),
+            update_mask=field_mask_pb2.FieldMask(paths=['paths_value']),
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        arg = args[0].task
+        mock_val = tasks.Task(name='name_value')
+        assert arg == mock_val
+        arg = args[0].update_mask
+        mock_val = field_mask_pb2.FieldMask(paths=['paths_value'])
+        assert arg == mock_val
+
+
+def test_update_task_flattened_error():
+    client = DataplexServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        client.update_task(
+            service.UpdateTaskRequest(),
+            task=tasks.Task(name='name_value'),
+            update_mask=field_mask_pb2.FieldMask(paths=['paths_value']),
+        )
+
+@pytest.mark.asyncio
+async def test_update_task_flattened_async():
+    client = DataplexServiceAsyncClient(
+        credentials=async_anonymous_credentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.update_task),
+            '__call__') as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
+            operations_pb2.Operation(name='operations/spam')
+        )
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        response = await client.update_task(
+            task=tasks.Task(name='name_value'),
+            update_mask=field_mask_pb2.FieldMask(paths=['paths_value']),
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        arg = args[0].task
+        mock_val = tasks.Task(name='name_value')
+        assert arg == mock_val
+        arg = args[0].update_mask
+        mock_val = field_mask_pb2.FieldMask(paths=['paths_value'])
+        assert arg == mock_val
+
+@pytest.mark.asyncio
+async def test_update_task_flattened_error_async():
+    client = DataplexServiceAsyncClient(
+        credentials=async_anonymous_credentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+ with pytest.raises(ValueError): + await client.update_task( + service.UpdateTaskRequest(), + task=tasks.Task(name='name_value'), + update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), + ) + + +@pytest.mark.parametrize("request_type", [ + service.DeleteTaskRequest, + dict, +]) +def test_delete_task(request_type, transport: str = 'grpc'): + client = DataplexServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_task), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name='operations/spam') + response = client.delete_task(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = service.DeleteTaskRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +def test_delete_task_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = DataplexServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = service.DeleteTaskRequest( + name='name_value', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_task), + '__call__') as call: + call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client.delete_task(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == service.DeleteTaskRequest( + name='name_value', + ) + +def test_delete_task_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = DataplexServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.delete_task in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client._transport._wrapped_methods[client._transport.delete_task] = mock_rpc + request = {} + client.delete_task(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.delete_task(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + +@pytest.mark.asyncio +async def test_delete_task_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = DataplexServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._client._transport.delete_task in client._client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[client._client._transport.delete_task] = mock_rpc + + request = {} + await client.delete_task(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + await client.delete_task(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + +@pytest.mark.asyncio +async def test_delete_task_async(transport: str = 'grpc_asyncio', request_type=service.DeleteTaskRequest): + client = DataplexServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_task), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name='operations/spam') + ) + response = await client.delete_task(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = service.DeleteTaskRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +@pytest.mark.asyncio +async def test_delete_task_async_from_dict(): + await test_delete_task_async(request_type=dict) + +def test_delete_task_field_headers(): + client = DataplexServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = service.DeleteTaskRequest() + + request.name = 'name_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_task), + '__call__') as call: + call.return_value = operations_pb2.Operation(name='operations/op') + client.delete_task(request) + + # Establish that the underlying gRPC stub method was called. 
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == request
+
+    # Establish that the field header was sent.
+    _, _, kw = call.mock_calls[0]
+    assert (
+        'x-goog-request-params',
+        'name=name_value',
+    ) in kw['metadata']
+
+
+@pytest.mark.asyncio
+async def test_delete_task_field_headers_async():
+    client = DataplexServiceAsyncClient(
+        credentials=async_anonymous_credentials(),
+    )
+
+    # Any value that is part of the HTTP/1.1 URI should be sent as
+    # a field header. Set these to a non-empty value.
+    request = service.DeleteTaskRequest()
+
+    request.name = 'name_value'
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.delete_task),
+            '__call__') as call:
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(operations_pb2.Operation(name='operations/op'))
+        await client.delete_task(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == request
+
+    # Establish that the field header was sent.
+    _, _, kw = call.mock_calls[0]
+    assert (
+        'x-goog-request-params',
+        'name=name_value',
+    ) in kw['metadata']
+
+
+def test_delete_task_flattened():
+    client = DataplexServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.delete_task),
+            '__call__') as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = operations_pb2.Operation(name='operations/op')
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        client.delete_task(
+            name='name_value',
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        arg = args[0].name
+        mock_val = 'name_value'
+        assert arg == mock_val
+
+
+def test_delete_task_flattened_error():
+    client = DataplexServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        client.delete_task(
+            service.DeleteTaskRequest(),
+            name='name_value',
+        )
+
+@pytest.mark.asyncio
+async def test_delete_task_flattened_async():
+    client = DataplexServiceAsyncClient(
+        credentials=async_anonymous_credentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.delete_task),
+            '__call__') as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
+            operations_pb2.Operation(name='operations/spam')
+        )
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        response = await client.delete_task(
+            name='name_value',
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = 'name_value' + assert arg == mock_val + +@pytest.mark.asyncio +async def test_delete_task_flattened_error_async(): + client = DataplexServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.delete_task( + service.DeleteTaskRequest(), + name='name_value', + ) + + +@pytest.mark.parametrize("request_type", [ + service.ListTasksRequest, + dict, +]) +def test_list_tasks(request_type, transport: str = 'grpc'): + client = DataplexServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_tasks), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = service.ListTasksResponse( + next_page_token='next_page_token_value', + unreachable_locations=['unreachable_locations_value'], + ) + response = client.list_tasks(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = service.ListTasksRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListTasksPager) + assert response.next_page_token == 'next_page_token_value' + assert response.unreachable_locations == ['unreachable_locations_value'] + + +def test_list_tasks_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = DataplexServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = service.ListTasksRequest( + parent='parent_value', + page_token='page_token_value', + filter='filter_value', + order_by='order_by_value', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_tasks), + '__call__') as call: + call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. 
+        client.list_tasks(request=request)
+        call.assert_called()
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == service.ListTasksRequest(
+            parent='parent_value',
+            page_token='page_token_value',
+            filter='filter_value',
+            order_by='order_by_value',
+        )
+
+def test_list_tasks_use_cached_wrapped_rpc():
+    # Clients should use _prep_wrapped_messages to create cached wrapped rpcs,
+    # instead of constructing them on each call
+    with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn:
+        client = DataplexServiceClient(
+            credentials=ga_credentials.AnonymousCredentials(),
+            transport="grpc",
+        )
+
+        # Should wrap all calls on client creation
+        assert wrapper_fn.call_count > 0
+        wrapper_fn.reset_mock()
+
+        # Ensure method has been cached
+        assert client._transport.list_tasks in client._transport._wrapped_methods
+
+        # Replace cached wrapped function with mock
+        mock_rpc = mock.Mock()
+        mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string.
+        client._transport._wrapped_methods[client._transport.list_tasks] = mock_rpc
+        request = {}
+        client.list_tasks(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert mock_rpc.call_count == 1
+
+        client.list_tasks(request)
+
+        # Establish that a new wrapper was not created for this call
+        assert wrapper_fn.call_count == 0
+        assert mock_rpc.call_count == 2
+
+@pytest.mark.asyncio
+async def test_list_tasks_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"):
+    # Clients should use _prep_wrapped_messages to create cached wrapped rpcs,
+    # instead of constructing them on each call
+    with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn:
+        client = DataplexServiceAsyncClient(
+            credentials=async_anonymous_credentials(),
+            transport=transport,
+        )
+
+        # Should wrap all calls on client creation
+        assert wrapper_fn.call_count > 0
+        wrapper_fn.reset_mock()
+
+        # Ensure method has been cached
+        assert client._client._transport.list_tasks in client._client._transport._wrapped_methods
+
+        # Replace cached wrapped function with mock
+        mock_rpc = mock.AsyncMock()
+        mock_rpc.return_value = mock.Mock()
+        client._client._transport._wrapped_methods[client._client._transport.list_tasks] = mock_rpc
+
+        request = {}
+        await client.list_tasks(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert mock_rpc.call_count == 1
+
+        await client.list_tasks(request)
+
+        # Establish that a new wrapper was not created for this call
+        assert wrapper_fn.call_count == 0
+        assert mock_rpc.call_count == 2
+
+@pytest.mark.asyncio
+async def test_list_tasks_async(transport: str = 'grpc_asyncio', request_type=service.ListTasksRequest):
+    client = DataplexServiceAsyncClient(
+        credentials=async_anonymous_credentials(),
+        transport=transport,
+    )
+
+    # Everything is optional in proto3 as far as the runtime is concerned,
+    # and we are mocking out the actual API, so just send an empty request.
+    request = request_type()
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.list_tasks),
+            '__call__') as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(service.ListTasksResponse(
+            next_page_token='next_page_token_value',
+            unreachable_locations=['unreachable_locations_value'],
+        ))
+        response = await client.list_tasks(request)
+
+        # Establish that the underlying gRPC stub method was called.
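+        # At least one call must have reached the stub, and the request it
+        # received must echo the default ListTasksRequest.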
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = service.ListTasksRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListTasksAsyncPager) + assert response.next_page_token == 'next_page_token_value' + assert response.unreachable_locations == ['unreachable_locations_value'] + + +@pytest.mark.asyncio +async def test_list_tasks_async_from_dict(): + await test_list_tasks_async(request_type=dict) + +def test_list_tasks_field_headers(): + client = DataplexServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = service.ListTasksRequest() + + request.parent = 'parent_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_tasks), + '__call__') as call: + call.return_value = service.ListTasksResponse() + client.list_tasks(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'parent=parent_value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_list_tasks_field_headers_async(): + client = DataplexServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = service.ListTasksRequest() + + request.parent = 'parent_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_tasks), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(service.ListTasksResponse()) + await client.list_tasks(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'parent=parent_value', + ) in kw['metadata'] + + +def test_list_tasks_flattened(): + client = DataplexServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_tasks), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = service.ListTasksResponse() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.list_tasks( + parent='parent_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = 'parent_value' + assert arg == mock_val + + +def test_list_tasks_flattened_error(): + client = DataplexServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
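+    # Mixing the two calling conventions is ambiguous, so the client raises
+    # before any RPC is attempted.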
+    with pytest.raises(ValueError):
+        client.list_tasks(
+            service.ListTasksRequest(),
+            parent='parent_value',
+        )
+
+@pytest.mark.asyncio
+async def test_list_tasks_flattened_async():
+    client = DataplexServiceAsyncClient(
+        credentials=async_anonymous_credentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.list_tasks),
+            '__call__') as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(service.ListTasksResponse())
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        response = await client.list_tasks(
+            parent='parent_value',
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        arg = args[0].parent
+        mock_val = 'parent_value'
+        assert arg == mock_val
+
+@pytest.mark.asyncio
+async def test_list_tasks_flattened_error_async():
+    client = DataplexServiceAsyncClient(
+        credentials=async_anonymous_credentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        await client.list_tasks(
+            service.ListTasksRequest(),
+            parent='parent_value',
+        )
+
+
+def test_list_tasks_pager(transport_name: str = "grpc"):
+    client = DataplexServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport_name,
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.list_tasks),
+            '__call__') as call:
+        # Set the response to a series of pages.
+        call.side_effect = (
+            service.ListTasksResponse(
+                tasks=[
+                    tasks.Task(),
+                    tasks.Task(),
+                    tasks.Task(),
+                ],
+                next_page_token='abc',
+            ),
+            service.ListTasksResponse(
+                tasks=[],
+                next_page_token='def',
+            ),
+            service.ListTasksResponse(
+                tasks=[
+                    tasks.Task(),
+                ],
+                next_page_token='ghi',
+            ),
+            service.ListTasksResponse(
+                tasks=[
+                    tasks.Task(),
+                    tasks.Task(),
+                ],
+            ),
+            RuntimeError,
+        )
+
+        expected_metadata = ()
+        retry = retries.Retry()
+        timeout = 5
+        expected_metadata = tuple(expected_metadata) + (
+            gapic_v1.routing_header.to_grpc_metadata((
+                ('parent', ''),
+            )),
+        )
+        pager = client.list_tasks(request={}, retry=retry, timeout=timeout)
+
+        assert pager._metadata == expected_metadata
+        assert pager._retry == retry
+        assert pager._timeout == timeout
+
+        results = list(pager)
+        assert len(results) == 6
+        assert all(isinstance(i, tasks.Task)
+                   for i in results)
+
+
+def test_list_tasks_pages(transport_name: str = "grpc"):
+    client = DataplexServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport_name,
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.list_tasks),
+            '__call__') as call:
+        # Set the response to a series of pages.
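+        # Same four-page series as above; iterating `.pages` exposes each
+        # raw response so its next_page_token can be checked directly.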
+ call.side_effect = ( + service.ListTasksResponse( + tasks=[ + tasks.Task(), + tasks.Task(), + tasks.Task(), + ], + next_page_token='abc', + ), + service.ListTasksResponse( + tasks=[], + next_page_token='def', + ), + service.ListTasksResponse( + tasks=[ + tasks.Task(), + ], + next_page_token='ghi', + ), + service.ListTasksResponse( + tasks=[ + tasks.Task(), + tasks.Task(), + ], + ), + RuntimeError, + ) + pages = list(client.list_tasks(request={}).pages) + for page_, token in zip(pages, ['abc','def','ghi', '']): + assert page_.raw_page.next_page_token == token + +@pytest.mark.asyncio +async def test_list_tasks_async_pager(): + client = DataplexServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_tasks), + '__call__', new_callable=mock.AsyncMock) as call: + # Set the response to a series of pages. + call.side_effect = ( + service.ListTasksResponse( + tasks=[ + tasks.Task(), + tasks.Task(), + tasks.Task(), + ], + next_page_token='abc', + ), + service.ListTasksResponse( + tasks=[], + next_page_token='def', + ), + service.ListTasksResponse( + tasks=[ + tasks.Task(), + ], + next_page_token='ghi', + ), + service.ListTasksResponse( + tasks=[ + tasks.Task(), + tasks.Task(), + ], + ), + RuntimeError, + ) + async_pager = await client.list_tasks(request={},) + assert async_pager.next_page_token == 'abc' + responses = [] + async for response in async_pager: # pragma: no branch + responses.append(response) + + assert len(responses) == 6 + assert all(isinstance(i, tasks.Task) + for i in responses) + + +@pytest.mark.asyncio +async def test_list_tasks_async_pages(): + client = DataplexServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_tasks), + '__call__', new_callable=mock.AsyncMock) as call: + # Set the response to a series of pages. + call.side_effect = ( + service.ListTasksResponse( + tasks=[ + tasks.Task(), + tasks.Task(), + tasks.Task(), + ], + next_page_token='abc', + ), + service.ListTasksResponse( + tasks=[], + next_page_token='def', + ), + service.ListTasksResponse( + tasks=[ + tasks.Task(), + ], + next_page_token='ghi', + ), + service.ListTasksResponse( + tasks=[ + tasks.Task(), + tasks.Task(), + ], + ), + RuntimeError, + ) + pages = [] + # Workaround issue in python 3.9 related to code coverage by adding `# pragma: no branch` + # See https://github.com/googleapis/gapic-generator-python/pull/1174#issuecomment-1025132372 + async for page_ in ( # pragma: no branch + await client.list_tasks(request={}) + ).pages: + pages.append(page_) + for page_, token in zip(pages, ['abc','def','ghi', '']): + assert page_.raw_page.next_page_token == token + +@pytest.mark.parametrize("request_type", [ + service.GetTaskRequest, + dict, +]) +def test_get_task(request_type, transport: str = 'grpc'): + client = DataplexServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_task), + '__call__') as call: + # Designate an appropriate return value for the call. 
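+        # Populate every scalar field so the assertions below can confirm
+        # the response is passed through to the caller unchanged.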
+ call.return_value = tasks.Task( + name='name_value', + uid='uid_value', + description='description_value', + display_name='display_name_value', + state=resources.State.ACTIVE, + ) + response = client.get_task(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = service.GetTaskRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, tasks.Task) + assert response.name == 'name_value' + assert response.uid == 'uid_value' + assert response.description == 'description_value' + assert response.display_name == 'display_name_value' + assert response.state == resources.State.ACTIVE + + +def test_get_task_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = DataplexServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = service.GetTaskRequest( + name='name_value', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_task), + '__call__') as call: + call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client.get_task(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == service.GetTaskRequest( + name='name_value', + ) + +def test_get_task_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = DataplexServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.get_task in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client._transport._wrapped_methods[client._transport.get_task] = mock_rpc + request = {} + client.get_task(request) + + # Establish that the underlying gRPC stub method was called. 
+        assert mock_rpc.call_count == 1
+
+        client.get_task(request)
+
+        # Establish that a new wrapper was not created for this call
+        assert wrapper_fn.call_count == 0
+        assert mock_rpc.call_count == 2
+
+@pytest.mark.asyncio
+async def test_get_task_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"):
+    # Clients should use _prep_wrapped_messages to create cached wrapped rpcs,
+    # instead of constructing them on each call
+    with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn:
+        client = DataplexServiceAsyncClient(
+            credentials=async_anonymous_credentials(),
+            transport=transport,
+        )
+
+        # Should wrap all calls on client creation
+        assert wrapper_fn.call_count > 0
+        wrapper_fn.reset_mock()
+
+        # Ensure method has been cached
+        assert client._client._transport.get_task in client._client._transport._wrapped_methods
+
+        # Replace cached wrapped function with mock
+        mock_rpc = mock.AsyncMock()
+        mock_rpc.return_value = mock.Mock()
+        client._client._transport._wrapped_methods[client._client._transport.get_task] = mock_rpc
+
+        request = {}
+        await client.get_task(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert mock_rpc.call_count == 1
+
+        await client.get_task(request)
+
+        # Establish that a new wrapper was not created for this call
+        assert wrapper_fn.call_count == 0
+        assert mock_rpc.call_count == 2
+
+@pytest.mark.asyncio
+async def test_get_task_async(transport: str = 'grpc_asyncio', request_type=service.GetTaskRequest):
+    client = DataplexServiceAsyncClient(
+        credentials=async_anonymous_credentials(),
+        transport=transport,
+    )
+
+    # Everything is optional in proto3 as far as the runtime is concerned,
+    # and we are mocking out the actual API, so just send an empty request.
+    request = request_type()
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.get_task),
+            '__call__') as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(tasks.Task(
+            name='name_value',
+            uid='uid_value',
+            description='description_value',
+            display_name='display_name_value',
+            state=resources.State.ACTIVE,
+        ))
+        response = await client.get_task(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        request = service.GetTaskRequest()
+        assert args[0] == request
+
+        # Establish that the response is the type that we expect.
+        assert isinstance(response, tasks.Task)
+        assert response.name == 'name_value'
+        assert response.uid == 'uid_value'
+        assert response.description == 'description_value'
+        assert response.display_name == 'display_name_value'
+        assert response.state == resources.State.ACTIVE
+
+
+@pytest.mark.asyncio
+async def test_get_task_async_from_dict():
+    await test_get_task_async(request_type=dict)
+
+def test_get_task_field_headers():
+    client = DataplexServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Any value that is part of the HTTP/1.1 URI should be sent as
+    # a field header. Set these to a non-empty value.
+    request = service.GetTaskRequest()
+
+    request.name = 'name_value'
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.get_task),
+            '__call__') as call:
+        call.return_value = tasks.Task()
+        client.get_task(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == request
+
+    # Establish that the field header was sent.
+    _, _, kw = call.mock_calls[0]
+    assert (
+        'x-goog-request-params',
+        'name=name_value',
+    ) in kw['metadata']
+
+
+@pytest.mark.asyncio
+async def test_get_task_field_headers_async():
+    client = DataplexServiceAsyncClient(
+        credentials=async_anonymous_credentials(),
+    )
+
+    # Any value that is part of the HTTP/1.1 URI should be sent as
+    # a field header. Set these to a non-empty value.
+    request = service.GetTaskRequest()
+
+    request.name = 'name_value'
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.get_task),
+            '__call__') as call:
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(tasks.Task())
+        await client.get_task(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == request
+
+    # Establish that the field header was sent.
+    _, _, kw = call.mock_calls[0]
+    assert (
+        'x-goog-request-params',
+        'name=name_value',
+    ) in kw['metadata']
+
+
+def test_get_task_flattened():
+    client = DataplexServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.get_task),
+            '__call__') as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = tasks.Task()
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        client.get_task(
+            name='name_value',
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        arg = args[0].name
+        mock_val = 'name_value'
+        assert arg == mock_val
+
+
+def test_get_task_flattened_error():
+    client = DataplexServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        client.get_task(
+            service.GetTaskRequest(),
+            name='name_value',
+        )
+
+@pytest.mark.asyncio
+async def test_get_task_flattened_async():
+    client = DataplexServiceAsyncClient(
+        credentials=async_anonymous_credentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.get_task),
+            '__call__') as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(tasks.Task())
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        response = await client.get_task(
+            name='name_value',
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        arg = args[0].name
+        mock_val = 'name_value'
+        assert arg == mock_val
+
+@pytest.mark.asyncio
+async def test_get_task_flattened_error_async():
+    client = DataplexServiceAsyncClient(
+        credentials=async_anonymous_credentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+ with pytest.raises(ValueError): + await client.get_task( + service.GetTaskRequest(), + name='name_value', + ) + + +@pytest.mark.parametrize("request_type", [ + service.ListJobsRequest, + dict, +]) +def test_list_jobs(request_type, transport: str = 'grpc'): + client = DataplexServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_jobs), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = service.ListJobsResponse( + next_page_token='next_page_token_value', + ) + response = client.list_jobs(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = service.ListJobsRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListJobsPager) + assert response.next_page_token == 'next_page_token_value' + + +def test_list_jobs_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = DataplexServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = service.ListJobsRequest( + parent='parent_value', + page_token='page_token_value', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_jobs), + '__call__') as call: + call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client.list_jobs(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == service.ListJobsRequest( + parent='parent_value', + page_token='page_token_value', + ) + +def test_list_jobs_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = DataplexServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.list_jobs in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client._transport._wrapped_methods[client._transport.list_jobs] = mock_rpc + request = {} + client.list_jobs(request) + + # Establish that the underlying gRPC stub method was called. 
+        assert mock_rpc.call_count == 1
+
+        client.list_jobs(request)
+
+        # Establish that a new wrapper was not created for this call
+        assert wrapper_fn.call_count == 0
+        assert mock_rpc.call_count == 2
+
+@pytest.mark.asyncio
+async def test_list_jobs_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"):
+    # Clients should use _prep_wrapped_messages to create cached wrapped rpcs,
+    # instead of constructing them on each call
+    with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn:
+        client = DataplexServiceAsyncClient(
+            credentials=async_anonymous_credentials(),
+            transport=transport,
+        )
+
+        # Should wrap all calls on client creation
+        assert wrapper_fn.call_count > 0
+        wrapper_fn.reset_mock()
+
+        # Ensure method has been cached
+        assert client._client._transport.list_jobs in client._client._transport._wrapped_methods
+
+        # Replace cached wrapped function with mock
+        mock_rpc = mock.AsyncMock()
+        mock_rpc.return_value = mock.Mock()
+        client._client._transport._wrapped_methods[client._client._transport.list_jobs] = mock_rpc
+
+        request = {}
+        await client.list_jobs(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert mock_rpc.call_count == 1
+
+        await client.list_jobs(request)
+
+        # Establish that a new wrapper was not created for this call
+        assert wrapper_fn.call_count == 0
+        assert mock_rpc.call_count == 2
+
+@pytest.mark.asyncio
+async def test_list_jobs_async(transport: str = 'grpc_asyncio', request_type=service.ListJobsRequest):
+    client = DataplexServiceAsyncClient(
+        credentials=async_anonymous_credentials(),
+        transport=transport,
+    )
+
+    # Everything is optional in proto3 as far as the runtime is concerned,
+    # and we are mocking out the actual API, so just send an empty request.
+    request = request_type()
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.list_jobs),
+            '__call__') as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(service.ListJobsResponse(
+            next_page_token='next_page_token_value',
+        ))
+        response = await client.list_jobs(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        request = service.ListJobsRequest()
+        assert args[0] == request
+
+    # Establish that the response is the type that we expect.
+    assert isinstance(response, pagers.ListJobsAsyncPager)
+    assert response.next_page_token == 'next_page_token_value'
+
+
+@pytest.mark.asyncio
+async def test_list_jobs_async_from_dict():
+    await test_list_jobs_async(request_type=dict)
+
+def test_list_jobs_field_headers():
+    client = DataplexServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Any value that is part of the HTTP/1.1 URI should be sent as
+    # a field header. Set these to a non-empty value.
+    request = service.ListJobsRequest()
+
+    request.parent = 'parent_value'
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.list_jobs),
+            '__call__') as call:
+        call.return_value = service.ListJobsResponse()
+        client.list_jobs(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == request
+
+    # Establish that the field header was sent.
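+    # (The x-goog-request-params metadata entry carries the URL-encoded
+    # resource path, 'parent=parent_value' here, so the backend can route
+    # the request to the right location.)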
+    _, _, kw = call.mock_calls[0]
+    assert (
+        'x-goog-request-params',
+        'parent=parent_value',
+    ) in kw['metadata']
+
+
+@pytest.mark.asyncio
+async def test_list_jobs_field_headers_async():
+    client = DataplexServiceAsyncClient(
+        credentials=async_anonymous_credentials(),
+    )
+
+    # Any value that is part of the HTTP/1.1 URI should be sent as
+    # a field header. Set these to a non-empty value.
+    request = service.ListJobsRequest()
+
+    request.parent = 'parent_value'
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.list_jobs),
+            '__call__') as call:
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(service.ListJobsResponse())
+        await client.list_jobs(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == request
+
+    # Establish that the field header was sent.
+    _, _, kw = call.mock_calls[0]
+    assert (
+        'x-goog-request-params',
+        'parent=parent_value',
+    ) in kw['metadata']
+
+
+def test_list_jobs_flattened():
+    client = DataplexServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.list_jobs),
+            '__call__') as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = service.ListJobsResponse()
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        client.list_jobs(
+            parent='parent_value',
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        arg = args[0].parent
+        mock_val = 'parent_value'
+        assert arg == mock_val
+
+
+def test_list_jobs_flattened_error():
+    client = DataplexServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        client.list_jobs(
+            service.ListJobsRequest(),
+            parent='parent_value',
+        )
+
+@pytest.mark.asyncio
+async def test_list_jobs_flattened_async():
+    client = DataplexServiceAsyncClient(
+        credentials=async_anonymous_credentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.list_jobs),
+            '__call__') as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(service.ListJobsResponse())
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        response = await client.list_jobs(
+            parent='parent_value',
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        arg = args[0].parent
+        mock_val = 'parent_value'
+        assert arg == mock_val
+
+@pytest.mark.asyncio
+async def test_list_jobs_flattened_error_async():
+    client = DataplexServiceAsyncClient(
+        credentials=async_anonymous_credentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        await client.list_jobs(
+            service.ListJobsRequest(),
+            parent='parent_value',
+        )
+
+
+def test_list_jobs_pager(transport_name: str = "grpc"):
+    client = DataplexServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport_name,
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.list_jobs),
+            '__call__') as call:
+        # Set the response to a series of pages.
+        call.side_effect = (
+            service.ListJobsResponse(
+                jobs=[
+                    tasks.Job(),
+                    tasks.Job(),
+                    tasks.Job(),
+                ],
+                next_page_token='abc',
+            ),
+            service.ListJobsResponse(
+                jobs=[],
+                next_page_token='def',
+            ),
+            service.ListJobsResponse(
+                jobs=[
+                    tasks.Job(),
+                ],
+                next_page_token='ghi',
+            ),
+            service.ListJobsResponse(
+                jobs=[
+                    tasks.Job(),
+                    tasks.Job(),
+                ],
+            ),
+            RuntimeError,
+        )
+
+        retry = retries.Retry()
+        timeout = 5
+        expected_metadata = (
+            gapic_v1.routing_header.to_grpc_metadata((
+                ('parent', ''),
+            )),
+        )
+        pager = client.list_jobs(request={}, retry=retry, timeout=timeout)
+
+        assert pager._metadata == expected_metadata
+        assert pager._retry == retry
+        assert pager._timeout == timeout
+
+        results = list(pager)
+        assert len(results) == 6
+        assert all(isinstance(i, tasks.Job)
+                   for i in results)
+
+
+def test_list_jobs_pages(transport_name: str = "grpc"):
+    client = DataplexServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport_name,
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.list_jobs),
+            '__call__') as call:
+        # Set the response to a series of pages.
+        call.side_effect = (
+            service.ListJobsResponse(
+                jobs=[
+                    tasks.Job(),
+                    tasks.Job(),
+                    tasks.Job(),
+                ],
+                next_page_token='abc',
+            ),
+            service.ListJobsResponse(
+                jobs=[],
+                next_page_token='def',
+            ),
+            service.ListJobsResponse(
+                jobs=[
+                    tasks.Job(),
+                ],
+                next_page_token='ghi',
+            ),
+            service.ListJobsResponse(
+                jobs=[
+                    tasks.Job(),
+                    tasks.Job(),
+                ],
+            ),
+            RuntimeError,
+        )
+        pages = list(client.list_jobs(request={}).pages)
+        for page_, token in zip(pages, ['abc', 'def', 'ghi', '']):
+            assert page_.raw_page.next_page_token == token
+
+@pytest.mark.asyncio
+async def test_list_jobs_async_pager():
+    client = DataplexServiceAsyncClient(
+        credentials=async_anonymous_credentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.list_jobs),
+            '__call__', new_callable=mock.AsyncMock) as call:
+        # Set the response to a series of pages.
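+        # (Each response below is one page; iteration stops at the final
+        # page, whose next_page_token is empty, so the trailing RuntimeError
+        # only fires if the pager ever over-fetches.)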
+ call.side_effect = ( + service.ListJobsResponse( + jobs=[ + tasks.Job(), + tasks.Job(), + tasks.Job(), + ], + next_page_token='abc', + ), + service.ListJobsResponse( + jobs=[], + next_page_token='def', + ), + service.ListJobsResponse( + jobs=[ + tasks.Job(), + ], + next_page_token='ghi', + ), + service.ListJobsResponse( + jobs=[ + tasks.Job(), + tasks.Job(), + ], + ), + RuntimeError, + ) + async_pager = await client.list_jobs(request={},) + assert async_pager.next_page_token == 'abc' + responses = [] + async for response in async_pager: # pragma: no branch + responses.append(response) + + assert len(responses) == 6 + assert all(isinstance(i, tasks.Job) + for i in responses) + + +@pytest.mark.asyncio +async def test_list_jobs_async_pages(): + client = DataplexServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_jobs), + '__call__', new_callable=mock.AsyncMock) as call: + # Set the response to a series of pages. + call.side_effect = ( + service.ListJobsResponse( + jobs=[ + tasks.Job(), + tasks.Job(), + tasks.Job(), + ], + next_page_token='abc', + ), + service.ListJobsResponse( + jobs=[], + next_page_token='def', + ), + service.ListJobsResponse( + jobs=[ + tasks.Job(), + ], + next_page_token='ghi', + ), + service.ListJobsResponse( + jobs=[ + tasks.Job(), + tasks.Job(), + ], + ), + RuntimeError, + ) + pages = [] + # Workaround issue in python 3.9 related to code coverage by adding `# pragma: no branch` + # See https://github.com/googleapis/gapic-generator-python/pull/1174#issuecomment-1025132372 + async for page_ in ( # pragma: no branch + await client.list_jobs(request={}) + ).pages: + pages.append(page_) + for page_, token in zip(pages, ['abc','def','ghi', '']): + assert page_.raw_page.next_page_token == token + +@pytest.mark.parametrize("request_type", [ + service.RunTaskRequest, + dict, +]) +def test_run_task(request_type, transport: str = 'grpc'): + client = DataplexServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.run_task), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = service.RunTaskResponse( + ) + response = client.run_task(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = service.RunTaskRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, service.RunTaskResponse) + + +def test_run_task_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = DataplexServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. 
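+    # (Per AIP-4235 the client fills an annotated request_id-style field
+    # with a UUID4 only when it is left unset; the other string fields set
+    # here must pass through unchanged, which the assertion below verifies.)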
+ request = service.RunTaskRequest( + name='name_value', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.run_task), + '__call__') as call: + call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client.run_task(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == service.RunTaskRequest( + name='name_value', + ) + +def test_run_task_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = DataplexServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.run_task in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client._transport._wrapped_methods[client._transport.run_task] = mock_rpc + request = {} + client.run_task(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.run_task(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + +@pytest.mark.asyncio +async def test_run_task_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = DataplexServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._client._transport.run_task in client._client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[client._client._transport.run_task] = mock_rpc + + request = {} + await client.run_task(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + await client.run_task(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + +@pytest.mark.asyncio +async def test_run_task_async(transport: str = 'grpc_asyncio', request_type=service.RunTaskRequest): + client = DataplexServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.run_task), + '__call__') as call: + # Designate an appropriate return value for the call. 
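+        # (FakeUnaryUnaryCall wraps the response in an awaitable standing in
+        # for the grpc.aio call object a real async stub would return.)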
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(service.RunTaskResponse())
+        response = await client.run_task(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        request = service.RunTaskRequest()
+        assert args[0] == request
+
+    # Establish that the response is the type that we expect.
+    assert isinstance(response, service.RunTaskResponse)
+
+
+@pytest.mark.asyncio
+async def test_run_task_async_from_dict():
+    await test_run_task_async(request_type=dict)
+
+def test_run_task_field_headers():
+    client = DataplexServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Any value that is part of the HTTP/1.1 URI should be sent as
+    # a field header. Set these to a non-empty value.
+    request = service.RunTaskRequest()
+
+    request.name = 'name_value'
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.run_task),
+            '__call__') as call:
+        call.return_value = service.RunTaskResponse()
+        client.run_task(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == request
+
+    # Establish that the field header was sent.
+    _, _, kw = call.mock_calls[0]
+    assert (
+        'x-goog-request-params',
+        'name=name_value',
+    ) in kw['metadata']
+
+
+@pytest.mark.asyncio
+async def test_run_task_field_headers_async():
+    client = DataplexServiceAsyncClient(
+        credentials=async_anonymous_credentials(),
+    )
+
+    # Any value that is part of the HTTP/1.1 URI should be sent as
+    # a field header. Set these to a non-empty value.
+    request = service.RunTaskRequest()
+
+    request.name = 'name_value'
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.run_task),
+            '__call__') as call:
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(service.RunTaskResponse())
+        await client.run_task(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == request
+
+    # Establish that the field header was sent.
+    _, _, kw = call.mock_calls[0]
+    assert (
+        'x-goog-request-params',
+        'name=name_value',
+    ) in kw['metadata']
+
+
+def test_run_task_flattened():
+    client = DataplexServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.run_task),
+            '__call__') as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = service.RunTaskResponse()
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        client.run_task(
+            name='name_value',
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        arg = args[0].name
+        mock_val = 'name_value'
+        assert arg == mock_val
+
+
+def test_run_task_flattened_error():
+    client = DataplexServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        client.run_task(
+            service.RunTaskRequest(),
+            name='name_value',
+        )
+
+@pytest.mark.asyncio
+async def test_run_task_flattened_async():
+    client = DataplexServiceAsyncClient(
+        credentials=async_anonymous_credentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.run_task),
+            '__call__') as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(service.RunTaskResponse())
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        response = await client.run_task(
+            name='name_value',
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        arg = args[0].name
+        mock_val = 'name_value'
+        assert arg == mock_val
+
+@pytest.mark.asyncio
+async def test_run_task_flattened_error_async():
+    client = DataplexServiceAsyncClient(
+        credentials=async_anonymous_credentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        await client.run_task(
+            service.RunTaskRequest(),
+            name='name_value',
+        )
+
+
+@pytest.mark.parametrize("request_type", [
+  service.GetJobRequest,
+  dict,
+])
+def test_get_job(request_type, transport: str = 'grpc'):
+    client = DataplexServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport,
+    )
+
+    # Everything is optional in proto3 as far as the runtime is concerned,
+    # and we are mocking out the actual API, so just send an empty request.
+    request = request_type()
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.get_job),
+            '__call__') as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = tasks.Job(
+            name='name_value',
+            uid='uid_value',
+            state=tasks.Job.State.RUNNING,
+            retry_count=1214,
+            service=tasks.Job.Service.DATAPROC,
+            service_job='service_job_value',
+            message='message_value',
+            trigger=tasks.Job.Trigger.TASK_CONFIG,
+        )
+        response = client.get_job(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        request = service.GetJobRequest()
+        assert args[0] == request
+
+    # Establish that the response is the type that we expect.
+    assert isinstance(response, tasks.Job)
+    assert response.name == 'name_value'
+    assert response.uid == 'uid_value'
+    assert response.state == tasks.Job.State.RUNNING
+    assert response.retry_count == 1214
+    assert response.service == tasks.Job.Service.DATAPROC
+    assert response.service_job == 'service_job_value'
+    assert response.message == 'message_value'
+    assert response.trigger == tasks.Job.Trigger.TASK_CONFIG
+
+
+def test_get_job_non_empty_request_with_auto_populated_field():
+    # This test is a coverage failsafe to make sure that UUID4 fields are
+    # automatically populated, according to AIP-4235, with non-empty requests.
+    client = DataplexServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport='grpc',
+    )
+
+    # Populate all string fields in the request which are not UUID4
+    # since we want to check that UUID4 are populated automatically
+    # if they meet the requirements of AIP 4235.
+ request = service.GetJobRequest( + name='name_value', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_job), + '__call__') as call: + call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client.get_job(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == service.GetJobRequest( + name='name_value', + ) + +def test_get_job_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = DataplexServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.get_job in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client._transport._wrapped_methods[client._transport.get_job] = mock_rpc + request = {} + client.get_job(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.get_job(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + +@pytest.mark.asyncio +async def test_get_job_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = DataplexServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._client._transport.get_job in client._client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[client._client._transport.get_job] = mock_rpc + + request = {} + await client.get_job(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + await client.get_job(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + +@pytest.mark.asyncio +async def test_get_job_async(transport: str = 'grpc_asyncio', request_type=service.GetJobRequest): + client = DataplexServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_job), + '__call__') as call: + # Designate an appropriate return value for the call. 
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(tasks.Job(
+            name='name_value',
+            uid='uid_value',
+            state=tasks.Job.State.RUNNING,
+            retry_count=1214,
+            service=tasks.Job.Service.DATAPROC,
+            service_job='service_job_value',
+            message='message_value',
+            trigger=tasks.Job.Trigger.TASK_CONFIG,
+        ))
+        response = await client.get_job(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        request = service.GetJobRequest()
+        assert args[0] == request
+
+    # Establish that the response is the type that we expect.
+    assert isinstance(response, tasks.Job)
+    assert response.name == 'name_value'
+    assert response.uid == 'uid_value'
+    assert response.state == tasks.Job.State.RUNNING
+    assert response.retry_count == 1214
+    assert response.service == tasks.Job.Service.DATAPROC
+    assert response.service_job == 'service_job_value'
+    assert response.message == 'message_value'
+    assert response.trigger == tasks.Job.Trigger.TASK_CONFIG
+
+
+@pytest.mark.asyncio
+async def test_get_job_async_from_dict():
+    await test_get_job_async(request_type=dict)
+
+def test_get_job_field_headers():
+    client = DataplexServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Any value that is part of the HTTP/1.1 URI should be sent as
+    # a field header. Set these to a non-empty value.
+    request = service.GetJobRequest()
+
+    request.name = 'name_value'
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.get_job),
+            '__call__') as call:
+        call.return_value = tasks.Job()
+        client.get_job(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == request
+
+    # Establish that the field header was sent.
+    _, _, kw = call.mock_calls[0]
+    assert (
+        'x-goog-request-params',
+        'name=name_value',
+    ) in kw['metadata']
+
+
+@pytest.mark.asyncio
+async def test_get_job_field_headers_async():
+    client = DataplexServiceAsyncClient(
+        credentials=async_anonymous_credentials(),
+    )
+
+    # Any value that is part of the HTTP/1.1 URI should be sent as
+    # a field header. Set these to a non-empty value.
+    request = service.GetJobRequest()
+
+    request.name = 'name_value'
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.get_job),
+            '__call__') as call:
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(tasks.Job())
+        await client.get_job(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == request
+
+    # Establish that the field header was sent.
+    _, _, kw = call.mock_calls[0]
+    assert (
+        'x-goog-request-params',
+        'name=name_value',
+    ) in kw['metadata']
+
+
+def test_get_job_flattened():
+    client = DataplexServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.get_job),
+            '__call__') as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = tasks.Job()
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        client.get_job(
+            name='name_value',
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
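+        # (Each flattened keyword argument is copied onto the matching field
+        # of a freshly constructed request; the assertions below read those
+        # fields back off the captured request message.)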
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        arg = args[0].name
+        mock_val = 'name_value'
+        assert arg == mock_val
+
+
+def test_get_job_flattened_error():
+    client = DataplexServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        client.get_job(
+            service.GetJobRequest(),
+            name='name_value',
+        )
+
+@pytest.mark.asyncio
+async def test_get_job_flattened_async():
+    client = DataplexServiceAsyncClient(
+        credentials=async_anonymous_credentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.get_job),
+            '__call__') as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(tasks.Job())
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        response = await client.get_job(
+            name='name_value',
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        arg = args[0].name
+        mock_val = 'name_value'
+        assert arg == mock_val
+
+@pytest.mark.asyncio
+async def test_get_job_flattened_error_async():
+    client = DataplexServiceAsyncClient(
+        credentials=async_anonymous_credentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        await client.get_job(
+            service.GetJobRequest(),
+            name='name_value',
+        )
+
+
+@pytest.mark.parametrize("request_type", [
+  service.CancelJobRequest,
+  dict,
+])
+def test_cancel_job(request_type, transport: str = 'grpc'):
+    client = DataplexServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport,
+    )
+
+    # Everything is optional in proto3 as far as the runtime is concerned,
+    # and we are mocking out the actual API, so just send an empty request.
+    request = request_type()
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.cancel_job),
+            '__call__') as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = None
+        response = client.cancel_job(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        request = service.CancelJobRequest()
+        assert args[0] == request
+
+    # Establish that the response is the type that we expect.
+    assert response is None
+
+
+def test_cancel_job_non_empty_request_with_auto_populated_field():
+    # This test is a coverage failsafe to make sure that UUID4 fields are
+    # automatically populated, according to AIP-4235, with non-empty requests.
+    client = DataplexServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport='grpc',
+    )
+
+    # Populate all string fields in the request which are not UUID4
+    # since we want to check that UUID4 are populated automatically
+    # if they meet the requirements of AIP 4235.
+    request = service.CancelJobRequest(
+        name='name_value',
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
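+    # (client.transport.cancel_job is the bound gRPC stub callable; patching
+    # __call__ on its type intercepts the invocation so no channel traffic
+    # ever occurs.)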
+ with mock.patch.object( + type(client.transport.cancel_job), + '__call__') as call: + call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client.cancel_job(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == service.CancelJobRequest( + name='name_value', + ) + +def test_cancel_job_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = DataplexServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.cancel_job in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client._transport._wrapped_methods[client._transport.cancel_job] = mock_rpc + request = {} + client.cancel_job(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.cancel_job(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + +@pytest.mark.asyncio +async def test_cancel_job_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = DataplexServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._client._transport.cancel_job in client._client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[client._client._transport.cancel_job] = mock_rpc + + request = {} + await client.cancel_job(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + await client.cancel_job(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + +@pytest.mark.asyncio +async def test_cancel_job_async(transport: str = 'grpc_asyncio', request_type=service.CancelJobRequest): + client = DataplexServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.cancel_job), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + response = await client.cancel_job(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = service.CancelJobRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert response is None + + +@pytest.mark.asyncio +async def test_cancel_job_async_from_dict(): + await test_cancel_job_async(request_type=dict) + +def test_cancel_job_field_headers(): + client = DataplexServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = service.CancelJobRequest() + + request.name = 'name_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.cancel_job), + '__call__') as call: + call.return_value = None + client.cancel_job(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'name=name_value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_cancel_job_field_headers_async(): + client = DataplexServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = service.CancelJobRequest() + + request.name = 'name_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.cancel_job), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + await client.cancel_job(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'name=name_value', + ) in kw['metadata'] + + +def test_cancel_job_flattened(): + client = DataplexServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.cancel_job), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = None + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.cancel_job( + name='name_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = 'name_value' + assert arg == mock_val + + +def test_cancel_job_flattened_error(): + client = DataplexServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.cancel_job( + service.CancelJobRequest(), + name='name_value', + ) + +@pytest.mark.asyncio +async def test_cancel_job_flattened_async(): + client = DataplexServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+    with mock.patch.object(
+            type(client.transport.cancel_job),
+            '__call__') as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None)
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        response = await client.cancel_job(
+            name='name_value',
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        arg = args[0].name
+        mock_val = 'name_value'
+        assert arg == mock_val
+
+@pytest.mark.asyncio
+async def test_cancel_job_flattened_error_async():
+    client = DataplexServiceAsyncClient(
+        credentials=async_anonymous_credentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        await client.cancel_job(
+            service.CancelJobRequest(),
+            name='name_value',
+        )
+
+
+@pytest.mark.parametrize("request_type", [
+  service.CreateEnvironmentRequest,
+  dict,
+])
+def test_create_environment(request_type, transport: str = 'grpc'):
+    client = DataplexServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport,
+    )
+
+    # Everything is optional in proto3 as far as the runtime is concerned,
+    # and we are mocking out the actual API, so just send an empty request.
+    request = request_type()
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.create_environment),
+            '__call__') as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = operations_pb2.Operation(name='operations/spam')
+        response = client.create_environment(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        request = service.CreateEnvironmentRequest()
+        assert args[0] == request
+
+    # Establish that the response is the type that we expect.
+    assert isinstance(response, future.Future)
+
+
+def test_create_environment_non_empty_request_with_auto_populated_field():
+    # This test is a coverage failsafe to make sure that UUID4 fields are
+    # automatically populated, according to AIP-4235, with non-empty requests.
+    client = DataplexServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport='grpc',
+    )
+
+    # Populate all string fields in the request which are not UUID4
+    # since we want to check that UUID4 are populated automatically
+    # if they meet the requirements of AIP 4235.
+    request = service.CreateEnvironmentRequest(
+        parent='parent_value',
+        environment_id='environment_id_value',
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.create_environment),
+            '__call__') as call:
+        call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string.
+ client.create_environment(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == service.CreateEnvironmentRequest( + parent='parent_value', + environment_id='environment_id_value', + ) + +def test_create_environment_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = DataplexServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.create_environment in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client._transport._wrapped_methods[client._transport.create_environment] = mock_rpc + request = {} + client.create_environment(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.create_environment(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + +@pytest.mark.asyncio +async def test_create_environment_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = DataplexServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._client._transport.create_environment in client._client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[client._client._transport.create_environment] = mock_rpc + + request = {} + await client.create_environment(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + await client.create_environment(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + +@pytest.mark.asyncio +async def test_create_environment_async(transport: str = 'grpc_asyncio', request_type=service.CreateEnvironmentRequest): + client = DataplexServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. 
+ request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_environment), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name='operations/spam') + ) + response = await client.create_environment(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = service.CreateEnvironmentRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +@pytest.mark.asyncio +async def test_create_environment_async_from_dict(): + await test_create_environment_async(request_type=dict) + +def test_create_environment_field_headers(): + client = DataplexServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = service.CreateEnvironmentRequest() + + request.parent = 'parent_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_environment), + '__call__') as call: + call.return_value = operations_pb2.Operation(name='operations/op') + client.create_environment(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'parent=parent_value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_create_environment_field_headers_async(): + client = DataplexServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = service.CreateEnvironmentRequest() + + request.parent = 'parent_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_environment), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(operations_pb2.Operation(name='operations/op')) + await client.create_environment(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'parent=parent_value', + ) in kw['metadata'] + + +def test_create_environment_flattened(): + client = DataplexServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_environment), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name='operations/op') + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. 
+        client.create_environment(
+            parent='parent_value',
+            environment=analyze.Environment(name='name_value'),
+            environment_id='environment_id_value',
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        arg = args[0].parent
+        mock_val = 'parent_value'
+        assert arg == mock_val
+        arg = args[0].environment
+        mock_val = analyze.Environment(name='name_value')
+        assert arg == mock_val
+        arg = args[0].environment_id
+        mock_val = 'environment_id_value'
+        assert arg == mock_val
+
+
+def test_create_environment_flattened_error():
+    client = DataplexServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        client.create_environment(
+            service.CreateEnvironmentRequest(),
+            parent='parent_value',
+            environment=analyze.Environment(name='name_value'),
+            environment_id='environment_id_value',
+        )
+
+@pytest.mark.asyncio
+async def test_create_environment_flattened_async():
+    client = DataplexServiceAsyncClient(
+        credentials=async_anonymous_credentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.create_environment),
+            '__call__') as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
+            operations_pb2.Operation(name='operations/spam')
+        )
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        response = await client.create_environment(
+            parent='parent_value',
+            environment=analyze.Environment(name='name_value'),
+            environment_id='environment_id_value',
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        arg = args[0].parent
+        mock_val = 'parent_value'
+        assert arg == mock_val
+        arg = args[0].environment
+        mock_val = analyze.Environment(name='name_value')
+        assert arg == mock_val
+        arg = args[0].environment_id
+        mock_val = 'environment_id_value'
+        assert arg == mock_val
+
+@pytest.mark.asyncio
+async def test_create_environment_flattened_error_async():
+    client = DataplexServiceAsyncClient(
+        credentials=async_anonymous_credentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        await client.create_environment(
+            service.CreateEnvironmentRequest(),
+            parent='parent_value',
+            environment=analyze.Environment(name='name_value'),
+            environment_id='environment_id_value',
+        )
+
+
+@pytest.mark.parametrize("request_type", [
+  service.UpdateEnvironmentRequest,
+  dict,
+])
+def test_update_environment(request_type, transport: str = 'grpc'):
+    client = DataplexServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport,
+    )
+
+    # Everything is optional in proto3 as far as the runtime is concerned,
+    # and we are mocking out the actual API, so just send an empty request.
+    request = request_type()
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.update_environment),
+            '__call__') as call:
+        # Designate an appropriate return value for the call.
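+        # (The transport returns a raw operations_pb2.Operation; the client
+        # wraps it in an api_core operation future, which is why these tests
+        # assert isinstance(response, future.Future).)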
+ call.return_value = operations_pb2.Operation(name='operations/spam') + response = client.update_environment(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = service.UpdateEnvironmentRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +def test_update_environment_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = DataplexServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = service.UpdateEnvironmentRequest( + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_environment), + '__call__') as call: + call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client.update_environment(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == service.UpdateEnvironmentRequest( + ) + +def test_update_environment_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = DataplexServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.update_environment in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client._transport._wrapped_methods[client._transport.update_environment] = mock_rpc + request = {} + client.update_environment(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.update_environment(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + +@pytest.mark.asyncio +async def test_update_environment_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = DataplexServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._client._transport.update_environment in client._client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[client._client._transport.update_environment] = mock_rpc + + request = {} + await client.update_environment(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + await client.update_environment(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + +@pytest.mark.asyncio +async def test_update_environment_async(transport: str = 'grpc_asyncio', request_type=service.UpdateEnvironmentRequest): + client = DataplexServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_environment), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name='operations/spam') + ) + response = await client.update_environment(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = service.UpdateEnvironmentRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +@pytest.mark.asyncio +async def test_update_environment_async_from_dict(): + await test_update_environment_async(request_type=dict) + +def test_update_environment_field_headers(): + client = DataplexServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = service.UpdateEnvironmentRequest() + + request.environment.name = 'name_value' + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.update_environment), + '__call__') as call: + call.return_value = operations_pb2.Operation(name='operations/op') + client.update_environment(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'environment.name=name_value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_update_environment_field_headers_async(): + client = DataplexServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = service.UpdateEnvironmentRequest() + + request.environment.name = 'name_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_environment), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(operations_pb2.Operation(name='operations/op')) + await client.update_environment(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'environment.name=name_value', + ) in kw['metadata'] + + +def test_update_environment_flattened(): + client = DataplexServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_environment), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name='operations/op') + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.update_environment( + environment=analyze.Environment(name='name_value'), + update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].environment + mock_val = analyze.Environment(name='name_value') + assert arg == mock_val + arg = args[0].update_mask + mock_val = field_mask_pb2.FieldMask(paths=['paths_value']) + assert arg == mock_val + + +def test_update_environment_flattened_error(): + client = DataplexServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.update_environment( + service.UpdateEnvironmentRequest(), + environment=analyze.Environment(name='name_value'), + update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), + ) + +@pytest.mark.asyncio +async def test_update_environment_flattened_async(): + client = DataplexServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_environment), + '__call__') as call: + # Designate an appropriate return value for the call. 
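+        # The async client awaits the transport call, so the mocked value is
+        # wrapped in a FakeUnaryUnaryCall to make it awaitable.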
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
+            operations_pb2.Operation(name='operations/spam')
+        )
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        response = await client.update_environment(
+            environment=analyze.Environment(name='name_value'),
+            update_mask=field_mask_pb2.FieldMask(paths=['paths_value']),
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        arg = args[0].environment
+        mock_val = analyze.Environment(name='name_value')
+        assert arg == mock_val
+        arg = args[0].update_mask
+        mock_val = field_mask_pb2.FieldMask(paths=['paths_value'])
+        assert arg == mock_val
+
+@pytest.mark.asyncio
+async def test_update_environment_flattened_error_async():
+    client = DataplexServiceAsyncClient(
+        credentials=async_anonymous_credentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        await client.update_environment(
+            service.UpdateEnvironmentRequest(),
+            environment=analyze.Environment(name='name_value'),
+            update_mask=field_mask_pb2.FieldMask(paths=['paths_value']),
+        )
+
+
+@pytest.mark.parametrize("request_type", [
+  service.DeleteEnvironmentRequest,
+  dict,
+])
+def test_delete_environment(request_type, transport: str = 'grpc'):
+    client = DataplexServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport,
+    )
+
+    # Everything is optional in proto3 as far as the runtime is concerned,
+    # and we are mocking out the actual API, so just send an empty request.
+    request = request_type()
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.delete_environment),
+            '__call__') as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = operations_pb2.Operation(name='operations/spam')
+        response = client.delete_environment(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        request = service.DeleteEnvironmentRequest()
+        assert args[0] == request
+
+    # Establish that the response is the type that we expect.
+    assert isinstance(response, future.Future)
+
+
+def test_delete_environment_non_empty_request_with_auto_populated_field():
+    # This test is a coverage failsafe to make sure that UUID4 fields are
+    # automatically populated, according to AIP-4235, with non-empty requests.
+    client = DataplexServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport='grpc',
+    )
+
+    # Populate all string fields in the request which are not UUID4
+    # since we want to check that UUID4 are populated automatically
+    # if they meet the requirements of AIP 4235.
+    request = service.DeleteEnvironmentRequest(
+        name='name_value',
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.delete_environment),
+            '__call__') as call:
+        call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string.
+ client.delete_environment(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == service.DeleteEnvironmentRequest( + name='name_value', + ) + +def test_delete_environment_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = DataplexServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.delete_environment in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client._transport._wrapped_methods[client._transport.delete_environment] = mock_rpc + request = {} + client.delete_environment(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.delete_environment(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + +@pytest.mark.asyncio +async def test_delete_environment_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = DataplexServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._client._transport.delete_environment in client._client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[client._client._transport.delete_environment] = mock_rpc + + request = {} + await client.delete_environment(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + await client.delete_environment(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + +@pytest.mark.asyncio +async def test_delete_environment_async(transport: str = 'grpc_asyncio', request_type=service.DeleteEnvironmentRequest): + client = DataplexServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. 
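+    # The stub returns a raw long-running Operation proto; the client is
+    # expected to wrap it in an operation future (asserted below).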
+ with mock.patch.object( + type(client.transport.delete_environment), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name='operations/spam') + ) + response = await client.delete_environment(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = service.DeleteEnvironmentRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +@pytest.mark.asyncio +async def test_delete_environment_async_from_dict(): + await test_delete_environment_async(request_type=dict) + +def test_delete_environment_field_headers(): + client = DataplexServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = service.DeleteEnvironmentRequest() + + request.name = 'name_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_environment), + '__call__') as call: + call.return_value = operations_pb2.Operation(name='operations/op') + client.delete_environment(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'name=name_value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_delete_environment_field_headers_async(): + client = DataplexServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = service.DeleteEnvironmentRequest() + + request.name = 'name_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_environment), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(operations_pb2.Operation(name='operations/op')) + await client.delete_environment(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'name=name_value', + ) in kw['metadata'] + + +def test_delete_environment_flattened(): + client = DataplexServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_environment), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name='operations/op') + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.delete_environment( + name='name_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. 
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        arg = args[0].name
+        mock_val = 'name_value'
+        assert arg == mock_val
+
+
+def test_delete_environment_flattened_error():
+    client = DataplexServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        client.delete_environment(
+            service.DeleteEnvironmentRequest(),
+            name='name_value',
+        )
+
+@pytest.mark.asyncio
+async def test_delete_environment_flattened_async():
+    client = DataplexServiceAsyncClient(
+        credentials=async_anonymous_credentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.delete_environment),
+            '__call__') as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
+            operations_pb2.Operation(name='operations/spam')
+        )
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        response = await client.delete_environment(
+            name='name_value',
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        arg = args[0].name
+        mock_val = 'name_value'
+        assert arg == mock_val
+
+@pytest.mark.asyncio
+async def test_delete_environment_flattened_error_async():
+    client = DataplexServiceAsyncClient(
+        credentials=async_anonymous_credentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        await client.delete_environment(
+            service.DeleteEnvironmentRequest(),
+            name='name_value',
+        )
+
+
+@pytest.mark.parametrize("request_type", [
+  service.ListEnvironmentsRequest,
+  dict,
+])
+def test_list_environments(request_type, transport: str = 'grpc'):
+    client = DataplexServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport,
+    )
+
+    # Everything is optional in proto3 as far as the runtime is concerned,
+    # and we are mocking out the actual API, so just send an empty request.
+    request = request_type()
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.list_environments),
+            '__call__') as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = service.ListEnvironmentsResponse(
+            next_page_token='next_page_token_value',
+        )
+        response = client.list_environments(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        request = service.ListEnvironmentsRequest()
+        assert args[0] == request
+
+    # Establish that the response is the type that we expect.
+    assert isinstance(response, pagers.ListEnvironmentsPager)
+    assert response.next_page_token == 'next_page_token_value'
+
+
+def test_list_environments_non_empty_request_with_auto_populated_field():
+    # This test is a coverage failsafe to make sure that UUID4 fields are
+    # automatically populated, according to AIP-4235, with non-empty requests.
+ client = DataplexServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = service.ListEnvironmentsRequest( + parent='parent_value', + page_token='page_token_value', + filter='filter_value', + order_by='order_by_value', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_environments), + '__call__') as call: + call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client.list_environments(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == service.ListEnvironmentsRequest( + parent='parent_value', + page_token='page_token_value', + filter='filter_value', + order_by='order_by_value', + ) + +def test_list_environments_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = DataplexServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.list_environments in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client._transport._wrapped_methods[client._transport.list_environments] = mock_rpc + request = {} + client.list_environments(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.list_environments(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + +@pytest.mark.asyncio +async def test_list_environments_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = DataplexServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._client._transport.list_environments in client._client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[client._client._transport.list_environments] = mock_rpc + + request = {} + await client.list_environments(request) + + # Establish that the underlying gRPC stub method was called. 
+        assert mock_rpc.call_count == 1
+
+        await client.list_environments(request)
+
+        # Establish that a new wrapper was not created for this call
+        assert wrapper_fn.call_count == 0
+        assert mock_rpc.call_count == 2
+
+@pytest.mark.asyncio
+async def test_list_environments_async(transport: str = 'grpc_asyncio', request_type=service.ListEnvironmentsRequest):
+    client = DataplexServiceAsyncClient(
+        credentials=async_anonymous_credentials(),
+        transport=transport,
+    )
+
+    # Everything is optional in proto3 as far as the runtime is concerned,
+    # and we are mocking out the actual API, so just send an empty request.
+    request = request_type()
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.list_environments),
+            '__call__') as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(service.ListEnvironmentsResponse(
+            next_page_token='next_page_token_value',
+        ))
+        response = await client.list_environments(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        request = service.ListEnvironmentsRequest()
+        assert args[0] == request
+
+    # Establish that the response is the type that we expect.
+    assert isinstance(response, pagers.ListEnvironmentsAsyncPager)
+    assert response.next_page_token == 'next_page_token_value'
+
+
+@pytest.mark.asyncio
+async def test_list_environments_async_from_dict():
+    await test_list_environments_async(request_type=dict)
+
+def test_list_environments_field_headers():
+    client = DataplexServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Any value that is part of the HTTP/1.1 URI should be sent as
+    # a field header. Set these to a non-empty value.
+    request = service.ListEnvironmentsRequest()
+
+    request.parent = 'parent_value'
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.list_environments),
+            '__call__') as call:
+        call.return_value = service.ListEnvironmentsResponse()
+        client.list_environments(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == request
+
+    # Establish that the field header was sent.
+    _, _, kw = call.mock_calls[0]
+    assert (
+        'x-goog-request-params',
+        'parent=parent_value',
+    ) in kw['metadata']
+
+
+@pytest.mark.asyncio
+async def test_list_environments_field_headers_async():
+    client = DataplexServiceAsyncClient(
+        credentials=async_anonymous_credentials(),
+    )
+
+    # Any value that is part of the HTTP/1.1 URI should be sent as
+    # a field header. Set these to a non-empty value.
+    request = service.ListEnvironmentsRequest()
+
+    request.parent = 'parent_value'
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.list_environments),
+            '__call__') as call:
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(service.ListEnvironmentsResponse())
+        await client.list_environments(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == request
+
+    # Establish that the field header was sent.
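+    # kw['metadata'] holds the metadata passed to the stub as (key, value)
+    # tuples, including the x-goog-request-params routing header.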
+    _, _, kw = call.mock_calls[0]
+    assert (
+        'x-goog-request-params',
+        'parent=parent_value',
+    ) in kw['metadata']
+
+
+def test_list_environments_flattened():
+    client = DataplexServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.list_environments),
+            '__call__') as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = service.ListEnvironmentsResponse()
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        client.list_environments(
+            parent='parent_value',
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        arg = args[0].parent
+        mock_val = 'parent_value'
+        assert arg == mock_val
+
+
+def test_list_environments_flattened_error():
+    client = DataplexServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        client.list_environments(
+            service.ListEnvironmentsRequest(),
+            parent='parent_value',
+        )
+
+@pytest.mark.asyncio
+async def test_list_environments_flattened_async():
+    client = DataplexServiceAsyncClient(
+        credentials=async_anonymous_credentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.list_environments),
+            '__call__') as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(service.ListEnvironmentsResponse())
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        response = await client.list_environments(
+            parent='parent_value',
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        arg = args[0].parent
+        mock_val = 'parent_value'
+        assert arg == mock_val
+
+@pytest.mark.asyncio
+async def test_list_environments_flattened_error_async():
+    client = DataplexServiceAsyncClient(
+        credentials=async_anonymous_credentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        await client.list_environments(
+            service.ListEnvironmentsRequest(),
+            parent='parent_value',
+        )
+
+
+def test_list_environments_pager(transport_name: str = "grpc"):
+    client = DataplexServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport_name,
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.list_environments),
+            '__call__') as call:
+        # Set the response to a series of pages.
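+        # Each entry of side_effect is returned by one RPC in order; the
+        # trailing RuntimeError guards against iterating past the last page.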
+        call.side_effect = (
+            service.ListEnvironmentsResponse(
+                environments=[
+                    analyze.Environment(),
+                    analyze.Environment(),
+                    analyze.Environment(),
+                ],
+                next_page_token='abc',
+            ),
+            service.ListEnvironmentsResponse(
+                environments=[],
+                next_page_token='def',
+            ),
+            service.ListEnvironmentsResponse(
+                environments=[
+                    analyze.Environment(),
+                ],
+                next_page_token='ghi',
+            ),
+            service.ListEnvironmentsResponse(
+                environments=[
+                    analyze.Environment(),
+                    analyze.Environment(),
+                ],
+            ),
+            RuntimeError,
+        )
+
+        expected_metadata = ()
+        retry = retries.Retry()
+        timeout = 5
+        expected_metadata = tuple(expected_metadata) + (
+            gapic_v1.routing_header.to_grpc_metadata((
+                ('parent', ''),
+            )),
+        )
+        pager = client.list_environments(request={}, retry=retry, timeout=timeout)
+
+        assert pager._metadata == expected_metadata
+        assert pager._retry == retry
+        assert pager._timeout == timeout
+
+        results = list(pager)
+        assert len(results) == 6
+        assert all(isinstance(i, analyze.Environment)
+                   for i in results)
+
+
+def test_list_environments_pages(transport_name: str = "grpc"):
+    client = DataplexServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport_name,
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.list_environments),
+            '__call__') as call:
+        # Set the response to a series of pages.
+        call.side_effect = (
+            service.ListEnvironmentsResponse(
+                environments=[
+                    analyze.Environment(),
+                    analyze.Environment(),
+                    analyze.Environment(),
+                ],
+                next_page_token='abc',
+            ),
+            service.ListEnvironmentsResponse(
+                environments=[],
+                next_page_token='def',
+            ),
+            service.ListEnvironmentsResponse(
+                environments=[
+                    analyze.Environment(),
+                ],
+                next_page_token='ghi',
+            ),
+            service.ListEnvironmentsResponse(
+                environments=[
+                    analyze.Environment(),
+                    analyze.Environment(),
+                ],
+            ),
+            RuntimeError,
+        )
+        pages = list(client.list_environments(request={}).pages)
+        for page_, token in zip(pages, ['abc','def','ghi', '']):
+            assert page_.raw_page.next_page_token == token
+
+@pytest.mark.asyncio
+async def test_list_environments_async_pager():
+    client = DataplexServiceAsyncClient(
+        credentials=async_anonymous_credentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.list_environments),
+            '__call__', new_callable=mock.AsyncMock) as call:
+        # Set the response to a series of pages.
+        call.side_effect = (
+            service.ListEnvironmentsResponse(
+                environments=[
+                    analyze.Environment(),
+                    analyze.Environment(),
+                    analyze.Environment(),
+                ],
+                next_page_token='abc',
+            ),
+            service.ListEnvironmentsResponse(
+                environments=[],
+                next_page_token='def',
+            ),
+            service.ListEnvironmentsResponse(
+                environments=[
+                    analyze.Environment(),
+                ],
+                next_page_token='ghi',
+            ),
+            service.ListEnvironmentsResponse(
+                environments=[
+                    analyze.Environment(),
+                    analyze.Environment(),
+                ],
+            ),
+            RuntimeError,
+        )
+        async_pager = await client.list_environments(request={},)
+        assert async_pager.next_page_token == 'abc'
+        responses = []
+        async for response in async_pager: # pragma: no branch
+            responses.append(response)
+
+        assert len(responses) == 6
+        assert all(isinstance(i, analyze.Environment)
+                   for i in responses)
+
+
+@pytest.mark.asyncio
+async def test_list_environments_async_pages():
+    client = DataplexServiceAsyncClient(
+        credentials=async_anonymous_credentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
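+    # new_callable=mock.AsyncMock makes the patched '__call__' awaitable,
+    # as required by the async transport.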
+ with mock.patch.object( + type(client.transport.list_environments), + '__call__', new_callable=mock.AsyncMock) as call: + # Set the response to a series of pages. + call.side_effect = ( + service.ListEnvironmentsResponse( + environments=[ + analyze.Environment(), + analyze.Environment(), + analyze.Environment(), + ], + next_page_token='abc', + ), + service.ListEnvironmentsResponse( + environments=[], + next_page_token='def', + ), + service.ListEnvironmentsResponse( + environments=[ + analyze.Environment(), + ], + next_page_token='ghi', + ), + service.ListEnvironmentsResponse( + environments=[ + analyze.Environment(), + analyze.Environment(), + ], + ), + RuntimeError, + ) + pages = [] + # Workaround issue in python 3.9 related to code coverage by adding `# pragma: no branch` + # See https://github.com/googleapis/gapic-generator-python/pull/1174#issuecomment-1025132372 + async for page_ in ( # pragma: no branch + await client.list_environments(request={}) + ).pages: + pages.append(page_) + for page_, token in zip(pages, ['abc','def','ghi', '']): + assert page_.raw_page.next_page_token == token + +@pytest.mark.parametrize("request_type", [ + service.GetEnvironmentRequest, + dict, +]) +def test_get_environment(request_type, transport: str = 'grpc'): + client = DataplexServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_environment), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = analyze.Environment( + name='name_value', + display_name='display_name_value', + uid='uid_value', + description='description_value', + state=resources.State.ACTIVE, + ) + response = client.get_environment(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = service.GetEnvironmentRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, analyze.Environment) + assert response.name == 'name_value' + assert response.display_name == 'display_name_value' + assert response.uid == 'uid_value' + assert response.description == 'description_value' + assert response.state == resources.State.ACTIVE + + +def test_get_environment_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = DataplexServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = service.GetEnvironmentRequest( + name='name_value', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_environment), + '__call__') as call: + call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. 
+ client.get_environment(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == service.GetEnvironmentRequest( + name='name_value', + ) + +def test_get_environment_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = DataplexServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.get_environment in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client._transport._wrapped_methods[client._transport.get_environment] = mock_rpc + request = {} + client.get_environment(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.get_environment(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + +@pytest.mark.asyncio +async def test_get_environment_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = DataplexServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._client._transport.get_environment in client._client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[client._client._transport.get_environment] = mock_rpc + + request = {} + await client.get_environment(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + await client.get_environment(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + +@pytest.mark.asyncio +async def test_get_environment_async(transport: str = 'grpc_asyncio', request_type=service.GetEnvironmentRequest): + client = DataplexServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_environment), + '__call__') as call: + # Designate an appropriate return value for the call. 
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(analyze.Environment(
+            name='name_value',
+            display_name='display_name_value',
+            uid='uid_value',
+            description='description_value',
+            state=resources.State.ACTIVE,
+        ))
+        response = await client.get_environment(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        request = service.GetEnvironmentRequest()
+        assert args[0] == request
+
+    # Establish that the response is the type that we expect.
+    assert isinstance(response, analyze.Environment)
+    assert response.name == 'name_value'
+    assert response.display_name == 'display_name_value'
+    assert response.uid == 'uid_value'
+    assert response.description == 'description_value'
+    assert response.state == resources.State.ACTIVE
+
+
+@pytest.mark.asyncio
+async def test_get_environment_async_from_dict():
+    await test_get_environment_async(request_type=dict)
+
+def test_get_environment_field_headers():
+    client = DataplexServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Any value that is part of the HTTP/1.1 URI should be sent as
+    # a field header. Set these to a non-empty value.
+    request = service.GetEnvironmentRequest()
+
+    request.name = 'name_value'
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.get_environment),
+            '__call__') as call:
+        call.return_value = analyze.Environment()
+        client.get_environment(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == request
+
+    # Establish that the field header was sent.
+    _, _, kw = call.mock_calls[0]
+    assert (
+        'x-goog-request-params',
+        'name=name_value',
+    ) in kw['metadata']
+
+
+@pytest.mark.asyncio
+async def test_get_environment_field_headers_async():
+    client = DataplexServiceAsyncClient(
+        credentials=async_anonymous_credentials(),
+    )
+
+    # Any value that is part of the HTTP/1.1 URI should be sent as
+    # a field header. Set these to a non-empty value.
+    request = service.GetEnvironmentRequest()
+
+    request.name = 'name_value'
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.get_environment),
+            '__call__') as call:
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(analyze.Environment())
+        await client.get_environment(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == request
+
+    # Establish that the field header was sent.
+    _, _, kw = call.mock_calls[0]
+    assert (
+        'x-goog-request-params',
+        'name=name_value',
+    ) in kw['metadata']
+
+
+def test_get_environment_flattened():
+    client = DataplexServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.get_environment),
+            '__call__') as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = analyze.Environment()
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        client.get_environment(
+            name='name_value',
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        arg = args[0].name
+        mock_val = 'name_value'
+        assert arg == mock_val
+
+
+def test_get_environment_flattened_error():
+    client = DataplexServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        client.get_environment(
+            service.GetEnvironmentRequest(),
+            name='name_value',
+        )
+
+@pytest.mark.asyncio
+async def test_get_environment_flattened_async():
+    client = DataplexServiceAsyncClient(
+        credentials=async_anonymous_credentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.get_environment),
+            '__call__') as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(analyze.Environment())
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        response = await client.get_environment(
+            name='name_value',
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        arg = args[0].name
+        mock_val = 'name_value'
+        assert arg == mock_val
+
+@pytest.mark.asyncio
+async def test_get_environment_flattened_error_async():
+    client = DataplexServiceAsyncClient(
+        credentials=async_anonymous_credentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        await client.get_environment(
+            service.GetEnvironmentRequest(),
+            name='name_value',
+        )
+
+
+@pytest.mark.parametrize("request_type", [
+  service.ListSessionsRequest,
+  dict,
+])
+def test_list_sessions(request_type, transport: str = 'grpc'):
+    client = DataplexServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport,
+    )
+
+    # Everything is optional in proto3 as far as the runtime is concerned,
+    # and we are mocking out the actual API, so just send an empty request.
+    request = request_type()
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.list_sessions),
+            '__call__') as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = service.ListSessionsResponse(
+            next_page_token='next_page_token_value',
+        )
+        response = client.list_sessions(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        request = service.ListSessionsRequest()
+        assert args[0] == request
+
+    # Establish that the response is the type that we expect.
+    assert isinstance(response, pagers.ListSessionsPager)
+    assert response.next_page_token == 'next_page_token_value'
+
+
+def test_list_sessions_non_empty_request_with_auto_populated_field():
+    # This test is a coverage failsafe to make sure that UUID4 fields are
+    # automatically populated, according to AIP-4235, with non-empty requests.
+    client = DataplexServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport='grpc',
+    )
+
+    # Populate all string fields in the request which are not UUID4
+    # since we want to check that UUID4 are populated automatically
+    # if they meet the requirements of AIP 4235.
+ request = service.ListSessionsRequest( + parent='parent_value', + page_token='page_token_value', + filter='filter_value', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_sessions), + '__call__') as call: + call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client.list_sessions(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == service.ListSessionsRequest( + parent='parent_value', + page_token='page_token_value', + filter='filter_value', + ) + +def test_list_sessions_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = DataplexServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.list_sessions in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client._transport._wrapped_methods[client._transport.list_sessions] = mock_rpc + request = {} + client.list_sessions(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.list_sessions(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + +@pytest.mark.asyncio +async def test_list_sessions_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = DataplexServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._client._transport.list_sessions in client._client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[client._client._transport.list_sessions] = mock_rpc + + request = {} + await client.list_sessions(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + await client.list_sessions(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + +@pytest.mark.asyncio +async def test_list_sessions_async(transport: str = 'grpc_asyncio', request_type=service.ListSessionsRequest): + client = DataplexServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. 
+    with mock.patch.object(
+            type(client.transport.list_sessions),
+            '__call__') as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(service.ListSessionsResponse(
+            next_page_token='next_page_token_value',
+        ))
+        response = await client.list_sessions(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        request = service.ListSessionsRequest()
+        assert args[0] == request
+
+    # Establish that the response is the type that we expect.
+    assert isinstance(response, pagers.ListSessionsAsyncPager)
+    assert response.next_page_token == 'next_page_token_value'
+
+
+@pytest.mark.asyncio
+async def test_list_sessions_async_from_dict():
+    await test_list_sessions_async(request_type=dict)
+
+def test_list_sessions_field_headers():
+    client = DataplexServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Any value that is part of the HTTP/1.1 URI should be sent as
+    # a field header. Set these to a non-empty value.
+    request = service.ListSessionsRequest()
+
+    request.parent = 'parent_value'
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.list_sessions),
+            '__call__') as call:
+        call.return_value = service.ListSessionsResponse()
+        client.list_sessions(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == request
+
+    # Establish that the field header was sent.
+    _, _, kw = call.mock_calls[0]
+    assert (
+        'x-goog-request-params',
+        'parent=parent_value',
+    ) in kw['metadata']
+
+
+@pytest.mark.asyncio
+async def test_list_sessions_field_headers_async():
+    client = DataplexServiceAsyncClient(
+        credentials=async_anonymous_credentials(),
+    )
+
+    # Any value that is part of the HTTP/1.1 URI should be sent as
+    # a field header. Set these to a non-empty value.
+    request = service.ListSessionsRequest()
+
+    request.parent = 'parent_value'
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.list_sessions),
+            '__call__') as call:
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(service.ListSessionsResponse())
+        await client.list_sessions(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == request
+
+    # Establish that the field header was sent.
+    _, _, kw = call.mock_calls[0]
+    assert (
+        'x-goog-request-params',
+        'parent=parent_value',
+    ) in kw['metadata']
+
+
+def test_list_sessions_flattened():
+    client = DataplexServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.list_sessions),
+            '__call__') as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = service.ListSessionsResponse()
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        client.list_sessions(
+            parent='parent_value',
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        arg = args[0].parent
+        mock_val = 'parent_value'
+        assert arg == mock_val
+
+
+def test_list_sessions_flattened_error():
+    client = DataplexServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        client.list_sessions(
+            service.ListSessionsRequest(),
+            parent='parent_value',
+        )
+
+@pytest.mark.asyncio
+async def test_list_sessions_flattened_async():
+    client = DataplexServiceAsyncClient(
+        credentials=async_anonymous_credentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.list_sessions),
+            '__call__') as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(service.ListSessionsResponse())
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        response = await client.list_sessions(
+            parent='parent_value',
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        arg = args[0].parent
+        mock_val = 'parent_value'
+        assert arg == mock_val
+
+@pytest.mark.asyncio
+async def test_list_sessions_flattened_error_async():
+    client = DataplexServiceAsyncClient(
+        credentials=async_anonymous_credentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        await client.list_sessions(
+            service.ListSessionsRequest(),
+            parent='parent_value',
+        )
+
+
+def test_list_sessions_pager(transport_name: str = "grpc"):
+    client = DataplexServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport_name,
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.list_sessions),
+            '__call__') as call:
+        # Set the response to a series of pages.
+        call.side_effect = (
+            service.ListSessionsResponse(
+                sessions=[
+                    analyze.Session(),
+                    analyze.Session(),
+                    analyze.Session(),
+                ],
+                next_page_token='abc',
+            ),
+            service.ListSessionsResponse(
+                sessions=[],
+                next_page_token='def',
+            ),
+            service.ListSessionsResponse(
+                sessions=[
+                    analyze.Session(),
+                ],
+                next_page_token='ghi',
+            ),
+            service.ListSessionsResponse(
+                sessions=[
+                    analyze.Session(),
+                    analyze.Session(),
+                ],
+            ),
+            RuntimeError,
+        )
+
+        expected_metadata = ()
+        retry = retries.Retry()
+        timeout = 5
+        expected_metadata = tuple(expected_metadata) + (
+            gapic_v1.routing_header.to_grpc_metadata((
+                ('parent', ''),
+            )),
+        )
+        pager = client.list_sessions(request={}, retry=retry, timeout=timeout)
+
+        assert pager._metadata == expected_metadata
+        assert pager._retry == retry
+        assert pager._timeout == timeout
+
+        results = list(pager)
+        assert len(results) == 6
+        assert all(isinstance(i, analyze.Session)
+                   for i in results)
+
+
+def test_list_sessions_pages(transport_name: str = "grpc"):
+    client = DataplexServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport_name,
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object( + type(client.transport.list_sessions), + '__call__') as call: + # Set the response to a series of pages. + call.side_effect = ( + service.ListSessionsResponse( + sessions=[ + analyze.Session(), + analyze.Session(), + analyze.Session(), + ], + next_page_token='abc', + ), + service.ListSessionsResponse( + sessions=[], + next_page_token='def', + ), + service.ListSessionsResponse( + sessions=[ + analyze.Session(), + ], + next_page_token='ghi', + ), + service.ListSessionsResponse( + sessions=[ + analyze.Session(), + analyze.Session(), + ], + ), + RuntimeError, + ) + pages = list(client.list_sessions(request={}).pages) + for page_, token in zip(pages, ['abc','def','ghi', '']): + assert page_.raw_page.next_page_token == token + +@pytest.mark.asyncio +async def test_list_sessions_async_pager(): + client = DataplexServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_sessions), + '__call__', new_callable=mock.AsyncMock) as call: + # Set the response to a series of pages. + call.side_effect = ( + service.ListSessionsResponse( + sessions=[ + analyze.Session(), + analyze.Session(), + analyze.Session(), + ], + next_page_token='abc', + ), + service.ListSessionsResponse( + sessions=[], + next_page_token='def', + ), + service.ListSessionsResponse( + sessions=[ + analyze.Session(), + ], + next_page_token='ghi', + ), + service.ListSessionsResponse( + sessions=[ + analyze.Session(), + analyze.Session(), + ], + ), + RuntimeError, + ) + async_pager = await client.list_sessions(request={},) + assert async_pager.next_page_token == 'abc' + responses = [] + async for response in async_pager: # pragma: no branch + responses.append(response) + + assert len(responses) == 6 + assert all(isinstance(i, analyze.Session) + for i in responses) + + +@pytest.mark.asyncio +async def test_list_sessions_async_pages(): + client = DataplexServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_sessions), + '__call__', new_callable=mock.AsyncMock) as call: + # Set the response to a series of pages. + call.side_effect = ( + service.ListSessionsResponse( + sessions=[ + analyze.Session(), + analyze.Session(), + analyze.Session(), + ], + next_page_token='abc', + ), + service.ListSessionsResponse( + sessions=[], + next_page_token='def', + ), + service.ListSessionsResponse( + sessions=[ + analyze.Session(), + ], + next_page_token='ghi', + ), + service.ListSessionsResponse( + sessions=[ + analyze.Session(), + analyze.Session(), + ], + ), + RuntimeError, + ) + pages = [] + # Workaround issue in python 3.9 related to code coverage by adding `# pragma: no branch` + # See https://github.com/googleapis/gapic-generator-python/pull/1174#issuecomment-1025132372 + async for page_ in ( # pragma: no branch + await client.list_sessions(request={}) + ).pages: + pages.append(page_) + for page_, token in zip(pages, ['abc','def','ghi', '']): + assert page_.raw_page.next_page_token == token + + +def test_credentials_transport_error(): + # It is an error to provide credentials and a transport instance. 
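+    # A transport instance is already fully configured, so combining it with
+    # credentials, a credentials file, scopes, or an API key should raise
+    # ValueError at construction time.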
+ transport = transports.DataplexServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = DataplexServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # It is an error to provide a credentials file and a transport instance. + transport = transports.DataplexServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = DataplexServiceClient( + client_options={"credentials_file": "credentials.json"}, + transport=transport, + ) + + # It is an error to provide an api_key and a transport instance. + transport = transports.DataplexServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + options = client_options.ClientOptions() + options.api_key = "api_key" + with pytest.raises(ValueError): + client = DataplexServiceClient( + client_options=options, + transport=transport, + ) + + # It is an error to provide an api_key and a credential. + options = client_options.ClientOptions() + options.api_key = "api_key" + with pytest.raises(ValueError): + client = DataplexServiceClient( + client_options=options, + credentials=ga_credentials.AnonymousCredentials() + ) + + # It is an error to provide scopes and a transport instance. + transport = transports.DataplexServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = DataplexServiceClient( + client_options={"scopes": ["1", "2"]}, + transport=transport, + ) + + +def test_transport_instance(): + # A client may be instantiated with a custom transport instance. + transport = transports.DataplexServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + client = DataplexServiceClient(transport=transport) + assert client.transport is transport + +def test_transport_get_channel(): + # A client may be instantiated with a custom transport instance. + transport = transports.DataplexServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + channel = transport.grpc_channel + assert channel + + transport = transports.DataplexServiceGrpcAsyncIOTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + channel = transport.grpc_channel + assert channel + +@pytest.mark.parametrize("transport_class", [ + transports.DataplexServiceGrpcTransport, + transports.DataplexServiceGrpcAsyncIOTransport, +]) +def test_transport_adc(transport_class): + # Test default credentials are used if not provided. + with mock.patch.object(google.auth, 'default') as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport_class() + adc.assert_called_once() + +def test_transport_kind_grpc(): + transport = DataplexServiceClient.get_transport_class("grpc")( + credentials=ga_credentials.AnonymousCredentials() + ) + assert transport.kind == "grpc" + + +def test_initialize_client_w_grpc(): + client = DataplexServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc" + ) + assert client is not None + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_create_lake_empty_call_grpc(): + client = DataplexServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. 
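+    # Patching type(client.transport.create_lake).__call__ swaps out the
+    # gRPC stub while leaving the client-side request plumbing intact: with
+    # request=None the client should still construct a default
+    # CreateLakeRequest, which is what the assertion below checks.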
+ with mock.patch.object( + type(client.transport.create_lake), + '__call__') as call: + call.return_value = operations_pb2.Operation(name='operations/op') + client.create_lake(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = service.CreateLakeRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_update_lake_empty_call_grpc(): + client = DataplexServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.update_lake), + '__call__') as call: + call.return_value = operations_pb2.Operation(name='operations/op') + client.update_lake(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = service.UpdateLakeRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_delete_lake_empty_call_grpc(): + client = DataplexServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.delete_lake), + '__call__') as call: + call.return_value = operations_pb2.Operation(name='operations/op') + client.delete_lake(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = service.DeleteLakeRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_list_lakes_empty_call_grpc(): + client = DataplexServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.list_lakes), + '__call__') as call: + call.return_value = service.ListLakesResponse() + client.list_lakes(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = service.ListLakesRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_get_lake_empty_call_grpc(): + client = DataplexServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.get_lake), + '__call__') as call: + call.return_value = resources.Lake() + client.get_lake(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = service.GetLakeRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. 
+def test_list_lake_actions_empty_call_grpc(): + client = DataplexServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.list_lake_actions), + '__call__') as call: + call.return_value = service.ListActionsResponse() + client.list_lake_actions(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = service.ListLakeActionsRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_create_zone_empty_call_grpc(): + client = DataplexServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.create_zone), + '__call__') as call: + call.return_value = operations_pb2.Operation(name='operations/op') + client.create_zone(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = service.CreateZoneRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_update_zone_empty_call_grpc(): + client = DataplexServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.update_zone), + '__call__') as call: + call.return_value = operations_pb2.Operation(name='operations/op') + client.update_zone(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = service.UpdateZoneRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_delete_zone_empty_call_grpc(): + client = DataplexServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.delete_zone), + '__call__') as call: + call.return_value = operations_pb2.Operation(name='operations/op') + client.delete_zone(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = service.DeleteZoneRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_list_zones_empty_call_grpc(): + client = DataplexServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.list_zones), + '__call__') as call: + call.return_value = service.ListZonesResponse() + client.list_zones(request=None) + + # Establish that the underlying stub method was called. 
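+        # Each entry in mock_calls unpacks to (name, args, kwargs); args[0]
+        # is the request message the client handed to the stub.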
+ call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = service.ListZonesRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_get_zone_empty_call_grpc(): + client = DataplexServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.get_zone), + '__call__') as call: + call.return_value = resources.Zone() + client.get_zone(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = service.GetZoneRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_list_zone_actions_empty_call_grpc(): + client = DataplexServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.list_zone_actions), + '__call__') as call: + call.return_value = service.ListActionsResponse() + client.list_zone_actions(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = service.ListZoneActionsRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_create_asset_empty_call_grpc(): + client = DataplexServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.create_asset), + '__call__') as call: + call.return_value = operations_pb2.Operation(name='operations/op') + client.create_asset(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = service.CreateAssetRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_update_asset_empty_call_grpc(): + client = DataplexServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.update_asset), + '__call__') as call: + call.return_value = operations_pb2.Operation(name='operations/op') + client.update_asset(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = service.UpdateAssetRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_delete_asset_empty_call_grpc(): + client = DataplexServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. 
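+    # Create/update/delete RPCs in this service are long-running, so the
+    # faked stub returns an operations_pb2.Operation rather than the
+    # resource itself.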
+ with mock.patch.object( + type(client.transport.delete_asset), + '__call__') as call: + call.return_value = operations_pb2.Operation(name='operations/op') + client.delete_asset(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = service.DeleteAssetRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_list_assets_empty_call_grpc(): + client = DataplexServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.list_assets), + '__call__') as call: + call.return_value = service.ListAssetsResponse() + client.list_assets(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = service.ListAssetsRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_get_asset_empty_call_grpc(): + client = DataplexServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.get_asset), + '__call__') as call: + call.return_value = resources.Asset() + client.get_asset(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = service.GetAssetRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_list_asset_actions_empty_call_grpc(): + client = DataplexServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.list_asset_actions), + '__call__') as call: + call.return_value = service.ListActionsResponse() + client.list_asset_actions(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = service.ListAssetActionsRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_create_task_empty_call_grpc(): + client = DataplexServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.create_task), + '__call__') as call: + call.return_value = operations_pb2.Operation(name='operations/op') + client.create_task(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = service.CreateTaskRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. 
+def test_update_task_empty_call_grpc(): + client = DataplexServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.update_task), + '__call__') as call: + call.return_value = operations_pb2.Operation(name='operations/op') + client.update_task(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = service.UpdateTaskRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_delete_task_empty_call_grpc(): + client = DataplexServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.delete_task), + '__call__') as call: + call.return_value = operations_pb2.Operation(name='operations/op') + client.delete_task(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = service.DeleteTaskRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_list_tasks_empty_call_grpc(): + client = DataplexServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.list_tasks), + '__call__') as call: + call.return_value = service.ListTasksResponse() + client.list_tasks(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = service.ListTasksRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_get_task_empty_call_grpc(): + client = DataplexServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.get_task), + '__call__') as call: + call.return_value = tasks.Task() + client.get_task(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = service.GetTaskRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_list_jobs_empty_call_grpc(): + client = DataplexServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.list_jobs), + '__call__') as call: + call.return_value = service.ListJobsResponse() + client.list_jobs(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = service.ListJobsRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. 
+def test_run_task_empty_call_grpc(): + client = DataplexServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.run_task), + '__call__') as call: + call.return_value = service.RunTaskResponse() + client.run_task(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = service.RunTaskRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_get_job_empty_call_grpc(): + client = DataplexServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.get_job), + '__call__') as call: + call.return_value = tasks.Job() + client.get_job(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = service.GetJobRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_cancel_job_empty_call_grpc(): + client = DataplexServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.cancel_job), + '__call__') as call: + call.return_value = None + client.cancel_job(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = service.CancelJobRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_create_environment_empty_call_grpc(): + client = DataplexServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.create_environment), + '__call__') as call: + call.return_value = operations_pb2.Operation(name='operations/op') + client.create_environment(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = service.CreateEnvironmentRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_update_environment_empty_call_grpc(): + client = DataplexServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.update_environment), + '__call__') as call: + call.return_value = operations_pb2.Operation(name='operations/op') + client.update_environment(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = service.UpdateEnvironmentRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. 
+# request == None and no flattened fields passed, work.
+def test_delete_environment_empty_call_grpc():
+    client = DataplexServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport="grpc",
+    )
+
+    # Mock the actual call, and fake the request.
+    with mock.patch.object(
+            type(client.transport.delete_environment),
+            '__call__') as call:
+        call.return_value = operations_pb2.Operation(name='operations/op')
+        client.delete_environment(request=None)
+
+        # Establish that the underlying stub method was called.
+        call.assert_called()
+        _, args, _ = call.mock_calls[0]
+        request_msg = service.DeleteEnvironmentRequest()
+
+        assert args[0] == request_msg
+
+
+# This test is a coverage failsafe to make sure that totally empty calls,
+# i.e. request == None and no flattened fields passed, work.
+def test_list_environments_empty_call_grpc():
+    client = DataplexServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport="grpc",
+    )
+
+    # Mock the actual call, and fake the request.
+    with mock.patch.object(
+            type(client.transport.list_environments),
+            '__call__') as call:
+        call.return_value = service.ListEnvironmentsResponse()
+        client.list_environments(request=None)
+
+        # Establish that the underlying stub method was called.
+        call.assert_called()
+        _, args, _ = call.mock_calls[0]
+        request_msg = service.ListEnvironmentsRequest()
+
+        assert args[0] == request_msg
+
+
+# This test is a coverage failsafe to make sure that totally empty calls,
+# i.e. request == None and no flattened fields passed, work.
+def test_get_environment_empty_call_grpc():
+    client = DataplexServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport="grpc",
+    )
+
+    # Mock the actual call, and fake the request.
+    with mock.patch.object(
+            type(client.transport.get_environment),
+            '__call__') as call:
+        call.return_value = analyze.Environment()
+        client.get_environment(request=None)
+
+        # Establish that the underlying stub method was called.
+        call.assert_called()
+        _, args, _ = call.mock_calls[0]
+        request_msg = service.GetEnvironmentRequest()
+
+        assert args[0] == request_msg
+
+
+# This test is a coverage failsafe to make sure that totally empty calls,
+# i.e. request == None and no flattened fields passed, work.
+def test_list_sessions_empty_call_grpc():
+    client = DataplexServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport="grpc",
+    )
+
+    # Mock the actual call, and fake the request.
+    with mock.patch.object(
+            type(client.transport.list_sessions),
+            '__call__') as call:
+        call.return_value = service.ListSessionsResponse()
+        client.list_sessions(request=None)
+
+        # Establish that the underlying stub method was called.
+        call.assert_called()
+        _, args, _ = call.mock_calls[0]
+        request_msg = service.ListSessionsRequest()
+
+        assert args[0] == request_msg
+
+
+def test_transport_kind_grpc_asyncio():
+    transport = DataplexServiceAsyncClient.get_transport_class("grpc_asyncio")(
+        credentials=async_anonymous_credentials()
+    )
+    assert transport.kind == "grpc_asyncio"
+
+
+def test_initialize_client_w_grpc_asyncio():
+    client = DataplexServiceAsyncClient(
+        credentials=async_anonymous_credentials(),
+        transport="grpc_asyncio"
+    )
+    assert client is not None
+
+
+# This test is a coverage failsafe to make sure that totally empty calls,
+# i.e. request == None and no flattened fields passed, work.
+@pytest.mark.asyncio +async def test_create_lake_empty_call_grpc_asyncio(): + client = DataplexServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.create_lake), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name='operations/spam') + ) + await client.create_lake(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = service.CreateLakeRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_update_lake_empty_call_grpc_asyncio(): + client = DataplexServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.update_lake), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name='operations/spam') + ) + await client.update_lake(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = service.UpdateLakeRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_delete_lake_empty_call_grpc_asyncio(): + client = DataplexServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.delete_lake), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name='operations/spam') + ) + await client.delete_lake(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = service.DeleteLakeRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_list_lakes_empty_call_grpc_asyncio(): + client = DataplexServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.list_lakes), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(service.ListLakesResponse( + next_page_token='next_page_token_value', + unreachable_locations=['unreachable_locations_value'], + )) + await client.list_lakes(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = service.ListLakesRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. 
+# request == None and no flattened fields passed, work.
+@pytest.mark.asyncio
+async def test_get_lake_empty_call_grpc_asyncio():
+    client = DataplexServiceAsyncClient(
+        credentials=async_anonymous_credentials(),
+        transport="grpc_asyncio",
+    )
+
+    # Mock the actual call, and fake the request.
+    with mock.patch.object(
+            type(client.transport.get_lake),
+            '__call__') as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(resources.Lake(
+            name='name_value',
+            display_name='display_name_value',
+            uid='uid_value',
+            description='description_value',
+            state=resources.State.ACTIVE,
+            service_account='service_account_value',
+        ))
+        await client.get_lake(request=None)
+
+        # Establish that the underlying stub method was called.
+        call.assert_called()
+        _, args, _ = call.mock_calls[0]
+        request_msg = service.GetLakeRequest()
+
+        assert args[0] == request_msg
+
+
+# This test is a coverage failsafe to make sure that totally empty calls,
+# i.e. request == None and no flattened fields passed, work.
+@pytest.mark.asyncio
+async def test_list_lake_actions_empty_call_grpc_asyncio():
+    client = DataplexServiceAsyncClient(
+        credentials=async_anonymous_credentials(),
+        transport="grpc_asyncio",
+    )
+
+    # Mock the actual call, and fake the request.
+    with mock.patch.object(
+            type(client.transport.list_lake_actions),
+            '__call__') as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(service.ListActionsResponse(
+            next_page_token='next_page_token_value',
+        ))
+        await client.list_lake_actions(request=None)
+
+        # Establish that the underlying stub method was called.
+        call.assert_called()
+        _, args, _ = call.mock_calls[0]
+        request_msg = service.ListLakeActionsRequest()
+
+        assert args[0] == request_msg
+
+
+# This test is a coverage failsafe to make sure that totally empty calls,
+# i.e. request == None and no flattened fields passed, work.
+@pytest.mark.asyncio
+async def test_create_zone_empty_call_grpc_asyncio():
+    client = DataplexServiceAsyncClient(
+        credentials=async_anonymous_credentials(),
+        transport="grpc_asyncio",
+    )
+
+    # Mock the actual call, and fake the request.
+    with mock.patch.object(
+            type(client.transport.create_zone),
+            '__call__') as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
+            operations_pb2.Operation(name='operations/spam')
+        )
+        await client.create_zone(request=None)
+
+        # Establish that the underlying stub method was called.
+        call.assert_called()
+        _, args, _ = call.mock_calls[0]
+        request_msg = service.CreateZoneRequest()
+
+        assert args[0] == request_msg
+
+
+# This test is a coverage failsafe to make sure that totally empty calls,
+# i.e. request == None and no flattened fields passed, work.
+@pytest.mark.asyncio
+async def test_update_zone_empty_call_grpc_asyncio():
+    client = DataplexServiceAsyncClient(
+        credentials=async_anonymous_credentials(),
+        transport="grpc_asyncio",
+    )
+
+    # Mock the actual call, and fake the request.
+    with mock.patch.object(
+            type(client.transport.update_zone),
+            '__call__') as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
+            operations_pb2.Operation(name='operations/spam')
+        )
+        await client.update_zone(request=None)
+
+        # Establish that the underlying stub method was called.
+ call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = service.UpdateZoneRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_delete_zone_empty_call_grpc_asyncio(): + client = DataplexServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.delete_zone), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name='operations/spam') + ) + await client.delete_zone(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = service.DeleteZoneRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_list_zones_empty_call_grpc_asyncio(): + client = DataplexServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.list_zones), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(service.ListZonesResponse( + next_page_token='next_page_token_value', + )) + await client.list_zones(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = service.ListZonesRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_get_zone_empty_call_grpc_asyncio(): + client = DataplexServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.get_zone), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(resources.Zone( + name='name_value', + display_name='display_name_value', + uid='uid_value', + description='description_value', + state=resources.State.ACTIVE, + type_=resources.Zone.Type.RAW, + )) + await client.get_zone(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = service.GetZoneRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_list_zone_actions_empty_call_grpc_asyncio(): + client = DataplexServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.list_zone_actions), + '__call__') as call: + # Designate an appropriate return value for the call. 
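+        # grpc_helpers_async.FakeUnaryUnaryCall wraps the response in an
+        # awaitable that stands in for a real grpc.aio unary-unary call, so
+        # the async client can await it exactly as it would a live RPC.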
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(service.ListActionsResponse( + next_page_token='next_page_token_value', + )) + await client.list_zone_actions(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = service.ListZoneActionsRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_create_asset_empty_call_grpc_asyncio(): + client = DataplexServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.create_asset), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name='operations/spam') + ) + await client.create_asset(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = service.CreateAssetRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_update_asset_empty_call_grpc_asyncio(): + client = DataplexServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.update_asset), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name='operations/spam') + ) + await client.update_asset(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = service.UpdateAssetRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_delete_asset_empty_call_grpc_asyncio(): + client = DataplexServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.delete_asset), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name='operations/spam') + ) + await client.delete_asset(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = service.DeleteAssetRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_list_assets_empty_call_grpc_asyncio(): + client = DataplexServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. 
+ with mock.patch.object( + type(client.transport.list_assets), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(service.ListAssetsResponse( + next_page_token='next_page_token_value', + )) + await client.list_assets(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = service.ListAssetsRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_get_asset_empty_call_grpc_asyncio(): + client = DataplexServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.get_asset), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(resources.Asset( + name='name_value', + display_name='display_name_value', + uid='uid_value', + description='description_value', + state=resources.State.ACTIVE, + )) + await client.get_asset(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = service.GetAssetRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_list_asset_actions_empty_call_grpc_asyncio(): + client = DataplexServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.list_asset_actions), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(service.ListActionsResponse( + next_page_token='next_page_token_value', + )) + await client.list_asset_actions(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = service.ListAssetActionsRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_create_task_empty_call_grpc_asyncio(): + client = DataplexServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.create_task), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name='operations/spam') + ) + await client.create_task(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = service.CreateTaskRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. 
+@pytest.mark.asyncio +async def test_update_task_empty_call_grpc_asyncio(): + client = DataplexServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.update_task), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name='operations/spam') + ) + await client.update_task(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = service.UpdateTaskRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_delete_task_empty_call_grpc_asyncio(): + client = DataplexServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.delete_task), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name='operations/spam') + ) + await client.delete_task(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = service.DeleteTaskRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_list_tasks_empty_call_grpc_asyncio(): + client = DataplexServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.list_tasks), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(service.ListTasksResponse( + next_page_token='next_page_token_value', + unreachable_locations=['unreachable_locations_value'], + )) + await client.list_tasks(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = service.ListTasksRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_get_task_empty_call_grpc_asyncio(): + client = DataplexServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.get_task), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(tasks.Task( + name='name_value', + uid='uid_value', + description='description_value', + display_name='display_name_value', + state=resources.State.ACTIVE, + )) + await client.get_task(request=None) + + # Establish that the underlying stub method was called. 
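+        # The awaited invocation is still recorded on the mock, so the same
+        # (name, args, kwargs) unpacking used in the sync tests applies here.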
+ call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = service.GetTaskRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_list_jobs_empty_call_grpc_asyncio(): + client = DataplexServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.list_jobs), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(service.ListJobsResponse( + next_page_token='next_page_token_value', + )) + await client.list_jobs(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = service.ListJobsRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_run_task_empty_call_grpc_asyncio(): + client = DataplexServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.run_task), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(service.RunTaskResponse( + )) + await client.run_task(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = service.RunTaskRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_get_job_empty_call_grpc_asyncio(): + client = DataplexServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.get_job), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(tasks.Job( + name='name_value', + uid='uid_value', + state=tasks.Job.State.RUNNING, + retry_count=1214, + service=tasks.Job.Service.DATAPROC, + service_job='service_job_value', + message='message_value', + trigger=tasks.Job.Trigger.TASK_CONFIG, + )) + await client.get_job(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = service.GetJobRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_cancel_job_empty_call_grpc_asyncio(): + client = DataplexServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.cancel_job), + '__call__') as call: + # Designate an appropriate return value for the call. 
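+        # CancelJob has no response payload, so the fake call resolves to
+        # None, mirroring call.return_value = None in the sync variant.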
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + await client.cancel_job(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = service.CancelJobRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_create_environment_empty_call_grpc_asyncio(): + client = DataplexServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.create_environment), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name='operations/spam') + ) + await client.create_environment(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = service.CreateEnvironmentRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_update_environment_empty_call_grpc_asyncio(): + client = DataplexServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.update_environment), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name='operations/spam') + ) + await client.update_environment(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = service.UpdateEnvironmentRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_delete_environment_empty_call_grpc_asyncio(): + client = DataplexServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.delete_environment), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name='operations/spam') + ) + await client.delete_environment(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = service.DeleteEnvironmentRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_list_environments_empty_call_grpc_asyncio(): + client = DataplexServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. 
+ with mock.patch.object( + type(client.transport.list_environments), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(service.ListEnvironmentsResponse( + next_page_token='next_page_token_value', + )) + await client.list_environments(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = service.ListEnvironmentsRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_get_environment_empty_call_grpc_asyncio(): + client = DataplexServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.get_environment), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(analyze.Environment( + name='name_value', + display_name='display_name_value', + uid='uid_value', + description='description_value', + state=resources.State.ACTIVE, + )) + await client.get_environment(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = service.GetEnvironmentRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_list_sessions_empty_call_grpc_asyncio(): + client = DataplexServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.list_sessions), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(service.ListSessionsResponse( + next_page_token='next_page_token_value', + )) + await client.list_sessions(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = service.ListSessionsRequest() + + assert args[0] == request_msg + + +def test_transport_grpc_default(): + # A client should use the gRPC transport by default. + client = DataplexServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + assert isinstance( + client.transport, + transports.DataplexServiceGrpcTransport, + ) + +def test_dataplex_service_base_transport_error(): + # Passing both a credentials object and credentials_file should raise an error + with pytest.raises(core_exceptions.DuplicateCredentialArgs): + transport = transports.DataplexServiceTransport( + credentials=ga_credentials.AnonymousCredentials(), + credentials_file="credentials.json" + ) + + +def test_dataplex_service_base_transport(): + # Instantiate the base transport. + with mock.patch('google.cloud.dataplex_v1.services.dataplex_service.transports.DataplexServiceTransport.__init__') as Transport: + Transport.return_value = None + transport = transports.DataplexServiceTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Every method on the transport should just blindly + # raise NotImplementedError. 
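+    # The abstract base transport declares the full method surface without
+    # any wire implementation; the concrete gRPC transports override each
+    # of the methods enumerated here.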
+ methods = ( + 'create_lake', + 'update_lake', + 'delete_lake', + 'list_lakes', + 'get_lake', + 'list_lake_actions', + 'create_zone', + 'update_zone', + 'delete_zone', + 'list_zones', + 'get_zone', + 'list_zone_actions', + 'create_asset', + 'update_asset', + 'delete_asset', + 'list_assets', + 'get_asset', + 'list_asset_actions', + 'create_task', + 'update_task', + 'delete_task', + 'list_tasks', + 'get_task', + 'list_jobs', + 'run_task', + 'get_job', + 'cancel_job', + 'create_environment', + 'update_environment', + 'delete_environment', + 'list_environments', + 'get_environment', + 'list_sessions', + 'get_location', + 'list_locations', + 'get_operation', + 'cancel_operation', + 'delete_operation', + 'list_operations', + ) + for method in methods: + with pytest.raises(NotImplementedError): + getattr(transport, method)(request=object()) + + with pytest.raises(NotImplementedError): + transport.close() + + # Additionally, the LRO client (a property) should + # also raise NotImplementedError + with pytest.raises(NotImplementedError): + transport.operations_client + + # Catch all for all remaining methods and properties + remainder = [ + 'kind', + ] + for r in remainder: + with pytest.raises(NotImplementedError): + getattr(transport, r)() + + +def test_dataplex_service_base_transport_with_credentials_file(): + # Instantiate the base transport with a credentials file + with mock.patch.object(google.auth, 'load_credentials_from_file', autospec=True) as load_creds, mock.patch('google.cloud.dataplex_v1.services.dataplex_service.transports.DataplexServiceTransport._prep_wrapped_messages') as Transport: + Transport.return_value = None + load_creds.return_value = (ga_credentials.AnonymousCredentials(), None) + transport = transports.DataplexServiceTransport( + credentials_file="credentials.json", + quota_project_id="octopus", + ) + load_creds.assert_called_once_with("credentials.json", + scopes=None, + default_scopes=( + 'https://www.googleapis.com/auth/cloud-platform', +), + quota_project_id="octopus", + ) + + +def test_dataplex_service_base_transport_with_adc(): + # Test the default credentials are used if credentials and credentials_file are None. + with mock.patch.object(google.auth, 'default', autospec=True) as adc, mock.patch('google.cloud.dataplex_v1.services.dataplex_service.transports.DataplexServiceTransport._prep_wrapped_messages') as Transport: + Transport.return_value = None + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport = transports.DataplexServiceTransport() + adc.assert_called_once() + + +def test_dataplex_service_auth_adc(): + # If no credentials are provided, we should use ADC credentials. + with mock.patch.object(google.auth, 'default', autospec=True) as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + DataplexServiceClient() + adc.assert_called_once_with( + scopes=None, + default_scopes=( + 'https://www.googleapis.com/auth/cloud-platform', +), + quota_project_id=None, + ) + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.DataplexServiceGrpcTransport, + transports.DataplexServiceGrpcAsyncIOTransport, + ], +) +def test_dataplex_service_transport_auth_adc(transport_class): + # If credentials and host are not provided, the transport class should use + # ADC credentials. 
+ with mock.patch.object(google.auth, 'default', autospec=True) as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport_class(quota_project_id="octopus", scopes=["1", "2"]) + adc.assert_called_once_with( + scopes=["1", "2"], + default_scopes=( 'https://www.googleapis.com/auth/cloud-platform',), + quota_project_id="octopus", + ) + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.DataplexServiceGrpcTransport, + transports.DataplexServiceGrpcAsyncIOTransport, + ], +) +def test_dataplex_service_transport_auth_gdch_credentials(transport_class): + host = 'https://language.com' + api_audience_tests = [None, 'https://language2.com'] + api_audience_expect = [host, 'https://language2.com'] + for t, e in zip(api_audience_tests, api_audience_expect): + with mock.patch.object(google.auth, 'default', autospec=True) as adc: + gdch_mock = mock.MagicMock() + type(gdch_mock).with_gdch_audience = mock.PropertyMock(return_value=gdch_mock) + adc.return_value = (gdch_mock, None) + transport_class(host=host, api_audience=t) + gdch_mock.with_gdch_audience.assert_called_once_with( + e + ) + + +@pytest.mark.parametrize( + "transport_class,grpc_helpers", + [ + (transports.DataplexServiceGrpcTransport, grpc_helpers), + (transports.DataplexServiceGrpcAsyncIOTransport, grpc_helpers_async) + ], +) +def test_dataplex_service_transport_create_channel(transport_class, grpc_helpers): + # If credentials and host are not provided, the transport class should use + # ADC credentials. + with mock.patch.object(google.auth, "default", autospec=True) as adc, mock.patch.object( + grpc_helpers, "create_channel", autospec=True + ) as create_channel: + creds = ga_credentials.AnonymousCredentials() + adc.return_value = (creds, None) + transport_class( + quota_project_id="octopus", + scopes=["1", "2"] + ) + + create_channel.assert_called_with( + "dataplex.googleapis.com:443", + credentials=creds, + credentials_file=None, + quota_project_id="octopus", + default_scopes=( + 'https://www.googleapis.com/auth/cloud-platform', +), + scopes=["1", "2"], + default_host="dataplex.googleapis.com", + ssl_credentials=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + +@pytest.mark.parametrize("transport_class", [transports.DataplexServiceGrpcTransport, transports.DataplexServiceGrpcAsyncIOTransport]) +def test_dataplex_service_grpc_transport_client_cert_source_for_mtls( + transport_class +): + cred = ga_credentials.AnonymousCredentials() + + # Check ssl_channel_credentials is used if provided. + with mock.patch.object(transport_class, "create_channel") as mock_create_channel: + mock_ssl_channel_creds = mock.Mock() + transport_class( + host="squid.clam.whelk", + credentials=cred, + ssl_channel_credentials=mock_ssl_channel_creds + ) + mock_create_channel.assert_called_once_with( + "squid.clam.whelk:443", + credentials=cred, + credentials_file=None, + scopes=None, + ssl_credentials=mock_ssl_channel_creds, + quota_project_id=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + # Check if ssl_channel_credentials is not provided, then client_cert_source_for_mtls + # is used. 
+    with mock.patch.object(transport_class, "create_channel", return_value=mock.Mock()):
+        with mock.patch("grpc.ssl_channel_credentials") as mock_ssl_cred:
+            transport_class(
+                credentials=cred,
+                client_cert_source_for_mtls=client_cert_source_callback
+            )
+            expected_cert, expected_key = client_cert_source_callback()
+            mock_ssl_cred.assert_called_once_with(
+                certificate_chain=expected_cert,
+                private_key=expected_key
+            )
+
+
+@pytest.mark.parametrize("transport_name", [
+    "grpc",
+    "grpc_asyncio",
+])
+def test_dataplex_service_host_no_port(transport_name):
+    client = DataplexServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        client_options=client_options.ClientOptions(api_endpoint='dataplex.googleapis.com'),
+        transport=transport_name,
+    )
+    assert client.transport._host == (
+        'dataplex.googleapis.com:443'
+    )
+
+@pytest.mark.parametrize("transport_name", [
+    "grpc",
+    "grpc_asyncio",
+])
+def test_dataplex_service_host_with_port(transport_name):
+    client = DataplexServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        client_options=client_options.ClientOptions(api_endpoint='dataplex.googleapis.com:8000'),
+        transport=transport_name,
+    )
+    assert client.transport._host == (
+        'dataplex.googleapis.com:8000'
+    )
+
+def test_dataplex_service_grpc_transport_channel():
+    channel = grpc.secure_channel('http://localhost/', grpc.local_channel_credentials())
+
+    # Check that channel is used if provided.
+    transport = transports.DataplexServiceGrpcTransport(
+        host="squid.clam.whelk",
+        channel=channel,
+    )
+    assert transport.grpc_channel == channel
+    assert transport._host == "squid.clam.whelk:443"
+    assert transport._ssl_channel_credentials is None
+
+
+def test_dataplex_service_grpc_asyncio_transport_channel():
+    channel = aio.secure_channel('http://localhost/', grpc.local_channel_credentials())
+
+    # Check that channel is used if provided.
+    transport = transports.DataplexServiceGrpcAsyncIOTransport(
+        host="squid.clam.whelk",
+        channel=channel,
+    )
+    assert transport.grpc_channel == channel
+    assert transport._host == "squid.clam.whelk:443"
+    assert transport._ssl_channel_credentials is None
+
+
+# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are
+# removed from grpc/grpc_asyncio transport constructor.
+@pytest.mark.parametrize("transport_class", [transports.DataplexServiceGrpcTransport, transports.DataplexServiceGrpcAsyncIOTransport]) +def test_dataplex_service_transport_channel_mtls_with_client_cert_source( + transport_class +): + with mock.patch("grpc.ssl_channel_credentials", autospec=True) as grpc_ssl_channel_cred: + with mock.patch.object(transport_class, "create_channel") as grpc_create_channel: + mock_ssl_cred = mock.Mock() + grpc_ssl_channel_cred.return_value = mock_ssl_cred + + mock_grpc_channel = mock.Mock() + grpc_create_channel.return_value = mock_grpc_channel + + cred = ga_credentials.AnonymousCredentials() + with pytest.warns(DeprecationWarning): + with mock.patch.object(google.auth, 'default') as adc: + adc.return_value = (cred, None) + transport = transport_class( + host="squid.clam.whelk", + api_mtls_endpoint="mtls.squid.clam.whelk", + client_cert_source=client_cert_source_callback, + ) + adc.assert_called_once() + + grpc_ssl_channel_cred.assert_called_once_with( + certificate_chain=b"cert bytes", private_key=b"key bytes" + ) + grpc_create_channel.assert_called_once_with( + "mtls.squid.clam.whelk:443", + credentials=cred, + credentials_file=None, + scopes=None, + ssl_credentials=mock_ssl_cred, + quota_project_id=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + assert transport.grpc_channel == mock_grpc_channel + assert transport._ssl_channel_credentials == mock_ssl_cred + + +# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are +# removed from grpc/grpc_asyncio transport constructor. +@pytest.mark.parametrize("transport_class", [transports.DataplexServiceGrpcTransport, transports.DataplexServiceGrpcAsyncIOTransport]) +def test_dataplex_service_transport_channel_mtls_with_adc( + transport_class +): + mock_ssl_cred = mock.Mock() + with mock.patch.multiple( + "google.auth.transport.grpc.SslCredentials", + __init__=mock.Mock(return_value=None), + ssl_credentials=mock.PropertyMock(return_value=mock_ssl_cred), + ): + with mock.patch.object(transport_class, "create_channel") as grpc_create_channel: + mock_grpc_channel = mock.Mock() + grpc_create_channel.return_value = mock_grpc_channel + mock_cred = mock.Mock() + + with pytest.warns(DeprecationWarning): + transport = transport_class( + host="squid.clam.whelk", + credentials=mock_cred, + api_mtls_endpoint="mtls.squid.clam.whelk", + client_cert_source=None, + ) + + grpc_create_channel.assert_called_once_with( + "mtls.squid.clam.whelk:443", + credentials=mock_cred, + credentials_file=None, + scopes=None, + ssl_credentials=mock_ssl_cred, + quota_project_id=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + assert transport.grpc_channel == mock_grpc_channel + + +def test_dataplex_service_grpc_lro_client(): + client = DataplexServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + transport = client.transport + + # Ensure that we have a api-core operations client. + assert isinstance( + transport.operations_client, + operations_v1.OperationsClient, + ) + + # Ensure that subsequent calls to the property send the exact same object. 
+    assert transport.operations_client is transport.operations_client
+
+
+def test_dataplex_service_grpc_lro_async_client():
+    client = DataplexServiceAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport='grpc_asyncio',
+    )
+    transport = client.transport
+
+    # Ensure that we have an api-core operations client.
+    assert isinstance(
+        transport.operations_client,
+        operations_v1.OperationsAsyncClient,
+    )
+
+    # Ensure that subsequent calls to the property send the exact same object.
+    assert transport.operations_client is transport.operations_client
+
+
+def test_action_path():
+    project = "squid"
+    location = "clam"
+    lake = "whelk"
+    action = "octopus"
+    expected = "projects/{project}/locations/{location}/lakes/{lake}/actions/{action}".format(project=project, location=location, lake=lake, action=action, )
+    actual = DataplexServiceClient.action_path(project, location, lake, action)
+    assert expected == actual
+
+
+def test_parse_action_path():
+    expected = {
+        "project": "oyster",
+        "location": "nudibranch",
+        "lake": "cuttlefish",
+        "action": "mussel",
+    }
+    path = DataplexServiceClient.action_path(**expected)
+
+    # Check that the path construction is reversible.
+    actual = DataplexServiceClient.parse_action_path(path)
+    assert expected == actual
+
+def test_asset_path():
+    project = "winkle"
+    location = "nautilus"
+    lake = "scallop"
+    zone = "abalone"
+    asset = "squid"
+    expected = "projects/{project}/locations/{location}/lakes/{lake}/zones/{zone}/assets/{asset}".format(project=project, location=location, lake=lake, zone=zone, asset=asset, )
+    actual = DataplexServiceClient.asset_path(project, location, lake, zone, asset)
+    assert expected == actual
+
+
+def test_parse_asset_path():
+    expected = {
+        "project": "clam",
+        "location": "whelk",
+        "lake": "octopus",
+        "zone": "oyster",
+        "asset": "nudibranch",
+    }
+    path = DataplexServiceClient.asset_path(**expected)
+
+    # Check that the path construction is reversible.
+    actual = DataplexServiceClient.parse_asset_path(path)
+    assert expected == actual
+
+def test_environment_path():
+    project = "cuttlefish"
+    location = "mussel"
+    lake = "winkle"
+    environment = "nautilus"
+    expected = "projects/{project}/locations/{location}/lakes/{lake}/environments/{environment}".format(project=project, location=location, lake=lake, environment=environment, )
+    actual = DataplexServiceClient.environment_path(project, location, lake, environment)
+    assert expected == actual
+
+
+def test_parse_environment_path():
+    expected = {
+        "project": "scallop",
+        "location": "abalone",
+        "lake": "squid",
+        "environment": "clam",
+    }
+    path = DataplexServiceClient.environment_path(**expected)
+
+    # Check that the path construction is reversible.
+    actual = DataplexServiceClient.parse_environment_path(path)
+    assert expected == actual
+
+def test_job_path():
+    project = "whelk"
+    location = "octopus"
+    lake = "oyster"
+    task = "nudibranch"
+    job = "cuttlefish"
+    expected = "projects/{project}/locations/{location}/lakes/{lake}/tasks/{task}/jobs/{job}".format(project=project, location=location, lake=lake, task=task, job=job, )
+    actual = DataplexServiceClient.job_path(project, location, lake, task, job)
+    assert expected == actual
+
+
+def test_parse_job_path():
+    expected = {
+        "project": "mussel",
+        "location": "winkle",
+        "lake": "nautilus",
+        "task": "scallop",
+        "job": "abalone",
+    }
+    path = DataplexServiceClient.job_path(**expected)
+
+    # Check that the path construction is reversible.
+ actual = DataplexServiceClient.parse_job_path(path) + assert expected == actual + +def test_lake_path(): + project = "squid" + location = "clam" + lake = "whelk" + expected = "projects/{project}/locations/{location}/lakes/{lake}".format(project=project, location=location, lake=lake, ) + actual = DataplexServiceClient.lake_path(project, location, lake) + assert expected == actual + + +def test_parse_lake_path(): + expected = { + "project": "octopus", + "location": "oyster", + "lake": "nudibranch", + } + path = DataplexServiceClient.lake_path(**expected) + + # Check that the path construction is reversible. + actual = DataplexServiceClient.parse_lake_path(path) + assert expected == actual + +def test_session_path(): + project = "cuttlefish" + location = "mussel" + lake = "winkle" + environment = "nautilus" + session = "scallop" + expected = "projects/{project}/locations/{location}/lakes/{lake}/environments/{environment}/sessions/{session}".format(project=project, location=location, lake=lake, environment=environment, session=session, ) + actual = DataplexServiceClient.session_path(project, location, lake, environment, session) + assert expected == actual + + +def test_parse_session_path(): + expected = { + "project": "abalone", + "location": "squid", + "lake": "clam", + "environment": "whelk", + "session": "octopus", + } + path = DataplexServiceClient.session_path(**expected) + + # Check that the path construction is reversible. + actual = DataplexServiceClient.parse_session_path(path) + assert expected == actual + +def test_task_path(): + project = "oyster" + location = "nudibranch" + lake = "cuttlefish" + task = "mussel" + expected = "projects/{project}/locations/{location}/lakes/{lake}/tasks/{task}".format(project=project, location=location, lake=lake, task=task, ) + actual = DataplexServiceClient.task_path(project, location, lake, task) + assert expected == actual + + +def test_parse_task_path(): + expected = { + "project": "winkle", + "location": "nautilus", + "lake": "scallop", + "task": "abalone", + } + path = DataplexServiceClient.task_path(**expected) + + # Check that the path construction is reversible. + actual = DataplexServiceClient.parse_task_path(path) + assert expected == actual + +def test_zone_path(): + project = "squid" + location = "clam" + lake = "whelk" + zone = "octopus" + expected = "projects/{project}/locations/{location}/lakes/{lake}/zones/{zone}".format(project=project, location=location, lake=lake, zone=zone, ) + actual = DataplexServiceClient.zone_path(project, location, lake, zone) + assert expected == actual + + +def test_parse_zone_path(): + expected = { + "project": "oyster", + "location": "nudibranch", + "lake": "cuttlefish", + "zone": "mussel", + } + path = DataplexServiceClient.zone_path(**expected) + + # Check that the path construction is reversible. + actual = DataplexServiceClient.parse_zone_path(path) + assert expected == actual + +def test_common_billing_account_path(): + billing_account = "winkle" + expected = "billingAccounts/{billing_account}".format(billing_account=billing_account, ) + actual = DataplexServiceClient.common_billing_account_path(billing_account) + assert expected == actual + + +def test_parse_common_billing_account_path(): + expected = { + "billing_account": "nautilus", + } + path = DataplexServiceClient.common_billing_account_path(**expected) + + # Check that the path construction is reversible. 
+ actual = DataplexServiceClient.parse_common_billing_account_path(path) + assert expected == actual + +def test_common_folder_path(): + folder = "scallop" + expected = "folders/{folder}".format(folder=folder, ) + actual = DataplexServiceClient.common_folder_path(folder) + assert expected == actual + + +def test_parse_common_folder_path(): + expected = { + "folder": "abalone", + } + path = DataplexServiceClient.common_folder_path(**expected) + + # Check that the path construction is reversible. + actual = DataplexServiceClient.parse_common_folder_path(path) + assert expected == actual + +def test_common_organization_path(): + organization = "squid" + expected = "organizations/{organization}".format(organization=organization, ) + actual = DataplexServiceClient.common_organization_path(organization) + assert expected == actual + + +def test_parse_common_organization_path(): + expected = { + "organization": "clam", + } + path = DataplexServiceClient.common_organization_path(**expected) + + # Check that the path construction is reversible. + actual = DataplexServiceClient.parse_common_organization_path(path) + assert expected == actual + +def test_common_project_path(): + project = "whelk" + expected = "projects/{project}".format(project=project, ) + actual = DataplexServiceClient.common_project_path(project) + assert expected == actual + + +def test_parse_common_project_path(): + expected = { + "project": "octopus", + } + path = DataplexServiceClient.common_project_path(**expected) + + # Check that the path construction is reversible. + actual = DataplexServiceClient.parse_common_project_path(path) + assert expected == actual + +def test_common_location_path(): + project = "oyster" + location = "nudibranch" + expected = "projects/{project}/locations/{location}".format(project=project, location=location, ) + actual = DataplexServiceClient.common_location_path(project, location) + assert expected == actual + + +def test_parse_common_location_path(): + expected = { + "project": "cuttlefish", + "location": "mussel", + } + path = DataplexServiceClient.common_location_path(**expected) + + # Check that the path construction is reversible. + actual = DataplexServiceClient.parse_common_location_path(path) + assert expected == actual + + +def test_client_with_default_client_info(): + client_info = gapic_v1.client_info.ClientInfo() + + with mock.patch.object(transports.DataplexServiceTransport, '_prep_wrapped_messages') as prep: + client = DataplexServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + client_info=client_info, + ) + prep.assert_called_once_with(client_info) + + with mock.patch.object(transports.DataplexServiceTransport, '_prep_wrapped_messages') as prep: + transport_class = DataplexServiceClient.get_transport_class() + transport = transport_class( + credentials=ga_credentials.AnonymousCredentials(), + client_info=client_info, + ) + prep.assert_called_once_with(client_info) + + +def test_delete_operation(transport: str = "grpc"): + client = DataplexServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.DeleteOperationRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_operation), "__call__") as call: + # Designate an appropriate return value for the call. 
+ call.return_value = None + response = client.delete_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert response is None +@pytest.mark.asyncio +async def test_delete_operation_async(transport: str = "grpc_asyncio"): + client = DataplexServiceAsyncClient( + credentials=async_anonymous_credentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.DeleteOperationRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + None + ) + response = await client.delete_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert response is None + +def test_delete_operation_field_headers(): + client = DataplexServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.DeleteOperationRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_operation), "__call__") as call: + call.return_value = None + + client.delete_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "name=locations",) in kw["metadata"] +@pytest.mark.asyncio +async def test_delete_operation_field_headers_async(): + client = DataplexServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.DeleteOperationRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_operation), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + None + ) + await client.delete_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "name=locations",) in kw["metadata"] + +def test_delete_operation_from_dict(): + client = DataplexServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_operation), "__call__") as call: + # Designate an appropriate return value for the call. 
+ call.return_value = None + + response = client.delete_operation( + request={ + "name": "locations", + } + ) + call.assert_called() +@pytest.mark.asyncio +async def test_delete_operation_from_dict_async(): + client = DataplexServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + None + ) + response = await client.delete_operation( + request={ + "name": "locations", + } + ) + call.assert_called() + + +def test_cancel_operation(transport: str = "grpc"): + client = DataplexServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.CancelOperationRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = None + response = client.cancel_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert response is None +@pytest.mark.asyncio +async def test_cancel_operation_async(transport: str = "grpc_asyncio"): + client = DataplexServiceAsyncClient( + credentials=async_anonymous_credentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.CancelOperationRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + None + ) + response = await client.cancel_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert response is None + +def test_cancel_operation_field_headers(): + client = DataplexServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.CancelOperationRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: + call.return_value = None + + client.cancel_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. 
+ _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "name=locations",) in kw["metadata"] +@pytest.mark.asyncio +async def test_cancel_operation_field_headers_async(): + client = DataplexServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.CancelOperationRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + None + ) + await client.cancel_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "name=locations",) in kw["metadata"] + +def test_cancel_operation_from_dict(): + client = DataplexServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = None + + response = client.cancel_operation( + request={ + "name": "locations", + } + ) + call.assert_called() +@pytest.mark.asyncio +async def test_cancel_operation_from_dict_async(): + client = DataplexServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + None + ) + response = await client.cancel_operation( + request={ + "name": "locations", + } + ) + call.assert_called() + + +def test_get_operation(transport: str = "grpc"): + client = DataplexServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.GetOperationRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation() + response = client.get_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, operations_pb2.Operation) +@pytest.mark.asyncio +async def test_get_operation_async(transport: str = "grpc_asyncio"): + client = DataplexServiceAsyncClient( + credentials=async_anonymous_credentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.GetOperationRequest() + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object(type(client.transport.get_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation() + ) + response = await client.get_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, operations_pb2.Operation) + +def test_get_operation_field_headers(): + client = DataplexServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.GetOperationRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_operation), "__call__") as call: + call.return_value = operations_pb2.Operation() + + client.get_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "name=locations",) in kw["metadata"] +@pytest.mark.asyncio +async def test_get_operation_field_headers_async(): + client = DataplexServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.GetOperationRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_operation), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation() + ) + await client.get_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "name=locations",) in kw["metadata"] + +def test_get_operation_from_dict(): + client = DataplexServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation() + + response = client.get_operation( + request={ + "name": "locations", + } + ) + call.assert_called() +@pytest.mark.asyncio +async def test_get_operation_from_dict_async(): + client = DataplexServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_operation), "__call__") as call: + # Designate an appropriate return value for the call. 
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation() + ) + response = await client.get_operation( + request={ + "name": "locations", + } + ) + call.assert_called() + + +def test_list_operations(transport: str = "grpc"): + client = DataplexServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.ListOperationsRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_operations), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.ListOperationsResponse() + response = client.list_operations(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, operations_pb2.ListOperationsResponse) +@pytest.mark.asyncio +async def test_list_operations_async(transport: str = "grpc_asyncio"): + client = DataplexServiceAsyncClient( + credentials=async_anonymous_credentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.ListOperationsRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_operations), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.ListOperationsResponse() + ) + response = await client.list_operations(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, operations_pb2.ListOperationsResponse) + +def test_list_operations_field_headers(): + client = DataplexServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.ListOperationsRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_operations), "__call__") as call: + call.return_value = operations_pb2.ListOperationsResponse() + + client.list_operations(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "name=locations",) in kw["metadata"] +@pytest.mark.asyncio +async def test_list_operations_field_headers_async(): + client = DataplexServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. 
+ request = operations_pb2.ListOperationsRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_operations), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.ListOperationsResponse() + ) + await client.list_operations(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "name=locations",) in kw["metadata"] + +def test_list_operations_from_dict(): + client = DataplexServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_operations), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.ListOperationsResponse() + + response = client.list_operations( + request={ + "name": "locations", + } + ) + call.assert_called() +@pytest.mark.asyncio +async def test_list_operations_from_dict_async(): + client = DataplexServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_operations), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.ListOperationsResponse() + ) + response = await client.list_operations( + request={ + "name": "locations", + } + ) + call.assert_called() + + +def test_list_locations(transport: str = "grpc"): + client = DataplexServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = locations_pb2.ListLocationsRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_locations), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = locations_pb2.ListLocationsResponse() + response = client.list_locations(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, locations_pb2.ListLocationsResponse) +@pytest.mark.asyncio +async def test_list_locations_async(transport: str = "grpc_asyncio"): + client = DataplexServiceAsyncClient( + credentials=async_anonymous_credentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = locations_pb2.ListLocationsRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_locations), "__call__") as call: + # Designate an appropriate return value for the call. 
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + locations_pb2.ListLocationsResponse() + ) + response = await client.list_locations(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, locations_pb2.ListLocationsResponse) + +def test_list_locations_field_headers(): + client = DataplexServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = locations_pb2.ListLocationsRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_locations), "__call__") as call: + call.return_value = locations_pb2.ListLocationsResponse() + + client.list_locations(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "name=locations",) in kw["metadata"] +@pytest.mark.asyncio +async def test_list_locations_field_headers_async(): + client = DataplexServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = locations_pb2.ListLocationsRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_locations), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + locations_pb2.ListLocationsResponse() + ) + await client.list_locations(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "name=locations",) in kw["metadata"] + +def test_list_locations_from_dict(): + client = DataplexServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_locations), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = locations_pb2.ListLocationsResponse() + + response = client.list_locations( + request={ + "name": "locations", + } + ) + call.assert_called() +@pytest.mark.asyncio +async def test_list_locations_from_dict_async(): + client = DataplexServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_locations), "__call__") as call: + # Designate an appropriate return value for the call. 
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + locations_pb2.ListLocationsResponse() + ) + response = await client.list_locations( + request={ + "name": "locations", + } + ) + call.assert_called() + + +def test_get_location(transport: str = "grpc"): + client = DataplexServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = locations_pb2.GetLocationRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_location), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = locations_pb2.Location() + response = client.get_location(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, locations_pb2.Location) +@pytest.mark.asyncio +async def test_get_location_async(transport: str = "grpc_asyncio"): + client = DataplexServiceAsyncClient( + credentials=async_anonymous_credentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = locations_pb2.GetLocationRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_location), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + locations_pb2.Location() + ) + response = await client.get_location(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, locations_pb2.Location) + +def test_get_location_field_headers(): + client = DataplexServiceClient( + credentials=ga_credentials.AnonymousCredentials()) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = locations_pb2.GetLocationRequest() + request.name = "locations/abc" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_location), "__call__") as call: + call.return_value = locations_pb2.Location() + + client.get_location(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "name=locations/abc",) in kw["metadata"] +@pytest.mark.asyncio +async def test_get_location_field_headers_async(): + client = DataplexServiceAsyncClient( + credentials=async_anonymous_credentials() + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = locations_pb2.GetLocationRequest() + request.name = "locations/abc" + + # Mock the actual call within the gRPC stub, and fake the request. 
+    with mock.patch.object(type(client.transport.get_location), "__call__") as call:
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
+            locations_pb2.Location()
+        )
+        await client.get_location(request)
+    # Establish that the underlying gRPC stub method was called.
+    assert len(call.mock_calls) == 1
+    _, args, _ = call.mock_calls[0]
+    assert args[0] == request
+
+    # Establish that the field header was sent.
+    _, _, kw = call.mock_calls[0]
+    assert ("x-goog-request-params", "name=locations/abc",) in kw["metadata"]
+
+def test_get_location_from_dict():
+    client = DataplexServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client.transport.get_location), "__call__") as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = locations_pb2.Location()
+
+        response = client.get_location(
+            request={
+                "name": "locations/abc",
+            }
+        )
+        call.assert_called()
+@pytest.mark.asyncio
+async def test_get_location_from_dict_async():
+    client = DataplexServiceAsyncClient(
+        credentials=async_anonymous_credentials(),
+    )
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client.transport.get_location), "__call__") as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
+            locations_pb2.Location()
+        )
+        response = await client.get_location(
+            request={
+                "name": "locations/abc",
+            }
+        )
+        call.assert_called()
+
+
+def test_transport_close_grpc():
+    client = DataplexServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport="grpc"
+    )
+    with mock.patch.object(type(getattr(client.transport, "_grpc_channel")), "close") as close:
+        with client:
+            close.assert_not_called()
+        close.assert_called_once()
+
+
+@pytest.mark.asyncio
+async def test_transport_close_grpc_asyncio():
+    client = DataplexServiceAsyncClient(
+        credentials=async_anonymous_credentials(),
+        transport="grpc_asyncio"
+    )
+    with mock.patch.object(type(getattr(client.transport, "_grpc_channel")), "close") as close:
+        async with client:
+            close.assert_not_called()
+        close.assert_called_once()
+
+
+def test_client_ctx():
+    transports = [
+        'grpc',
+    ]
+    for transport in transports:
+        client = DataplexServiceClient(
+            credentials=ga_credentials.AnonymousCredentials(),
+            transport=transport
+        )
+        # Test client calls underlying transport.
+ with mock.patch.object(type(client.transport), "close") as close: + close.assert_not_called() + with client: + pass + close.assert_called() + +@pytest.mark.parametrize("client_class,transport_class", [ + (DataplexServiceClient, transports.DataplexServiceGrpcTransport), + (DataplexServiceAsyncClient, transports.DataplexServiceGrpcAsyncIOTransport), +]) +def test_api_key_credentials(client_class, transport_class): + with mock.patch.object( + google.auth._default, "get_api_key_credentials", create=True + ) as get_api_key_credentials: + mock_cred = mock.Mock() + get_api_key_credentials.return_value = mock_cred + options = client_options.ClientOptions() + options.api_key = "api_key" + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options) + patched.assert_called_once_with( + credentials=mock_cred, + credentials_file=None, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) diff --git a/owl-bot-staging/google-cloud-dataplex/v1/tests/unit/gapic/dataplex_v1/test_metadata_service.py b/owl-bot-staging/google-cloud-dataplex/v1/tests/unit/gapic/dataplex_v1/test_metadata_service.py new file mode 100644 index 000000000000..a5293c1b348a --- /dev/null +++ b/owl-bot-staging/google-cloud-dataplex/v1/tests/unit/gapic/dataplex_v1/test_metadata_service.py @@ -0,0 +1,5952 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +import os +# try/except added for compatibility with python < 3.8 +try: + from unittest import mock + from unittest.mock import AsyncMock # pragma: NO COVER +except ImportError: # pragma: NO COVER + import mock + +import grpc +from grpc.experimental import aio +import math +import pytest +from google.api_core import api_core_version +from proto.marshal.rules.dates import DurationRule, TimestampRule +from proto.marshal.rules import wrappers + +try: + from google.auth.aio import credentials as ga_credentials_async + HAS_GOOGLE_AUTH_AIO = True +except ImportError: # pragma: NO COVER + HAS_GOOGLE_AUTH_AIO = False + +from google.api_core import client_options +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import grpc_helpers +from google.api_core import grpc_helpers_async +from google.api_core import path_template +from google.api_core import retry as retries +from google.auth import credentials as ga_credentials +from google.auth.exceptions import MutualTLSChannelError +from google.cloud.dataplex_v1.services.metadata_service import MetadataServiceAsyncClient +from google.cloud.dataplex_v1.services.metadata_service import MetadataServiceClient +from google.cloud.dataplex_v1.services.metadata_service import pagers +from google.cloud.dataplex_v1.services.metadata_service import transports +from google.cloud.dataplex_v1.types import metadata_ +from google.cloud.location import locations_pb2 +from google.iam.v1 import iam_policy_pb2 # type: ignore +from google.iam.v1 import options_pb2 # type: ignore +from google.iam.v1 import policy_pb2 # type: ignore +from google.longrunning import operations_pb2 # type: ignore +from google.oauth2 import service_account +from google.protobuf import timestamp_pb2 # type: ignore +import google.auth + + +async def mock_async_gen(data, chunk_size=1): + for i in range(0, len(data)): # pragma: NO COVER + chunk = data[i : i + chunk_size] + yield chunk.encode("utf-8") + +def client_cert_source_callback(): + return b"cert bytes", b"key bytes" + +# TODO: use async auth anon credentials by default once the minimum version of google-auth is upgraded. +# See related issue: https://github.com/googleapis/gapic-generator-python/issues/2107. +def async_anonymous_credentials(): + if HAS_GOOGLE_AUTH_AIO: + return ga_credentials_async.AnonymousCredentials() + return ga_credentials.AnonymousCredentials() + +# If default endpoint is localhost, then default mtls endpoint will be the same. +# This method modifies the default endpoint so the client can produce a different +# mtls endpoint for endpoint testing purposes. +def modify_default_endpoint(client): + return "foo.googleapis.com" if ("localhost" in client.DEFAULT_ENDPOINT) else client.DEFAULT_ENDPOINT + +# If default endpoint template is localhost, then default mtls endpoint will be the same. +# This method modifies the default endpoint template so the client can produce a different +# mtls endpoint for endpoint testing purposes. 
+def modify_default_endpoint_template(client): + return "test.{UNIVERSE_DOMAIN}" if ("localhost" in client._DEFAULT_ENDPOINT_TEMPLATE) else client._DEFAULT_ENDPOINT_TEMPLATE + + +def test__get_default_mtls_endpoint(): + api_endpoint = "example.googleapis.com" + api_mtls_endpoint = "example.mtls.googleapis.com" + sandbox_endpoint = "example.sandbox.googleapis.com" + sandbox_mtls_endpoint = "example.mtls.sandbox.googleapis.com" + non_googleapi = "api.example.com" + + assert MetadataServiceClient._get_default_mtls_endpoint(None) is None + assert MetadataServiceClient._get_default_mtls_endpoint(api_endpoint) == api_mtls_endpoint + assert MetadataServiceClient._get_default_mtls_endpoint(api_mtls_endpoint) == api_mtls_endpoint + assert MetadataServiceClient._get_default_mtls_endpoint(sandbox_endpoint) == sandbox_mtls_endpoint + assert MetadataServiceClient._get_default_mtls_endpoint(sandbox_mtls_endpoint) == sandbox_mtls_endpoint + assert MetadataServiceClient._get_default_mtls_endpoint(non_googleapi) == non_googleapi + +def test__read_environment_variables(): + assert MetadataServiceClient._read_environment_variables() == (False, "auto", None) + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + assert MetadataServiceClient._read_environment_variables() == (True, "auto", None) + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "false"}): + assert MetadataServiceClient._read_environment_variables() == (False, "auto", None) + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"}): + with pytest.raises(ValueError) as excinfo: + MetadataServiceClient._read_environment_variables() + assert str(excinfo.value) == "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + assert MetadataServiceClient._read_environment_variables() == (False, "never", None) + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + assert MetadataServiceClient._read_environment_variables() == (False, "always", None) + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"}): + assert MetadataServiceClient._read_environment_variables() == (False, "auto", None) + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): + with pytest.raises(MutualTLSChannelError) as excinfo: + MetadataServiceClient._read_environment_variables() + assert str(excinfo.value) == "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + + with mock.patch.dict(os.environ, {"GOOGLE_CLOUD_UNIVERSE_DOMAIN": "foo.com"}): + assert MetadataServiceClient._read_environment_variables() == (False, "auto", "foo.com") + +def test__get_client_cert_source(): + mock_provided_cert_source = mock.Mock() + mock_default_cert_source = mock.Mock() + + assert MetadataServiceClient._get_client_cert_source(None, False) is None + assert MetadataServiceClient._get_client_cert_source(mock_provided_cert_source, False) is None + assert MetadataServiceClient._get_client_cert_source(mock_provided_cert_source, True) == mock_provided_cert_source + + with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=True): + with mock.patch('google.auth.transport.mtls.default_client_cert_source', return_value=mock_default_cert_source): + assert MetadataServiceClient._get_client_cert_source(None, True) is mock_default_cert_source + assert 
MetadataServiceClient._get_client_cert_source(mock_provided_cert_source, "true") is mock_provided_cert_source + +@mock.patch.object(MetadataServiceClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(MetadataServiceClient)) +@mock.patch.object(MetadataServiceAsyncClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(MetadataServiceAsyncClient)) +def test__get_api_endpoint(): + api_override = "foo.com" + mock_client_cert_source = mock.Mock() + default_universe = MetadataServiceClient._DEFAULT_UNIVERSE + default_endpoint = MetadataServiceClient._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=default_universe) + mock_universe = "bar.com" + mock_endpoint = MetadataServiceClient._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=mock_universe) + + assert MetadataServiceClient._get_api_endpoint(api_override, mock_client_cert_source, default_universe, "always") == api_override + assert MetadataServiceClient._get_api_endpoint(None, mock_client_cert_source, default_universe, "auto") == MetadataServiceClient.DEFAULT_MTLS_ENDPOINT + assert MetadataServiceClient._get_api_endpoint(None, None, default_universe, "auto") == default_endpoint + assert MetadataServiceClient._get_api_endpoint(None, None, default_universe, "always") == MetadataServiceClient.DEFAULT_MTLS_ENDPOINT + assert MetadataServiceClient._get_api_endpoint(None, mock_client_cert_source, default_universe, "always") == MetadataServiceClient.DEFAULT_MTLS_ENDPOINT + assert MetadataServiceClient._get_api_endpoint(None, None, mock_universe, "never") == mock_endpoint + assert MetadataServiceClient._get_api_endpoint(None, None, default_universe, "never") == default_endpoint + + with pytest.raises(MutualTLSChannelError) as excinfo: + MetadataServiceClient._get_api_endpoint(None, mock_client_cert_source, mock_universe, "auto") + assert str(excinfo.value) == "mTLS is not supported in any universe other than googleapis.com." + + +def test__get_universe_domain(): + client_universe_domain = "foo.com" + universe_domain_env = "bar.com" + + assert MetadataServiceClient._get_universe_domain(client_universe_domain, universe_domain_env) == client_universe_domain + assert MetadataServiceClient._get_universe_domain(None, universe_domain_env) == universe_domain_env + assert MetadataServiceClient._get_universe_domain(None, None) == MetadataServiceClient._DEFAULT_UNIVERSE + + with pytest.raises(ValueError) as excinfo: + MetadataServiceClient._get_universe_domain("", None) + assert str(excinfo.value) == "Universe Domain cannot be an empty string." 
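+
+# Editorial sketch (an illustrative addition, not generator output): the
+# endpoint helpers exercised above reduce to a plain str.format call on the
+# endpoint template. The template and universe values below are assumptions
+# for illustration, not values asserted by the tests.
+def _example_resolve_endpoint(template="dataplex.{UNIVERSE_DOMAIN}", universe="googleapis.com"):
+    # e.g. "dataplex.{UNIVERSE_DOMAIN}" with universe "bar.com" -> "dataplex.bar.com"
+    return template.format(UNIVERSE_DOMAIN=universe)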
+
+
+@pytest.mark.parametrize("client_class,transport_name", [
+    (MetadataServiceClient, "grpc"),
+    (MetadataServiceAsyncClient, "grpc_asyncio"),
+])
+def test_metadata_service_client_from_service_account_info(client_class, transport_name):
+    creds = ga_credentials.AnonymousCredentials()
+    with mock.patch.object(service_account.Credentials, 'from_service_account_info') as factory:
+        factory.return_value = creds
+        info = {"valid": True}
+        client = client_class.from_service_account_info(info, transport=transport_name)
+        assert client.transport._credentials == creds
+        assert isinstance(client, client_class)
+
+        assert client.transport._host == (
+            'dataplex.googleapis.com:443'
+        )
+
+
+@pytest.mark.parametrize("transport_class,transport_name", [
+    (transports.MetadataServiceGrpcTransport, "grpc"),
+    (transports.MetadataServiceGrpcAsyncIOTransport, "grpc_asyncio"),
+])
+def test_metadata_service_client_service_account_always_use_jwt(transport_class, transport_name):
+    with mock.patch.object(service_account.Credentials, 'with_always_use_jwt_access', create=True) as use_jwt:
+        creds = service_account.Credentials(None, None, None)
+        transport = transport_class(credentials=creds, always_use_jwt_access=True)
+        use_jwt.assert_called_once_with(True)
+
+    with mock.patch.object(service_account.Credentials, 'with_always_use_jwt_access', create=True) as use_jwt:
+        creds = service_account.Credentials(None, None, None)
+        transport = transport_class(credentials=creds, always_use_jwt_access=False)
+        use_jwt.assert_not_called()
+
+
+@pytest.mark.parametrize("client_class,transport_name", [
+    (MetadataServiceClient, "grpc"),
+    (MetadataServiceAsyncClient, "grpc_asyncio"),
+])
+def test_metadata_service_client_from_service_account_file(client_class, transport_name):
+    creds = ga_credentials.AnonymousCredentials()
+    with mock.patch.object(service_account.Credentials, 'from_service_account_file') as factory:
+        factory.return_value = creds
+        client = client_class.from_service_account_file("dummy/file/path.json", transport=transport_name)
+        assert client.transport._credentials == creds
+        assert isinstance(client, client_class)
+
+        client = client_class.from_service_account_json("dummy/file/path.json", transport=transport_name)
+        assert client.transport._credentials == creds
+        assert isinstance(client, client_class)
+
+        assert client.transport._host == (
+            'dataplex.googleapis.com:443'
+        )
+
+
+def test_metadata_service_client_get_transport_class():
+    transport = MetadataServiceClient.get_transport_class()
+    available_transports = [
+        transports.MetadataServiceGrpcTransport,
+    ]
+    assert transport in available_transports
+
+    transport = MetadataServiceClient.get_transport_class("grpc")
+    assert transport == transports.MetadataServiceGrpcTransport
+
+
+@pytest.mark.parametrize("client_class,transport_class,transport_name", [
+    (MetadataServiceClient, transports.MetadataServiceGrpcTransport, "grpc"),
+    (MetadataServiceAsyncClient, transports.MetadataServiceGrpcAsyncIOTransport, "grpc_asyncio"),
+])
+@mock.patch.object(MetadataServiceClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(MetadataServiceClient))
+@mock.patch.object(MetadataServiceAsyncClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(MetadataServiceAsyncClient))
+def test_metadata_service_client_client_options(client_class, transport_class, transport_name):
+    # Check that if a transport instance is provided, we do not create a new one.
+    with mock.patch.object(MetadataServiceClient, 'get_transport_class') as gtc:
+        transport = transport_class(
+            credentials=ga_credentials.AnonymousCredentials()
+        )
+        client = client_class(transport=transport)
+        gtc.assert_not_called()
+
+    # Check that if the transport is provided as a name (str), we will create a new one.
+    with mock.patch.object(MetadataServiceClient, 'get_transport_class') as gtc:
+        client = client_class(transport=transport_name)
+        gtc.assert_called()
+
+    # Check the case api_endpoint is provided.
+    options = client_options.ClientOptions(api_endpoint="squid.clam.whelk")
+    with mock.patch.object(transport_class, '__init__') as patched:
+        patched.return_value = None
+        client = client_class(transport=transport_name, client_options=options)
+        patched.assert_called_once_with(
+            credentials=None,
+            credentials_file=None,
+            host="squid.clam.whelk",
+            scopes=None,
+            client_cert_source_for_mtls=None,
+            quota_project_id=None,
+            client_info=transports.base.DEFAULT_CLIENT_INFO,
+            always_use_jwt_access=True,
+            api_audience=None,
+        )
+
+    # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is
+    # "never".
+    with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}):
+        with mock.patch.object(transport_class, '__init__') as patched:
+            patched.return_value = None
+            client = client_class(transport=transport_name)
+            patched.assert_called_once_with(
+                credentials=None,
+                credentials_file=None,
+                host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE),
+                scopes=None,
+                client_cert_source_for_mtls=None,
+                quota_project_id=None,
+                client_info=transports.base.DEFAULT_CLIENT_INFO,
+                always_use_jwt_access=True,
+                api_audience=None,
+            )
+
+    # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is
+    # "always".
+    with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}):
+        with mock.patch.object(transport_class, '__init__') as patched:
+            patched.return_value = None
+            client = client_class(transport=transport_name)
+            patched.assert_called_once_with(
+                credentials=None,
+                credentials_file=None,
+                host=client.DEFAULT_MTLS_ENDPOINT,
+                scopes=None,
+                client_cert_source_for_mtls=None,
+                quota_project_id=None,
+                client_info=transports.base.DEFAULT_CLIENT_INFO,
+                always_use_jwt_access=True,
+                api_audience=None,
+            )
+
+    # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has
+    # unsupported value.
+    with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}):
+        with pytest.raises(MutualTLSChannelError) as excinfo:
+            client = client_class(transport=transport_name)
+    assert str(excinfo.value) == "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`"
+
+    # Check the case GOOGLE_API_USE_CLIENT_CERTIFICATE has unsupported value.
+    with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"}):
+        with pytest.raises(ValueError) as excinfo:
+            client = client_class(transport=transport_name)
+    assert str(excinfo.value) == "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`"
+
+    # Check the case quota_project_id is provided
+    options = client_options.ClientOptions(quota_project_id="octopus")
+    with mock.patch.object(transport_class, '__init__') as patched:
+        patched.return_value = None
+        client = client_class(client_options=options, transport=transport_name)
+        patched.assert_called_once_with(
+            credentials=None,
+            credentials_file=None,
+            host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE),
+            scopes=None,
+            client_cert_source_for_mtls=None,
+            quota_project_id="octopus",
+            client_info=transports.base.DEFAULT_CLIENT_INFO,
+            always_use_jwt_access=True,
+            api_audience=None,
+        )
+    # Check the case api_audience is provided
+    options = client_options.ClientOptions(api_audience="https://language.googleapis.com")
+    with mock.patch.object(transport_class, '__init__') as patched:
+        patched.return_value = None
+        client = client_class(client_options=options, transport=transport_name)
+        patched.assert_called_once_with(
+            credentials=None,
+            credentials_file=None,
+            host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE),
+            scopes=None,
+            client_cert_source_for_mtls=None,
+            quota_project_id=None,
+            client_info=transports.base.DEFAULT_CLIENT_INFO,
+            always_use_jwt_access=True,
+            api_audience="https://language.googleapis.com"
+        )
+
+@pytest.mark.parametrize("client_class,transport_class,transport_name,use_client_cert_env", [
+    (MetadataServiceClient, transports.MetadataServiceGrpcTransport, "grpc", "true"),
+    (MetadataServiceAsyncClient, transports.MetadataServiceGrpcAsyncIOTransport, "grpc_asyncio", "true"),
+    (MetadataServiceClient, transports.MetadataServiceGrpcTransport, "grpc", "false"),
+    (MetadataServiceAsyncClient, transports.MetadataServiceGrpcAsyncIOTransport, "grpc_asyncio", "false"),
+])
+@mock.patch.object(MetadataServiceClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(MetadataServiceClient))
+@mock.patch.object(MetadataServiceAsyncClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(MetadataServiceAsyncClient))
+@mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"})
+def test_metadata_service_client_mtls_env_auto(client_class, transport_class, transport_name, use_client_cert_env):
+    # This tests the endpoint autoswitch behavior. Endpoint is autoswitched to the default
+    # mtls endpoint, if GOOGLE_API_USE_CLIENT_CERTIFICATE is "true" and client cert exists.
+
+    # Check the case client_cert_source is provided. Whether client cert is used depends on
+    # GOOGLE_API_USE_CLIENT_CERTIFICATE value.
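+    # (Editorial note, summarizing the assertions that follow: when a cert
+    # source is available, "true" selects the mTLS endpoint and hands the cert
+    # source to the transport, while "false" keeps the regular endpoint and
+    # passes no cert source.)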
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}): + options = client_options.ClientOptions(client_cert_source=client_cert_source_callback) + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + + if use_client_cert_env == "false": + expected_client_cert_source = None + expected_host = client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE) + else: + expected_client_cert_source = client_cert_source_callback + expected_host = client.DEFAULT_MTLS_ENDPOINT + + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=expected_host, + scopes=None, + client_cert_source_for_mtls=expected_client_cert_source, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case ADC client cert is provided. Whether client cert is used depends on + # GOOGLE_API_USE_CLIENT_CERTIFICATE value. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}): + with mock.patch.object(transport_class, '__init__') as patched: + with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=True): + with mock.patch('google.auth.transport.mtls.default_client_cert_source', return_value=client_cert_source_callback): + if use_client_cert_env == "false": + expected_host = client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE) + expected_client_cert_source = None + else: + expected_host = client.DEFAULT_MTLS_ENDPOINT + expected_client_cert_source = client_cert_source_callback + + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=expected_host, + scopes=None, + client_cert_source_for_mtls=expected_client_cert_source, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case client_cert_source and ADC client cert are not provided. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}): + with mock.patch.object(transport_class, '__init__') as patched: + with mock.patch("google.auth.transport.mtls.has_default_client_cert_source", return_value=False): + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +@pytest.mark.parametrize("client_class", [ + MetadataServiceClient, MetadataServiceAsyncClient +]) +@mock.patch.object(MetadataServiceClient, "DEFAULT_ENDPOINT", modify_default_endpoint(MetadataServiceClient)) +@mock.patch.object(MetadataServiceAsyncClient, "DEFAULT_ENDPOINT", modify_default_endpoint(MetadataServiceAsyncClient)) +def test_metadata_service_client_get_mtls_endpoint_and_cert_source(client_class): + mock_client_cert_source = mock.Mock() + + # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "true". 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + mock_api_endpoint = "foo" + options = client_options.ClientOptions(client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint) + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source(options) + assert api_endpoint == mock_api_endpoint + assert cert_source == mock_client_cert_source + + # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "false". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "false"}): + mock_client_cert_source = mock.Mock() + mock_api_endpoint = "foo" + options = client_options.ClientOptions(client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint) + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source(options) + assert api_endpoint == mock_api_endpoint + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "never". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "always". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert doesn't exist. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=False): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert exists. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=True): + with mock.patch('google.auth.transport.mtls.default_client_cert_source', return_value=mock_client_cert_source): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT + assert cert_source == mock_client_cert_source + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has + # unsupported value. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): + with pytest.raises(MutualTLSChannelError) as excinfo: + client_class.get_mtls_endpoint_and_cert_source() + + assert str(excinfo.value) == "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + + # Check the case GOOGLE_API_USE_CLIENT_CERTIFICATE has unsupported value. 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"}): + with pytest.raises(ValueError) as excinfo: + client_class.get_mtls_endpoint_and_cert_source() + + assert str(excinfo.value) == "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" + +@pytest.mark.parametrize("client_class", [ + MetadataServiceClient, MetadataServiceAsyncClient +]) +@mock.patch.object(MetadataServiceClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(MetadataServiceClient)) +@mock.patch.object(MetadataServiceAsyncClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(MetadataServiceAsyncClient)) +def test_metadata_service_client_client_api_endpoint(client_class): + mock_client_cert_source = client_cert_source_callback + api_override = "foo.com" + default_universe = MetadataServiceClient._DEFAULT_UNIVERSE + default_endpoint = MetadataServiceClient._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=default_universe) + mock_universe = "bar.com" + mock_endpoint = MetadataServiceClient._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=mock_universe) + + # If ClientOptions.api_endpoint is set and GOOGLE_API_USE_CLIENT_CERTIFICATE="true", + # use ClientOptions.api_endpoint as the api endpoint regardless. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + with mock.patch("google.auth.transport.requests.AuthorizedSession.configure_mtls_channel"): + options = client_options.ClientOptions(client_cert_source=mock_client_cert_source, api_endpoint=api_override) + client = client_class(client_options=options, credentials=ga_credentials.AnonymousCredentials()) + assert client.api_endpoint == api_override + + # If ClientOptions.api_endpoint is not set and GOOGLE_API_USE_MTLS_ENDPOINT="never", + # use the _DEFAULT_ENDPOINT_TEMPLATE populated with GDU as the api endpoint. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + client = client_class(credentials=ga_credentials.AnonymousCredentials()) + assert client.api_endpoint == default_endpoint + + # If ClientOptions.api_endpoint is not set and GOOGLE_API_USE_MTLS_ENDPOINT="always", + # use the DEFAULT_MTLS_ENDPOINT as the api endpoint. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + client = client_class(credentials=ga_credentials.AnonymousCredentials()) + assert client.api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT + + # If ClientOptions.api_endpoint is not set, GOOGLE_API_USE_MTLS_ENDPOINT="auto" (default), + # GOOGLE_API_USE_CLIENT_CERTIFICATE="false" (default), default cert source doesn't exist, + # and ClientOptions.universe_domain="bar.com", + # use the _DEFAULT_ENDPOINT_TEMPLATE populated with universe domain as the api endpoint. 
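+    # (Editorial note: the hasattr() guard below appears to account for older
+    # google-api-core releases whose ClientOptions class lacks a
+    # universe_domain attribute; when the attribute is missing, the test falls
+    # back to asserting the default universe and endpoint.)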
+ options = client_options.ClientOptions() + universe_exists = hasattr(options, "universe_domain") + if universe_exists: + options = client_options.ClientOptions(universe_domain=mock_universe) + client = client_class(client_options=options, credentials=ga_credentials.AnonymousCredentials()) + else: + client = client_class(client_options=options, credentials=ga_credentials.AnonymousCredentials()) + assert client.api_endpoint == (mock_endpoint if universe_exists else default_endpoint) + assert client.universe_domain == (mock_universe if universe_exists else default_universe) + + # If ClientOptions does not have a universe domain attribute and GOOGLE_API_USE_MTLS_ENDPOINT="never", + # use the _DEFAULT_ENDPOINT_TEMPLATE populated with GDU as the api endpoint. + options = client_options.ClientOptions() + if hasattr(options, "universe_domain"): + delattr(options, "universe_domain") + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + client = client_class(client_options=options, credentials=ga_credentials.AnonymousCredentials()) + assert client.api_endpoint == default_endpoint + + +@pytest.mark.parametrize("client_class,transport_class,transport_name", [ + (MetadataServiceClient, transports.MetadataServiceGrpcTransport, "grpc"), + (MetadataServiceAsyncClient, transports.MetadataServiceGrpcAsyncIOTransport, "grpc_asyncio"), +]) +def test_metadata_service_client_client_options_scopes(client_class, transport_class, transport_name): + # Check the case scopes are provided. + options = client_options.ClientOptions( + scopes=["1", "2"], + ) + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE), + scopes=["1", "2"], + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + +@pytest.mark.parametrize("client_class,transport_class,transport_name,grpc_helpers", [ + (MetadataServiceClient, transports.MetadataServiceGrpcTransport, "grpc", grpc_helpers), + (MetadataServiceAsyncClient, transports.MetadataServiceGrpcAsyncIOTransport, "grpc_asyncio", grpc_helpers_async), +]) +def test_metadata_service_client_client_options_credentials_file(client_class, transport_class, transport_name, grpc_helpers): + # Check the case credentials file is provided. 
+ options = client_options.ClientOptions( + credentials_file="credentials.json" + ) + + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file="credentials.json", + host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + +def test_metadata_service_client_client_options_from_dict(): + with mock.patch('google.cloud.dataplex_v1.services.metadata_service.transports.MetadataServiceGrpcTransport.__init__') as grpc_transport: + grpc_transport.return_value = None + client = MetadataServiceClient( + client_options={'api_endpoint': 'squid.clam.whelk'} + ) + grpc_transport.assert_called_once_with( + credentials=None, + credentials_file=None, + host="squid.clam.whelk", + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +@pytest.mark.parametrize("client_class,transport_class,transport_name,grpc_helpers", [ + (MetadataServiceClient, transports.MetadataServiceGrpcTransport, "grpc", grpc_helpers), + (MetadataServiceAsyncClient, transports.MetadataServiceGrpcAsyncIOTransport, "grpc_asyncio", grpc_helpers_async), +]) +def test_metadata_service_client_create_channel_credentials_file(client_class, transport_class, transport_name, grpc_helpers): + # Check the case credentials file is provided. + options = client_options.ClientOptions( + credentials_file="credentials.json" + ) + + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file="credentials.json", + host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # test that the credentials from file are saved and used as the credentials. 
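+    # (Editorial note: the block below verifies that create_channel receives
+    # the credentials loaded from the file rather than the ADC credentials,
+    # i.e. that credentials_file takes precedence over google.auth.default().)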
+    with mock.patch.object(
+        google.auth, "load_credentials_from_file", autospec=True
+    ) as load_creds, mock.patch.object(
+        google.auth, "default", autospec=True
+    ) as adc, mock.patch.object(
+        grpc_helpers, "create_channel"
+    ) as create_channel:
+        creds = ga_credentials.AnonymousCredentials()
+        file_creds = ga_credentials.AnonymousCredentials()
+        load_creds.return_value = (file_creds, None)
+        adc.return_value = (creds, None)
+        client = client_class(client_options=options, transport=transport_name)
+        create_channel.assert_called_with(
+            "dataplex.googleapis.com:443",
+            credentials=file_creds,
+            credentials_file=None,
+            quota_project_id=None,
+            default_scopes=(
+                'https://www.googleapis.com/auth/cloud-platform',
+            ),
+            scopes=None,
+            default_host="dataplex.googleapis.com",
+            ssl_credentials=None,
+            options=[
+                ("grpc.max_send_message_length", -1),
+                ("grpc.max_receive_message_length", -1),
+            ],
+        )
+
+
+@pytest.mark.parametrize("request_type", [
+    metadata_.CreateEntityRequest,
+    dict,
+])
+def test_create_entity(request_type, transport: str = 'grpc'):
+    client = MetadataServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport,
+    )
+
+    # Everything is optional in proto3 as far as the runtime is concerned,
+    # and we are mocking out the actual API, so just send an empty request.
+    request = request_type()
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.create_entity),
+            '__call__') as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = metadata_.Entity(
+            name='name_value',
+            display_name='display_name_value',
+            description='description_value',
+            id='id_value',
+            etag='etag_value',
+            type_=metadata_.Entity.Type.TABLE,
+            asset='asset_value',
+            data_path='data_path_value',
+            data_path_pattern='data_path_pattern_value',
+            catalog_entry='catalog_entry_value',
+            system=metadata_.StorageSystem.CLOUD_STORAGE,
+            uid='uid_value',
+        )
+        response = client.create_entity(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        request = metadata_.CreateEntityRequest()
+        assert args[0] == request
+
+    # Establish that the response is the type that we expect.
+    assert isinstance(response, metadata_.Entity)
+    assert response.name == 'name_value'
+    assert response.display_name == 'display_name_value'
+    assert response.description == 'description_value'
+    assert response.id == 'id_value'
+    assert response.etag == 'etag_value'
+    assert response.type_ == metadata_.Entity.Type.TABLE
+    assert response.asset == 'asset_value'
+    assert response.data_path == 'data_path_value'
+    assert response.data_path_pattern == 'data_path_pattern_value'
+    assert response.catalog_entry == 'catalog_entry_value'
+    assert response.system == metadata_.StorageSystem.CLOUD_STORAGE
+    assert response.uid == 'uid_value'
+
+
+def test_create_entity_non_empty_request_with_auto_populated_field():
+    # This test is a coverage failsafe to make sure that UUID4 fields are
+    # automatically populated, according to AIP-4235, with non-empty requests.
+    client = MetadataServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport='grpc',
+    )
+
+    # Populate all string fields in the request which are not UUID4
+    # since we want to check that UUID4 are populated automatically
+    # if they meet the requirements of AIP 4235.
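+    # (Editorial note: AIP-4235 concerns request_id-style fields that clients
+    # auto-populate with a UUID4 when left unset; CreateEntityRequest declares
+    # no such field, so the assertion below simply round-trips the explicitly
+    # populated strings.)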
+ request = metadata_.CreateEntityRequest( + parent='parent_value', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_entity), + '__call__') as call: + call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client.create_entity(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == metadata_.CreateEntityRequest( + parent='parent_value', + ) + +def test_create_entity_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = MetadataServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.create_entity in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client._transport._wrapped_methods[client._transport.create_entity] = mock_rpc + request = {} + client.create_entity(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.create_entity(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + +@pytest.mark.asyncio +async def test_create_entity_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = MetadataServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._client._transport.create_entity in client._client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[client._client._transport.create_entity] = mock_rpc + + request = {} + await client.create_entity(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + await client.create_entity(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + +@pytest.mark.asyncio +async def test_create_entity_async(transport: str = 'grpc_asyncio', request_type=metadata_.CreateEntityRequest): + client = MetadataServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_entity), + '__call__') as call: + # Designate an appropriate return value for the call. 
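+        # (Editorial note: FakeUnaryUnaryCall wraps the message so that
+        # awaiting the mocked stub yields the Entity, mimicking the awaitable
+        # UnaryUnaryCall a real gRPC asyncio channel would return.)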
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(metadata_.Entity(
+            name='name_value',
+            display_name='display_name_value',
+            description='description_value',
+            id='id_value',
+            etag='etag_value',
+            type_=metadata_.Entity.Type.TABLE,
+            asset='asset_value',
+            data_path='data_path_value',
+            data_path_pattern='data_path_pattern_value',
+            catalog_entry='catalog_entry_value',
+            system=metadata_.StorageSystem.CLOUD_STORAGE,
+            uid='uid_value',
+        ))
+        response = await client.create_entity(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        request = metadata_.CreateEntityRequest()
+        assert args[0] == request
+
+    # Establish that the response is the type that we expect.
+    assert isinstance(response, metadata_.Entity)
+    assert response.name == 'name_value'
+    assert response.display_name == 'display_name_value'
+    assert response.description == 'description_value'
+    assert response.id == 'id_value'
+    assert response.etag == 'etag_value'
+    assert response.type_ == metadata_.Entity.Type.TABLE
+    assert response.asset == 'asset_value'
+    assert response.data_path == 'data_path_value'
+    assert response.data_path_pattern == 'data_path_pattern_value'
+    assert response.catalog_entry == 'catalog_entry_value'
+    assert response.system == metadata_.StorageSystem.CLOUD_STORAGE
+    assert response.uid == 'uid_value'
+
+
+@pytest.mark.asyncio
+async def test_create_entity_async_from_dict():
+    await test_create_entity_async(request_type=dict)
+
+def test_create_entity_field_headers():
+    client = MetadataServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Any value that is part of the HTTP/1.1 URI should be sent as
+    # a field header. Set these to a non-empty value.
+    request = metadata_.CreateEntityRequest()
+
+    request.parent = 'parent_value'
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.create_entity),
+            '__call__') as call:
+        call.return_value = metadata_.Entity()
+        client.create_entity(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == request
+
+    # Establish that the field header was sent.
+    _, _, kw = call.mock_calls[0]
+    assert (
+        'x-goog-request-params',
+        'parent=parent_value',
+    ) in kw['metadata']
+
+
+@pytest.mark.asyncio
+async def test_create_entity_field_headers_async():
+    client = MetadataServiceAsyncClient(
+        credentials=async_anonymous_credentials(),
+    )
+
+    # Any value that is part of the HTTP/1.1 URI should be sent as
+    # a field header. Set these to a non-empty value.
+    request = metadata_.CreateEntityRequest()
+
+    request.parent = 'parent_value'
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.create_entity),
+            '__call__') as call:
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(metadata_.Entity())
+        await client.create_entity(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == request
+
+    # Establish that the field header was sent.
+    _, _, kw = call.mock_calls[0]
+    assert (
+        'x-goog-request-params',
+        'parent=parent_value',
+    ) in kw['metadata']
+
+
+def test_create_entity_flattened():
+    client = MetadataServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.create_entity),
+            '__call__') as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = metadata_.Entity()
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        client.create_entity(
+            parent='parent_value',
+            entity=metadata_.Entity(name='name_value'),
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        arg = args[0].parent
+        mock_val = 'parent_value'
+        assert arg == mock_val
+        arg = args[0].entity
+        mock_val = metadata_.Entity(name='name_value')
+        assert arg == mock_val
+
+
+def test_create_entity_flattened_error():
+    client = MetadataServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        client.create_entity(
+            metadata_.CreateEntityRequest(),
+            parent='parent_value',
+            entity=metadata_.Entity(name='name_value'),
+        )
+
+@pytest.mark.asyncio
+async def test_create_entity_flattened_async():
+    client = MetadataServiceAsyncClient(
+        credentials=async_anonymous_credentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.create_entity),
+            '__call__') as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(metadata_.Entity())
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        response = await client.create_entity(
+            parent='parent_value',
+            entity=metadata_.Entity(name='name_value'),
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        arg = args[0].parent
+        mock_val = 'parent_value'
+        assert arg == mock_val
+        arg = args[0].entity
+        mock_val = metadata_.Entity(name='name_value')
+        assert arg == mock_val
+
+@pytest.mark.asyncio
+async def test_create_entity_flattened_error_async():
+    client = MetadataServiceAsyncClient(
+        credentials=async_anonymous_credentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        await client.create_entity(
+            metadata_.CreateEntityRequest(),
+            parent='parent_value',
+            entity=metadata_.Entity(name='name_value'),
+        )
+
+
+@pytest.mark.parametrize("request_type", [
+    metadata_.UpdateEntityRequest,
+    dict,
+])
+def test_update_entity(request_type, transport: str = 'grpc'):
+    client = MetadataServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport,
+    )
+
+    # Everything is optional in proto3 as far as the runtime is concerned,
+    # and we are mocking out the actual API, so just send an empty request.
+    request = request_type()
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object( + type(client.transport.update_entity), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = metadata_.Entity( + name='name_value', + display_name='display_name_value', + description='description_value', + id='id_value', + etag='etag_value', + type_=metadata_.Entity.Type.TABLE, + asset='asset_value', + data_path='data_path_value', + data_path_pattern='data_path_pattern_value', + catalog_entry='catalog_entry_value', + system=metadata_.StorageSystem.CLOUD_STORAGE, + uid='uid_value', + ) + response = client.update_entity(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = metadata_.UpdateEntityRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, metadata_.Entity) + assert response.name == 'name_value' + assert response.display_name == 'display_name_value' + assert response.description == 'description_value' + assert response.id == 'id_value' + assert response.etag == 'etag_value' + assert response.type_ == metadata_.Entity.Type.TABLE + assert response.asset == 'asset_value' + assert response.data_path == 'data_path_value' + assert response.data_path_pattern == 'data_path_pattern_value' + assert response.catalog_entry == 'catalog_entry_value' + assert response.system == metadata_.StorageSystem.CLOUD_STORAGE + assert response.uid == 'uid_value' + + +def test_update_entity_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = MetadataServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = metadata_.UpdateEntityRequest( + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_entity), + '__call__') as call: + call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client.update_entity(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == metadata_.UpdateEntityRequest( + ) + +def test_update_entity_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = MetadataServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.update_entity in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client._transport._wrapped_methods[client._transport.update_entity] = mock_rpc + request = {} + client.update_entity(request) + + # Establish that the underlying gRPC stub method was called. 
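+        # (Editorial note: the pair of call-count assertions below shows the
+        # cache at work: the replaced wrapped method is hit on every call,
+        # while wrap_method is never re-entered after client construction.)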
+        assert mock_rpc.call_count == 1
+
+        client.update_entity(request)
+
+        # Establish that a new wrapper was not created for this call
+        assert wrapper_fn.call_count == 0
+        assert mock_rpc.call_count == 2
+
+@pytest.mark.asyncio
+async def test_update_entity_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"):
+    # Clients should use _prep_wrapped_messages to create cached wrapped rpcs,
+    # instead of constructing them on each call
+    with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn:
+        client = MetadataServiceAsyncClient(
+            credentials=async_anonymous_credentials(),
+            transport=transport,
+        )
+
+        # Should wrap all calls on client creation
+        assert wrapper_fn.call_count > 0
+        wrapper_fn.reset_mock()
+
+        # Ensure method has been cached
+        assert client._client._transport.update_entity in client._client._transport._wrapped_methods
+
+        # Replace cached wrapped function with mock
+        mock_rpc = mock.AsyncMock()
+        mock_rpc.return_value = mock.Mock()
+        client._client._transport._wrapped_methods[client._client._transport.update_entity] = mock_rpc
+
+        request = {}
+        await client.update_entity(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert mock_rpc.call_count == 1
+
+        await client.update_entity(request)
+
+        # Establish that a new wrapper was not created for this call
+        assert wrapper_fn.call_count == 0
+        assert mock_rpc.call_count == 2
+
+@pytest.mark.asyncio
+async def test_update_entity_async(transport: str = 'grpc_asyncio', request_type=metadata_.UpdateEntityRequest):
+    client = MetadataServiceAsyncClient(
+        credentials=async_anonymous_credentials(),
+        transport=transport,
+    )
+
+    # Everything is optional in proto3 as far as the runtime is concerned,
+    # and we are mocking out the actual API, so just send an empty request.
+    request = request_type()
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.update_entity),
+            '__call__') as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(metadata_.Entity(
+            name='name_value',
+            display_name='display_name_value',
+            description='description_value',
+            id='id_value',
+            etag='etag_value',
+            type_=metadata_.Entity.Type.TABLE,
+            asset='asset_value',
+            data_path='data_path_value',
+            data_path_pattern='data_path_pattern_value',
+            catalog_entry='catalog_entry_value',
+            system=metadata_.StorageSystem.CLOUD_STORAGE,
+            uid='uid_value',
+        ))
+        response = await client.update_entity(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        request = metadata_.UpdateEntityRequest()
+        assert args[0] == request
+
+    # Establish that the response is the type that we expect.
+ assert isinstance(response, metadata_.Entity) + assert response.name == 'name_value' + assert response.display_name == 'display_name_value' + assert response.description == 'description_value' + assert response.id == 'id_value' + assert response.etag == 'etag_value' + assert response.type_ == metadata_.Entity.Type.TABLE + assert response.asset == 'asset_value' + assert response.data_path == 'data_path_value' + assert response.data_path_pattern == 'data_path_pattern_value' + assert response.catalog_entry == 'catalog_entry_value' + assert response.system == metadata_.StorageSystem.CLOUD_STORAGE + assert response.uid == 'uid_value' + + +@pytest.mark.asyncio +async def test_update_entity_async_from_dict(): + await test_update_entity_async(request_type=dict) + +def test_update_entity_field_headers(): + client = MetadataServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = metadata_.UpdateEntityRequest() + + request.entity.name = 'name_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_entity), + '__call__') as call: + call.return_value = metadata_.Entity() + client.update_entity(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'entity.name=name_value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_update_entity_field_headers_async(): + client = MetadataServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = metadata_.UpdateEntityRequest() + + request.entity.name = 'name_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_entity), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(metadata_.Entity()) + await client.update_entity(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'entity.name=name_value', + ) in kw['metadata'] + + +@pytest.mark.parametrize("request_type", [ + metadata_.DeleteEntityRequest, + dict, +]) +def test_delete_entity(request_type, transport: str = 'grpc'): + client = MetadataServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_entity), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = None + response = client.delete_entity(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = metadata_.DeleteEntityRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert response is None + + +def test_delete_entity_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = MetadataServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = metadata_.DeleteEntityRequest( + name='name_value', + etag='etag_value', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_entity), + '__call__') as call: + call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client.delete_entity(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == metadata_.DeleteEntityRequest( + name='name_value', + etag='etag_value', + ) + +def test_delete_entity_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = MetadataServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.delete_entity in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client._transport._wrapped_methods[client._transport.delete_entity] = mock_rpc + request = {} + client.delete_entity(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.delete_entity(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + +@pytest.mark.asyncio +async def test_delete_entity_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = MetadataServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._client._transport.delete_entity in client._client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[client._client._transport.delete_entity] = mock_rpc + + request = {} + await client.delete_entity(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + await client.delete_entity(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + +@pytest.mark.asyncio +async def test_delete_entity_async(transport: str = 'grpc_asyncio', request_type=metadata_.DeleteEntityRequest): + client = MetadataServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_entity), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + response = await client.delete_entity(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = metadata_.DeleteEntityRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert response is None + + +@pytest.mark.asyncio +async def test_delete_entity_async_from_dict(): + await test_delete_entity_async(request_type=dict) + +def test_delete_entity_field_headers(): + client = MetadataServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = metadata_.DeleteEntityRequest() + + request.name = 'name_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_entity), + '__call__') as call: + call.return_value = None + client.delete_entity(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'name=name_value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_delete_entity_field_headers_async(): + client = MetadataServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = metadata_.DeleteEntityRequest() + + request.name = 'name_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_entity), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + await client.delete_entity(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'name=name_value', + ) in kw['metadata'] + + +def test_delete_entity_flattened(): + client = MetadataServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_entity), + '__call__') as call: + # Designate an appropriate return value for the call. 
+ call.return_value = None
+ # Call the method with a truthy value for each flattened field,
+ # using the keyword arguments to the method.
+ client.delete_entity(
+ name='name_value',
+ )
+
+ # Establish that the underlying call was made with the expected
+ # request object values.
+ assert len(call.mock_calls) == 1
+ _, args, _ = call.mock_calls[0]
+ arg = args[0].name
+ mock_val = 'name_value'
+ assert arg == mock_val
+
+
+def test_delete_entity_flattened_error():
+ client = MetadataServiceClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ )
+
+ # Attempting to call a method with both a request object and flattened
+ # fields is an error.
+ with pytest.raises(ValueError):
+ client.delete_entity(
+ metadata_.DeleteEntityRequest(),
+ name='name_value',
+ )
+
+@pytest.mark.asyncio
+async def test_delete_entity_flattened_async():
+ client = MetadataServiceAsyncClient(
+ credentials=async_anonymous_credentials(),
+ )
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(
+ type(client.transport.delete_entity),
+ '__call__') as call:
+ # Designate an appropriate return value for the call.
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None)
+ # Call the method with a truthy value for each flattened field,
+ # using the keyword arguments to the method.
+ response = await client.delete_entity(
+ name='name_value',
+ )
+
+ # Establish that the underlying call was made with the expected
+ # request object values.
+ assert len(call.mock_calls)
+ _, args, _ = call.mock_calls[0]
+ arg = args[0].name
+ mock_val = 'name_value'
+ assert arg == mock_val
+
+@pytest.mark.asyncio
+async def test_delete_entity_flattened_error_async():
+ client = MetadataServiceAsyncClient(
+ credentials=async_anonymous_credentials(),
+ )
+
+ # Attempting to call a method with both a request object and flattened
+ # fields is an error.
+ with pytest.raises(ValueError):
+ await client.delete_entity(
+ metadata_.DeleteEntityRequest(),
+ name='name_value',
+ )
+
+
+@pytest.mark.parametrize("request_type", [
+ metadata_.GetEntityRequest,
+ dict,
+])
+def test_get_entity(request_type, transport: str = 'grpc'):
+ client = MetadataServiceClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ transport=transport,
+ )
+
+ # Everything is optional in proto3 as far as the runtime is concerned,
+ # and we are mocking out the actual API, so just send an empty request.
+ request = request_type()
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(
+ type(client.transport.get_entity),
+ '__call__') as call:
+ # Designate an appropriate return value for the call.
+ call.return_value = metadata_.Entity(
+ name='name_value',
+ display_name='display_name_value',
+ description='description_value',
+ id='id_value',
+ etag='etag_value',
+ type_=metadata_.Entity.Type.TABLE,
+ asset='asset_value',
+ data_path='data_path_value',
+ data_path_pattern='data_path_pattern_value',
+ catalog_entry='catalog_entry_value',
+ system=metadata_.StorageSystem.CLOUD_STORAGE,
+ uid='uid_value',
+ )
+ response = client.get_entity(request)
+
+ # Establish that the underlying gRPC stub method was called.
+ assert len(call.mock_calls) == 1
+ _, args, _ = call.mock_calls[0]
+ request = metadata_.GetEntityRequest()
+ assert args[0] == request
+
+ # Establish that the response is the type that we expect.
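+ # (Each field set on the mocked Entity above should round-trip
+ # unchanged onto the response object.)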
+ assert isinstance(response, metadata_.Entity) + assert response.name == 'name_value' + assert response.display_name == 'display_name_value' + assert response.description == 'description_value' + assert response.id == 'id_value' + assert response.etag == 'etag_value' + assert response.type_ == metadata_.Entity.Type.TABLE + assert response.asset == 'asset_value' + assert response.data_path == 'data_path_value' + assert response.data_path_pattern == 'data_path_pattern_value' + assert response.catalog_entry == 'catalog_entry_value' + assert response.system == metadata_.StorageSystem.CLOUD_STORAGE + assert response.uid == 'uid_value' + + +def test_get_entity_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = MetadataServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = metadata_.GetEntityRequest( + name='name_value', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_entity), + '__call__') as call: + call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client.get_entity(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == metadata_.GetEntityRequest( + name='name_value', + ) + +def test_get_entity_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = MetadataServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.get_entity in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client._transport._wrapped_methods[client._transport.get_entity] = mock_rpc + request = {} + client.get_entity(request) + + # Establish that the underlying gRPC stub method was called. 
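+ # (The second, identical call below must reuse the cached wrapper:
+ # wrap_method was reset after client construction and is expected to
+ # record no further calls.)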
+ assert mock_rpc.call_count == 1
+
+ client.get_entity(request)
+
+ # Establish that a new wrapper was not created for this call
+ assert wrapper_fn.call_count == 0
+ assert mock_rpc.call_count == 2
+
+@pytest.mark.asyncio
+async def test_get_entity_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"):
+ # Clients should use _prep_wrapped_messages to create cached wrapped rpcs,
+ # instead of constructing them on each call
+ with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn:
+ client = MetadataServiceAsyncClient(
+ credentials=async_anonymous_credentials(),
+ transport=transport,
+ )
+
+ # Should wrap all calls on client creation
+ assert wrapper_fn.call_count > 0
+ wrapper_fn.reset_mock()
+
+ # Ensure method has been cached
+ assert client._client._transport.get_entity in client._client._transport._wrapped_methods
+
+ # Replace cached wrapped function with mock
+ mock_rpc = mock.AsyncMock()
+ mock_rpc.return_value = mock.Mock()
+ client._client._transport._wrapped_methods[client._client._transport.get_entity] = mock_rpc
+
+ request = {}
+ await client.get_entity(request)
+
+ # Establish that the underlying gRPC stub method was called.
+ assert mock_rpc.call_count == 1
+
+ await client.get_entity(request)
+
+ # Establish that a new wrapper was not created for this call
+ assert wrapper_fn.call_count == 0
+ assert mock_rpc.call_count == 2
+
+@pytest.mark.asyncio
+async def test_get_entity_async(transport: str = 'grpc_asyncio', request_type=metadata_.GetEntityRequest):
+ client = MetadataServiceAsyncClient(
+ credentials=async_anonymous_credentials(),
+ transport=transport,
+ )
+
+ # Everything is optional in proto3 as far as the runtime is concerned,
+ # and we are mocking out the actual API, so just send an empty request.
+ request = request_type()
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(
+ type(client.transport.get_entity),
+ '__call__') as call:
+ # Designate an appropriate return value for the call.
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(metadata_.Entity(
+ name='name_value',
+ display_name='display_name_value',
+ description='description_value',
+ id='id_value',
+ etag='etag_value',
+ type_=metadata_.Entity.Type.TABLE,
+ asset='asset_value',
+ data_path='data_path_value',
+ data_path_pattern='data_path_pattern_value',
+ catalog_entry='catalog_entry_value',
+ system=metadata_.StorageSystem.CLOUD_STORAGE,
+ uid='uid_value',
+ ))
+ response = await client.get_entity(request)
+
+ # Establish that the underlying gRPC stub method was called.
+ assert len(call.mock_calls)
+ _, args, _ = call.mock_calls[0]
+ request = metadata_.GetEntityRequest()
+ assert args[0] == request
+
+ # Establish that the response is the type that we expect.
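+ # (Awaiting the FakeUnaryUnaryCall designated above resolves to the
+ # wrapped Entity, so the same field checks as the sync test apply.)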
+ assert isinstance(response, metadata_.Entity) + assert response.name == 'name_value' + assert response.display_name == 'display_name_value' + assert response.description == 'description_value' + assert response.id == 'id_value' + assert response.etag == 'etag_value' + assert response.type_ == metadata_.Entity.Type.TABLE + assert response.asset == 'asset_value' + assert response.data_path == 'data_path_value' + assert response.data_path_pattern == 'data_path_pattern_value' + assert response.catalog_entry == 'catalog_entry_value' + assert response.system == metadata_.StorageSystem.CLOUD_STORAGE + assert response.uid == 'uid_value' + + +@pytest.mark.asyncio +async def test_get_entity_async_from_dict(): + await test_get_entity_async(request_type=dict) + +def test_get_entity_field_headers(): + client = MetadataServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = metadata_.GetEntityRequest() + + request.name = 'name_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_entity), + '__call__') as call: + call.return_value = metadata_.Entity() + client.get_entity(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'name=name_value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_get_entity_field_headers_async(): + client = MetadataServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = metadata_.GetEntityRequest() + + request.name = 'name_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_entity), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(metadata_.Entity()) + await client.get_entity(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'name=name_value', + ) in kw['metadata'] + + +def test_get_entity_flattened(): + client = MetadataServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_entity), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = metadata_.Entity() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.get_entity( + name='name_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. 
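+ # (Flattened keyword arguments are folded into a single request
+ # message, so the value is read back from args[0] rather than kwargs.)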
+ assert len(call.mock_calls) == 1
+ _, args, _ = call.mock_calls[0]
+ arg = args[0].name
+ mock_val = 'name_value'
+ assert arg == mock_val
+
+
+def test_get_entity_flattened_error():
+ client = MetadataServiceClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ )
+
+ # Attempting to call a method with both a request object and flattened
+ # fields is an error.
+ with pytest.raises(ValueError):
+ client.get_entity(
+ metadata_.GetEntityRequest(),
+ name='name_value',
+ )
+
+@pytest.mark.asyncio
+async def test_get_entity_flattened_async():
+ client = MetadataServiceAsyncClient(
+ credentials=async_anonymous_credentials(),
+ )
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(
+ type(client.transport.get_entity),
+ '__call__') as call:
+ # Designate an appropriate return value for the call.
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(metadata_.Entity())
+ # Call the method with a truthy value for each flattened field,
+ # using the keyword arguments to the method.
+ response = await client.get_entity(
+ name='name_value',
+ )
+
+ # Establish that the underlying call was made with the expected
+ # request object values.
+ assert len(call.mock_calls)
+ _, args, _ = call.mock_calls[0]
+ arg = args[0].name
+ mock_val = 'name_value'
+ assert arg == mock_val
+
+@pytest.mark.asyncio
+async def test_get_entity_flattened_error_async():
+ client = MetadataServiceAsyncClient(
+ credentials=async_anonymous_credentials(),
+ )
+
+ # Attempting to call a method with both a request object and flattened
+ # fields is an error.
+ with pytest.raises(ValueError):
+ await client.get_entity(
+ metadata_.GetEntityRequest(),
+ name='name_value',
+ )
+
+
+@pytest.mark.parametrize("request_type", [
+ metadata_.ListEntitiesRequest,
+ dict,
+])
+def test_list_entities(request_type, transport: str = 'grpc'):
+ client = MetadataServiceClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ transport=transport,
+ )
+
+ # Everything is optional in proto3 as far as the runtime is concerned,
+ # and we are mocking out the actual API, so just send an empty request.
+ request = request_type()
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(
+ type(client.transport.list_entities),
+ '__call__') as call:
+ # Designate an appropriate return value for the call.
+ call.return_value = metadata_.ListEntitiesResponse(
+ next_page_token='next_page_token_value',
+ )
+ response = client.list_entities(request)
+
+ # Establish that the underlying gRPC stub method was called.
+ assert len(call.mock_calls) == 1
+ _, args, _ = call.mock_calls[0]
+ request = metadata_.ListEntitiesRequest()
+ assert args[0] == request
+
+ # Establish that the response is the type that we expect.
+ assert isinstance(response, pagers.ListEntitiesPager)
+ assert response.next_page_token == 'next_page_token_value'
+
+
+def test_list_entities_non_empty_request_with_auto_populated_field():
+ # This test is a coverage failsafe to make sure that UUID4 fields are
+ # automatically populated, according to AIP-4235, with non-empty requests.
+ client = MetadataServiceClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ transport='grpc',
+ )
+
+ # Populate all string fields in the request which are not UUID4
+ # since we want to check that UUID4 are populated automatically
+ # if they meet the requirements of AIP 4235.
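+ # (parent, page_token and filter are plain strings, so they are set
+ # explicitly here; only UUID4-style fields are left for the client
+ # library to fill in automatically.)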
+ request = metadata_.ListEntitiesRequest( + parent='parent_value', + page_token='page_token_value', + filter='filter_value', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_entities), + '__call__') as call: + call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client.list_entities(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == metadata_.ListEntitiesRequest( + parent='parent_value', + page_token='page_token_value', + filter='filter_value', + ) + +def test_list_entities_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = MetadataServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.list_entities in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client._transport._wrapped_methods[client._transport.list_entities] = mock_rpc + request = {} + client.list_entities(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.list_entities(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + +@pytest.mark.asyncio +async def test_list_entities_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = MetadataServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._client._transport.list_entities in client._client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[client._client._transport.list_entities] = mock_rpc + + request = {} + await client.list_entities(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + await client.list_entities(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + +@pytest.mark.asyncio +async def test_list_entities_async(transport: str = 'grpc_asyncio', request_type=metadata_.ListEntitiesRequest): + client = MetadataServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object(
+ type(client.transport.list_entities),
+ '__call__') as call:
+ # Designate an appropriate return value for the call.
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(metadata_.ListEntitiesResponse(
+ next_page_token='next_page_token_value',
+ ))
+ response = await client.list_entities(request)
+
+ # Establish that the underlying gRPC stub method was called.
+ assert len(call.mock_calls)
+ _, args, _ = call.mock_calls[0]
+ request = metadata_.ListEntitiesRequest()
+ assert args[0] == request
+
+ # Establish that the response is the type that we expect.
+ assert isinstance(response, pagers.ListEntitiesAsyncPager)
+ assert response.next_page_token == 'next_page_token_value'
+
+
+@pytest.mark.asyncio
+async def test_list_entities_async_from_dict():
+ await test_list_entities_async(request_type=dict)
+
+def test_list_entities_field_headers():
+ client = MetadataServiceClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ )
+
+ # Any value that is part of the HTTP/1.1 URI should be sent as
+ # a field header. Set these to a non-empty value.
+ request = metadata_.ListEntitiesRequest()
+
+ request.parent = 'parent_value'
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(
+ type(client.transport.list_entities),
+ '__call__') as call:
+ call.return_value = metadata_.ListEntitiesResponse()
+ client.list_entities(request)
+
+ # Establish that the underlying gRPC stub method was called.
+ assert len(call.mock_calls) == 1
+ _, args, _ = call.mock_calls[0]
+ assert args[0] == request
+
+ # Establish that the field header was sent.
+ _, _, kw = call.mock_calls[0]
+ assert (
+ 'x-goog-request-params',
+ 'parent=parent_value',
+ ) in kw['metadata']
+
+
+@pytest.mark.asyncio
+async def test_list_entities_field_headers_async():
+ client = MetadataServiceAsyncClient(
+ credentials=async_anonymous_credentials(),
+ )
+
+ # Any value that is part of the HTTP/1.1 URI should be sent as
+ # a field header. Set these to a non-empty value.
+ request = metadata_.ListEntitiesRequest()
+
+ request.parent = 'parent_value'
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(
+ type(client.transport.list_entities),
+ '__call__') as call:
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(metadata_.ListEntitiesResponse())
+ await client.list_entities(request)
+
+ # Establish that the underlying gRPC stub method was called.
+ assert len(call.mock_calls)
+ _, args, _ = call.mock_calls[0]
+ assert args[0] == request
+
+ # Establish that the field header was sent.
+ _, _, kw = call.mock_calls[0]
+ assert (
+ 'x-goog-request-params',
+ 'parent=parent_value',
+ ) in kw['metadata']
+
+
+def test_list_entities_flattened():
+ client = MetadataServiceClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ )
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(
+ type(client.transport.list_entities),
+ '__call__') as call:
+ # Designate an appropriate return value for the call.
+ call.return_value = metadata_.ListEntitiesResponse()
+ # Call the method with a truthy value for each flattened field,
+ # using the keyword arguments to the method.
+ client.list_entities(
+ parent='parent_value',
+ )
+
+ # Establish that the underlying call was made with the expected
+ # request object values.
+ assert len(call.mock_calls) == 1
+ _, args, _ = call.mock_calls[0]
+ arg = args[0].parent
+ mock_val = 'parent_value'
+ assert arg == mock_val
+
+
+def test_list_entities_flattened_error():
+ client = MetadataServiceClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ )
+
+ # Attempting to call a method with both a request object and flattened
+ # fields is an error.
+ with pytest.raises(ValueError):
+ client.list_entities(
+ metadata_.ListEntitiesRequest(),
+ parent='parent_value',
+ )
+
+@pytest.mark.asyncio
+async def test_list_entities_flattened_async():
+ client = MetadataServiceAsyncClient(
+ credentials=async_anonymous_credentials(),
+ )
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(
+ type(client.transport.list_entities),
+ '__call__') as call:
+ # Designate an appropriate return value for the call.
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(metadata_.ListEntitiesResponse())
+ # Call the method with a truthy value for each flattened field,
+ # using the keyword arguments to the method.
+ response = await client.list_entities(
+ parent='parent_value',
+ )
+
+ # Establish that the underlying call was made with the expected
+ # request object values.
+ assert len(call.mock_calls)
+ _, args, _ = call.mock_calls[0]
+ arg = args[0].parent
+ mock_val = 'parent_value'
+ assert arg == mock_val
+
+@pytest.mark.asyncio
+async def test_list_entities_flattened_error_async():
+ client = MetadataServiceAsyncClient(
+ credentials=async_anonymous_credentials(),
+ )
+
+ # Attempting to call a method with both a request object and flattened
+ # fields is an error.
+ with pytest.raises(ValueError):
+ await client.list_entities(
+ metadata_.ListEntitiesRequest(),
+ parent='parent_value',
+ )
+
+
+def test_list_entities_pager(transport_name: str = "grpc"):
+ client = MetadataServiceClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ transport=transport_name,
+ )
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(
+ type(client.transport.list_entities),
+ '__call__') as call:
+ # Set the response to a series of pages.
+ call.side_effect = (
+ metadata_.ListEntitiesResponse(
+ entities=[
+ metadata_.Entity(),
+ metadata_.Entity(),
+ metadata_.Entity(),
+ ],
+ next_page_token='abc',
+ ),
+ metadata_.ListEntitiesResponse(
+ entities=[],
+ next_page_token='def',
+ ),
+ metadata_.ListEntitiesResponse(
+ entities=[
+ metadata_.Entity(),
+ ],
+ next_page_token='ghi',
+ ),
+ metadata_.ListEntitiesResponse(
+ entities=[
+ metadata_.Entity(),
+ metadata_.Entity(),
+ ],
+ ),
+ RuntimeError,
+ )
+
+ expected_metadata = ()
+ retry = retries.Retry()
+ timeout = 5
+ expected_metadata = tuple(expected_metadata) + (
+ gapic_v1.routing_header.to_grpc_metadata((
+ ('parent', ''),
+ )),
+ )
+ pager = client.list_entities(request={}, retry=retry, timeout=timeout)
+
+ assert pager._metadata == expected_metadata
+ assert pager._retry == retry
+ assert pager._timeout == timeout
+
+ results = list(pager)
+ assert len(results) == 6
+ assert all(isinstance(i, metadata_.Entity)
+ for i in results)
+
+
+def test_list_entities_pages(transport_name: str = "grpc"):
+ client = MetadataServiceClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ transport=transport_name,
+ )
+
+ # Mock the actual call within the gRPC stub, and fake the request.
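+ # (As in the pager test above, the page sequence ends with a
+ # RuntimeError sentinel so the test fails loudly if a page is
+ # requested past the final, empty-token response.)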
+ with mock.patch.object( + type(client.transport.list_entities), + '__call__') as call: + # Set the response to a series of pages. + call.side_effect = ( + metadata_.ListEntitiesResponse( + entities=[ + metadata_.Entity(), + metadata_.Entity(), + metadata_.Entity(), + ], + next_page_token='abc', + ), + metadata_.ListEntitiesResponse( + entities=[], + next_page_token='def', + ), + metadata_.ListEntitiesResponse( + entities=[ + metadata_.Entity(), + ], + next_page_token='ghi', + ), + metadata_.ListEntitiesResponse( + entities=[ + metadata_.Entity(), + metadata_.Entity(), + ], + ), + RuntimeError, + ) + pages = list(client.list_entities(request={}).pages) + for page_, token in zip(pages, ['abc','def','ghi', '']): + assert page_.raw_page.next_page_token == token + +@pytest.mark.asyncio +async def test_list_entities_async_pager(): + client = MetadataServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_entities), + '__call__', new_callable=mock.AsyncMock) as call: + # Set the response to a series of pages. + call.side_effect = ( + metadata_.ListEntitiesResponse( + entities=[ + metadata_.Entity(), + metadata_.Entity(), + metadata_.Entity(), + ], + next_page_token='abc', + ), + metadata_.ListEntitiesResponse( + entities=[], + next_page_token='def', + ), + metadata_.ListEntitiesResponse( + entities=[ + metadata_.Entity(), + ], + next_page_token='ghi', + ), + metadata_.ListEntitiesResponse( + entities=[ + metadata_.Entity(), + metadata_.Entity(), + ], + ), + RuntimeError, + ) + async_pager = await client.list_entities(request={},) + assert async_pager.next_page_token == 'abc' + responses = [] + async for response in async_pager: # pragma: no branch + responses.append(response) + + assert len(responses) == 6 + assert all(isinstance(i, metadata_.Entity) + for i in responses) + + +@pytest.mark.asyncio +async def test_list_entities_async_pages(): + client = MetadataServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_entities), + '__call__', new_callable=mock.AsyncMock) as call: + # Set the response to a series of pages. 
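+ # (Each response below is served through the AsyncMock installed
+ # above, so the async pager can await every page fetch.)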
+ call.side_effect = ( + metadata_.ListEntitiesResponse( + entities=[ + metadata_.Entity(), + metadata_.Entity(), + metadata_.Entity(), + ], + next_page_token='abc', + ), + metadata_.ListEntitiesResponse( + entities=[], + next_page_token='def', + ), + metadata_.ListEntitiesResponse( + entities=[ + metadata_.Entity(), + ], + next_page_token='ghi', + ), + metadata_.ListEntitiesResponse( + entities=[ + metadata_.Entity(), + metadata_.Entity(), + ], + ), + RuntimeError, + ) + pages = [] + # Workaround issue in python 3.9 related to code coverage by adding `# pragma: no branch` + # See https://github.com/googleapis/gapic-generator-python/pull/1174#issuecomment-1025132372 + async for page_ in ( # pragma: no branch + await client.list_entities(request={}) + ).pages: + pages.append(page_) + for page_, token in zip(pages, ['abc','def','ghi', '']): + assert page_.raw_page.next_page_token == token + +@pytest.mark.parametrize("request_type", [ + metadata_.CreatePartitionRequest, + dict, +]) +def test_create_partition(request_type, transport: str = 'grpc'): + client = MetadataServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_partition), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = metadata_.Partition( + name='name_value', + values=['values_value'], + location='location_value', + etag='etag_value', + ) + response = client.create_partition(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = metadata_.CreatePartitionRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, metadata_.Partition) + assert response.name == 'name_value' + assert response.values == ['values_value'] + assert response.location == 'location_value' + assert response.etag == 'etag_value' + + +def test_create_partition_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = MetadataServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = metadata_.CreatePartitionRequest( + parent='parent_value', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_partition), + '__call__') as call: + call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. 
+ client.create_partition(request=request)
+ call.assert_called()
+ _, args, _ = call.mock_calls[0]
+ assert args[0] == metadata_.CreatePartitionRequest(
+ parent='parent_value',
+ )
+
+def test_create_partition_use_cached_wrapped_rpc():
+ # Clients should use _prep_wrapped_messages to create cached wrapped rpcs,
+ # instead of constructing them on each call
+ with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn:
+ client = MetadataServiceClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ transport="grpc",
+ )
+
+ # Should wrap all calls on client creation
+ assert wrapper_fn.call_count > 0
+ wrapper_fn.reset_mock()
+
+ # Ensure method has been cached
+ assert client._transport.create_partition in client._transport._wrapped_methods
+
+ # Replace cached wrapped function with mock
+ mock_rpc = mock.Mock()
+ mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string.
+ client._transport._wrapped_methods[client._transport.create_partition] = mock_rpc
+ request = {}
+ client.create_partition(request)
+
+ # Establish that the underlying gRPC stub method was called.
+ assert mock_rpc.call_count == 1
+
+ client.create_partition(request)
+
+ # Establish that a new wrapper was not created for this call
+ assert wrapper_fn.call_count == 0
+ assert mock_rpc.call_count == 2
+
+@pytest.mark.asyncio
+async def test_create_partition_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"):
+ # Clients should use _prep_wrapped_messages to create cached wrapped rpcs,
+ # instead of constructing them on each call
+ with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn:
+ client = MetadataServiceAsyncClient(
+ credentials=async_anonymous_credentials(),
+ transport=transport,
+ )
+
+ # Should wrap all calls on client creation
+ assert wrapper_fn.call_count > 0
+ wrapper_fn.reset_mock()
+
+ # Ensure method has been cached
+ assert client._client._transport.create_partition in client._client._transport._wrapped_methods
+
+ # Replace cached wrapped function with mock
+ mock_rpc = mock.AsyncMock()
+ mock_rpc.return_value = mock.Mock()
+ client._client._transport._wrapped_methods[client._client._transport.create_partition] = mock_rpc
+
+ request = {}
+ await client.create_partition(request)
+
+ # Establish that the underlying gRPC stub method was called.
+ assert mock_rpc.call_count == 1
+
+ await client.create_partition(request)
+
+ # Establish that a new wrapper was not created for this call
+ assert wrapper_fn.call_count == 0
+ assert mock_rpc.call_count == 2
+
+@pytest.mark.asyncio
+async def test_create_partition_async(transport: str = 'grpc_asyncio', request_type=metadata_.CreatePartitionRequest):
+ client = MetadataServiceAsyncClient(
+ credentials=async_anonymous_credentials(),
+ transport=transport,
+ )
+
+ # Everything is optional in proto3 as far as the runtime is concerned,
+ # and we are mocking out the actual API, so just send an empty request.
+ request = request_type()
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(
+ type(client.transport.create_partition),
+ '__call__') as call:
+ # Designate an appropriate return value for the call.
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(metadata_.Partition(
+ name='name_value',
+ values=['values_value'],
+ location='location_value',
+ etag='etag_value',
+ ))
+ response = await client.create_partition(request)
+
+ # Establish that the underlying gRPC stub method was called.
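+ # (Only a non-empty call list is asserted here; the async mock may
+ # record more entries than the single invocation the sync variant
+ # counts exactly.)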
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = metadata_.CreatePartitionRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, metadata_.Partition) + assert response.name == 'name_value' + assert response.values == ['values_value'] + assert response.location == 'location_value' + assert response.etag == 'etag_value' + + +@pytest.mark.asyncio +async def test_create_partition_async_from_dict(): + await test_create_partition_async(request_type=dict) + +def test_create_partition_field_headers(): + client = MetadataServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = metadata_.CreatePartitionRequest() + + request.parent = 'parent_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_partition), + '__call__') as call: + call.return_value = metadata_.Partition() + client.create_partition(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'parent=parent_value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_create_partition_field_headers_async(): + client = MetadataServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = metadata_.CreatePartitionRequest() + + request.parent = 'parent_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_partition), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(metadata_.Partition()) + await client.create_partition(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'parent=parent_value', + ) in kw['metadata'] + + +def test_create_partition_flattened(): + client = MetadataServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_partition), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = metadata_.Partition() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.create_partition( + parent='parent_value', + partition=metadata_.Partition(name='name_value'), + ) + + # Establish that the underlying call was made with the expected + # request object values. 
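+ # (Both the string field and the message-typed field are checked,
+ # since flattened message arguments are copied into the request as-is.)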
+ assert len(call.mock_calls) == 1
+ _, args, _ = call.mock_calls[0]
+ arg = args[0].parent
+ mock_val = 'parent_value'
+ assert arg == mock_val
+ arg = args[0].partition
+ mock_val = metadata_.Partition(name='name_value')
+ assert arg == mock_val
+
+
+def test_create_partition_flattened_error():
+ client = MetadataServiceClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ )
+
+ # Attempting to call a method with both a request object and flattened
+ # fields is an error.
+ with pytest.raises(ValueError):
+ client.create_partition(
+ metadata_.CreatePartitionRequest(),
+ parent='parent_value',
+ partition=metadata_.Partition(name='name_value'),
+ )
+
+@pytest.mark.asyncio
+async def test_create_partition_flattened_async():
+ client = MetadataServiceAsyncClient(
+ credentials=async_anonymous_credentials(),
+ )
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(
+ type(client.transport.create_partition),
+ '__call__') as call:
+ # Designate an appropriate return value for the call.
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(metadata_.Partition())
+ # Call the method with a truthy value for each flattened field,
+ # using the keyword arguments to the method.
+ response = await client.create_partition(
+ parent='parent_value',
+ partition=metadata_.Partition(name='name_value'),
+ )
+
+ # Establish that the underlying call was made with the expected
+ # request object values.
+ assert len(call.mock_calls)
+ _, args, _ = call.mock_calls[0]
+ arg = args[0].parent
+ mock_val = 'parent_value'
+ assert arg == mock_val
+ arg = args[0].partition
+ mock_val = metadata_.Partition(name='name_value')
+ assert arg == mock_val
+
+@pytest.mark.asyncio
+async def test_create_partition_flattened_error_async():
+ client = MetadataServiceAsyncClient(
+ credentials=async_anonymous_credentials(),
+ )
+
+ # Attempting to call a method with both a request object and flattened
+ # fields is an error.
+ with pytest.raises(ValueError):
+ await client.create_partition(
+ metadata_.CreatePartitionRequest(),
+ parent='parent_value',
+ partition=metadata_.Partition(name='name_value'),
+ )
+
+
+@pytest.mark.parametrize("request_type", [
+ metadata_.DeletePartitionRequest,
+ dict,
+])
+def test_delete_partition(request_type, transport: str = 'grpc'):
+ client = MetadataServiceClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ transport=transport,
+ )
+
+ # Everything is optional in proto3 as far as the runtime is concerned,
+ # and we are mocking out the actual API, so just send an empty request.
+ request = request_type()
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(
+ type(client.transport.delete_partition),
+ '__call__') as call:
+ # Designate an appropriate return value for the call.
+ call.return_value = None
+ response = client.delete_partition(request)
+
+ # Establish that the underlying gRPC stub method was called.
+ assert len(call.mock_calls) == 1
+ _, args, _ = call.mock_calls[0]
+ request = metadata_.DeletePartitionRequest()
+ assert args[0] == request
+
+ # Establish that the response is the type that we expect.
+ assert response is None
+
+
+def test_delete_partition_non_empty_request_with_auto_populated_field():
+ # This test is a coverage failsafe to make sure that UUID4 fields are
+ # automatically populated, according to AIP-4235, with non-empty requests.
+ client = MetadataServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = metadata_.DeletePartitionRequest( + name='name_value', + etag='etag_value', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_partition), + '__call__') as call: + call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client.delete_partition(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == metadata_.DeletePartitionRequest( + name='name_value', + etag='etag_value', + ) + +def test_delete_partition_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = MetadataServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.delete_partition in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client._transport._wrapped_methods[client._transport.delete_partition] = mock_rpc + request = {} + client.delete_partition(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.delete_partition(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + +@pytest.mark.asyncio +async def test_delete_partition_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = MetadataServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._client._transport.delete_partition in client._client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[client._client._transport.delete_partition] = mock_rpc + + request = {} + await client.delete_partition(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + await client.delete_partition(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + +@pytest.mark.asyncio +async def test_delete_partition_async(transport: str = 'grpc_asyncio', request_type=metadata_.DeletePartitionRequest): + client = MetadataServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_partition), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + response = await client.delete_partition(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = metadata_.DeletePartitionRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert response is None + + +@pytest.mark.asyncio +async def test_delete_partition_async_from_dict(): + await test_delete_partition_async(request_type=dict) + +def test_delete_partition_field_headers(): + client = MetadataServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = metadata_.DeletePartitionRequest() + + request.name = 'name_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_partition), + '__call__') as call: + call.return_value = None + client.delete_partition(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'name=name_value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_delete_partition_field_headers_async(): + client = MetadataServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = metadata_.DeletePartitionRequest() + + request.name = 'name_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_partition), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + await client.delete_partition(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'name=name_value', + ) in kw['metadata'] + + +def test_delete_partition_flattened(): + client = MetadataServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object(
+ type(client.transport.delete_partition),
+ '__call__') as call:
+ # Designate an appropriate return value for the call.
+ call.return_value = None
+ # Call the method with a truthy value for each flattened field,
+ # using the keyword arguments to the method.
+ client.delete_partition(
+ name='name_value',
+ )
+
+ # Establish that the underlying call was made with the expected
+ # request object values.
+ assert len(call.mock_calls) == 1
+ _, args, _ = call.mock_calls[0]
+ arg = args[0].name
+ mock_val = 'name_value'
+ assert arg == mock_val
+
+
+def test_delete_partition_flattened_error():
+ client = MetadataServiceClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ )
+
+ # Attempting to call a method with both a request object and flattened
+ # fields is an error.
+ with pytest.raises(ValueError):
+ client.delete_partition(
+ metadata_.DeletePartitionRequest(),
+ name='name_value',
+ )
+
+@pytest.mark.asyncio
+async def test_delete_partition_flattened_async():
+ client = MetadataServiceAsyncClient(
+ credentials=async_anonymous_credentials(),
+ )
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(
+ type(client.transport.delete_partition),
+ '__call__') as call:
+ # Designate an appropriate return value for the call.
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None)
+ # Call the method with a truthy value for each flattened field,
+ # using the keyword arguments to the method.
+ response = await client.delete_partition(
+ name='name_value',
+ )
+
+ # Establish that the underlying call was made with the expected
+ # request object values.
+ assert len(call.mock_calls)
+ _, args, _ = call.mock_calls[0]
+ arg = args[0].name
+ mock_val = 'name_value'
+ assert arg == mock_val
+
+@pytest.mark.asyncio
+async def test_delete_partition_flattened_error_async():
+ client = MetadataServiceAsyncClient(
+ credentials=async_anonymous_credentials(),
+ )
+
+ # Attempting to call a method with both a request object and flattened
+ # fields is an error.
+ with pytest.raises(ValueError):
+ await client.delete_partition(
+ metadata_.DeletePartitionRequest(),
+ name='name_value',
+ )
+
+
+@pytest.mark.parametrize("request_type", [
+ metadata_.GetPartitionRequest,
+ dict,
+])
+def test_get_partition(request_type, transport: str = 'grpc'):
+ client = MetadataServiceClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ transport=transport,
+ )
+
+ # Everything is optional in proto3 as far as the runtime is concerned,
+ # and we are mocking out the actual API, so just send an empty request.
+ request = request_type()
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(
+ type(client.transport.get_partition),
+ '__call__') as call:
+ # Designate an appropriate return value for the call.
+ call.return_value = metadata_.Partition(
+ name='name_value',
+ values=['values_value'],
+ location='location_value',
+ etag='etag_value',
+ )
+ response = client.get_partition(request)
+
+ # Establish that the underlying gRPC stub method was called.
+ assert len(call.mock_calls) == 1
+ _, args, _ = call.mock_calls[0]
+ request = metadata_.GetPartitionRequest()
+ assert args[0] == request
+
+ # Establish that the response is the type that we expect.
+ assert isinstance(response, metadata_.Partition) + assert response.name == 'name_value' + assert response.values == ['values_value'] + assert response.location == 'location_value' + assert response.etag == 'etag_value' + + +def test_get_partition_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = MetadataServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = metadata_.GetPartitionRequest( + name='name_value', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_partition), + '__call__') as call: + call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client.get_partition(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == metadata_.GetPartitionRequest( + name='name_value', + ) + +def test_get_partition_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = MetadataServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.get_partition in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client._transport._wrapped_methods[client._transport.get_partition] = mock_rpc + request = {} + client.get_partition(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.get_partition(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + +@pytest.mark.asyncio +async def test_get_partition_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = MetadataServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._client._transport.get_partition in client._client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[client._client._transport.get_partition] = mock_rpc + + request = {} + await client.get_partition(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1
+
+ await client.get_partition(request)
+
+ # Establish that a new wrapper was not created for this call
+ assert wrapper_fn.call_count == 0
+ assert mock_rpc.call_count == 2
+
+@pytest.mark.asyncio
+async def test_get_partition_async(transport: str = 'grpc_asyncio', request_type=metadata_.GetPartitionRequest):
+ client = MetadataServiceAsyncClient(
+ credentials=async_anonymous_credentials(),
+ transport=transport,
+ )
+
+ # Everything is optional in proto3 as far as the runtime is concerned,
+ # and we are mocking out the actual API, so just send an empty request.
+ request = request_type()
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(
+ type(client.transport.get_partition),
+ '__call__') as call:
+ # Designate an appropriate return value for the call.
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(metadata_.Partition(
+ name='name_value',
+ values=['values_value'],
+ location='location_value',
+ etag='etag_value',
+ ))
+ response = await client.get_partition(request)
+
+ # Establish that the underlying gRPC stub method was called.
+ assert len(call.mock_calls)
+ _, args, _ = call.mock_calls[0]
+ request = metadata_.GetPartitionRequest()
+ assert args[0] == request
+
+ # Establish that the response is the type that we expect.
+ assert isinstance(response, metadata_.Partition)
+ assert response.name == 'name_value'
+ assert response.values == ['values_value']
+ assert response.location == 'location_value'
+ assert response.etag == 'etag_value'
+
+
+@pytest.mark.asyncio
+async def test_get_partition_async_from_dict():
+ await test_get_partition_async(request_type=dict)
+
+def test_get_partition_field_headers():
+ client = MetadataServiceClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ )
+
+ # Any value that is part of the HTTP/1.1 URI should be sent as
+ # a field header. Set these to a non-empty value.
+ request = metadata_.GetPartitionRequest()
+
+ request.name = 'name_value'
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(
+ type(client.transport.get_partition),
+ '__call__') as call:
+ call.return_value = metadata_.Partition()
+ client.get_partition(request)
+
+ # Establish that the underlying gRPC stub method was called.
+ assert len(call.mock_calls) == 1
+ _, args, _ = call.mock_calls[0]
+ assert args[0] == request
+
+ # Establish that the field header was sent.
+ _, _, kw = call.mock_calls[0]
+ assert (
+ 'x-goog-request-params',
+ 'name=name_value',
+ ) in kw['metadata']
+
+
+@pytest.mark.asyncio
+async def test_get_partition_field_headers_async():
+ client = MetadataServiceAsyncClient(
+ credentials=async_anonymous_credentials(),
+ )
+
+ # Any value that is part of the HTTP/1.1 URI should be sent as
+ # a field header. Set these to a non-empty value.
+ request = metadata_.GetPartitionRequest()
+
+ request.name = 'name_value'
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(
+ type(client.transport.get_partition),
+ '__call__') as call:
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(metadata_.Partition())
+ await client.get_partition(request)
+
+ # Establish that the underlying gRPC stub method was called.
+ assert len(call.mock_calls)
+ _, args, _ = call.mock_calls[0]
+ assert args[0] == request
+
+ # Establish that the field header was sent.
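+ # (The routing params travel in the call's metadata kwarg; membership
+ # rather than equality is asserted because other entries, such as the
+ # client library version header, may accompany them.)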
+ _, _, kw = call.mock_calls[0]
+ assert (
+ 'x-goog-request-params',
+ 'name=name_value',
+ ) in kw['metadata']
+
+
+def test_get_partition_flattened():
+ client = MetadataServiceClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ )
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(
+ type(client.transport.get_partition),
+ '__call__') as call:
+ # Designate an appropriate return value for the call.
+ call.return_value = metadata_.Partition()
+ # Call the method with a truthy value for each flattened field,
+ # using the keyword arguments to the method.
+ client.get_partition(
+ name='name_value',
+ )
+
+ # Establish that the underlying call was made with the expected
+ # request object values.
+ assert len(call.mock_calls) == 1
+ _, args, _ = call.mock_calls[0]
+ arg = args[0].name
+ mock_val = 'name_value'
+ assert arg == mock_val
+
+
+def test_get_partition_flattened_error():
+ client = MetadataServiceClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ )
+
+ # Attempting to call a method with both a request object and flattened
+ # fields is an error.
+ with pytest.raises(ValueError):
+ client.get_partition(
+ metadata_.GetPartitionRequest(),
+ name='name_value',
+ )
+
+@pytest.mark.asyncio
+async def test_get_partition_flattened_async():
+ client = MetadataServiceAsyncClient(
+ credentials=async_anonymous_credentials(),
+ )
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(
+ type(client.transport.get_partition),
+ '__call__') as call:
+ # Designate an appropriate return value for the call.
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(metadata_.Partition())
+ # Call the method with a truthy value for each flattened field,
+ # using the keyword arguments to the method.
+ response = await client.get_partition(
+ name='name_value',
+ )
+
+ # Establish that the underlying call was made with the expected
+ # request object values.
+ assert len(call.mock_calls)
+ _, args, _ = call.mock_calls[0]
+ arg = args[0].name
+ mock_val = 'name_value'
+ assert arg == mock_val
+
+@pytest.mark.asyncio
+async def test_get_partition_flattened_error_async():
+ client = MetadataServiceAsyncClient(
+ credentials=async_anonymous_credentials(),
+ )
+
+ # Attempting to call a method with both a request object and flattened
+ # fields is an error.
+ with pytest.raises(ValueError):
+ await client.get_partition(
+ metadata_.GetPartitionRequest(),
+ name='name_value',
+ )
+
+
+@pytest.mark.parametrize("request_type", [
+ metadata_.ListPartitionsRequest,
+ dict,
+])
+def test_list_partitions(request_type, transport: str = 'grpc'):
+ client = MetadataServiceClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ transport=transport,
+ )
+
+ # Everything is optional in proto3 as far as the runtime is concerned,
+ # and we are mocking out the actual API, so just send an empty request.
+ request = request_type()
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(
+ type(client.transport.list_partitions),
+ '__call__') as call:
+ # Designate an appropriate return value for the call.
+ call.return_value = metadata_.ListPartitionsResponse(
+ next_page_token='next_page_token_value',
+ )
+ response = client.list_partitions(request)
+
+ # Establish that the underlying gRPC stub method was called.
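+ # (request_type() above yields an all-defaults message; proto3 messages
+ # compare field by field, so the equality check below only passes if the
+ # client forwarded an unmodified default ListPartitionsRequest.)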
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = metadata_.ListPartitionsRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListPartitionsPager) + assert response.next_page_token == 'next_page_token_value' + + +def test_list_partitions_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = MetadataServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = metadata_.ListPartitionsRequest( + parent='parent_value', + page_token='page_token_value', + filter='filter_value', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_partitions), + '__call__') as call: + call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client.list_partitions(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == metadata_.ListPartitionsRequest( + parent='parent_value', + page_token='page_token_value', + filter='filter_value', + ) + +def test_list_partitions_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = MetadataServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.list_partitions in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client._transport._wrapped_methods[client._transport.list_partitions] = mock_rpc + request = {} + client.list_partitions(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1
+
+ client.list_partitions(request)
+
+ # Establish that a new wrapper was not created for this call
+ assert wrapper_fn.call_count == 0
+ assert mock_rpc.call_count == 2
+
+@pytest.mark.asyncio
+async def test_list_partitions_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"):
+ # Clients should use _prep_wrapped_messages to create cached wrapped rpcs,
+ # instead of constructing them on each call
+ with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn:
+ client = MetadataServiceAsyncClient(
+ credentials=async_anonymous_credentials(),
+ transport=transport,
+ )
+
+ # Should wrap all calls on client creation
+ assert wrapper_fn.call_count > 0
+ wrapper_fn.reset_mock()
+
+ # Ensure method has been cached
+ assert client._client._transport.list_partitions in client._client._transport._wrapped_methods
+
+ # Replace cached wrapped function with mock
+ mock_rpc = mock.AsyncMock()
+ mock_rpc.return_value = mock.Mock()
+ client._client._transport._wrapped_methods[client._client._transport.list_partitions] = mock_rpc
+
+ request = {}
+ await client.list_partitions(request)
+
+ # Establish that the underlying gRPC stub method was called.
+ assert mock_rpc.call_count == 1
+
+ await client.list_partitions(request)
+
+ # Establish that a new wrapper was not created for this call
+ assert wrapper_fn.call_count == 0
+ assert mock_rpc.call_count == 2
+
+@pytest.mark.asyncio
+async def test_list_partitions_async(transport: str = 'grpc_asyncio', request_type=metadata_.ListPartitionsRequest):
+ client = MetadataServiceAsyncClient(
+ credentials=async_anonymous_credentials(),
+ transport=transport,
+ )
+
+ # Everything is optional in proto3 as far as the runtime is concerned,
+ # and we are mocking out the actual API, so just send an empty request.
+ request = request_type()
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(
+ type(client.transport.list_partitions),
+ '__call__') as call:
+ # Designate an appropriate return value for the call.
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(metadata_.ListPartitionsResponse(
+ next_page_token='next_page_token_value',
+ ))
+ response = await client.list_partitions(request)
+
+ # Establish that the underlying gRPC stub method was called.
+ assert len(call.mock_calls)
+ _, args, _ = call.mock_calls[0]
+ request = metadata_.ListPartitionsRequest()
+ assert args[0] == request
+
+ # Establish that the response is the type that we expect.
+ assert isinstance(response, pagers.ListPartitionsAsyncPager)
+ assert response.next_page_token == 'next_page_token_value'
+
+
+@pytest.mark.asyncio
+async def test_list_partitions_async_from_dict():
+ await test_list_partitions_async(request_type=dict)
+
+def test_list_partitions_field_headers():
+ client = MetadataServiceClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ )
+
+ # Any value that is part of the HTTP/1.1 URI should be sent as
+ # a field header. Set these to a non-empty value.
+ request = metadata_.ListPartitionsRequest()
+
+ request.parent = 'parent_value'
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(
+ type(client.transport.list_partitions),
+ '__call__') as call:
+ call.return_value = metadata_.ListPartitionsResponse()
+ client.list_partitions(request)
+
+ # Establish that the underlying gRPC stub method was called.
+ assert len(call.mock_calls) == 1
+ _, args, _ = call.mock_calls[0]
+ assert args[0] == request
+
+ # Establish that the field header was sent.
+ _, _, kw = call.mock_calls[0]
+ assert (
+ 'x-goog-request-params',
+ 'parent=parent_value',
+ ) in kw['metadata']
+
+
+@pytest.mark.asyncio
+async def test_list_partitions_field_headers_async():
+ client = MetadataServiceAsyncClient(
+ credentials=async_anonymous_credentials(),
+ )
+
+ # Any value that is part of the HTTP/1.1 URI should be sent as
+ # a field header. Set these to a non-empty value.
+ request = metadata_.ListPartitionsRequest()
+
+ request.parent = 'parent_value'
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(
+ type(client.transport.list_partitions),
+ '__call__') as call:
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(metadata_.ListPartitionsResponse())
+ await client.list_partitions(request)
+
+ # Establish that the underlying gRPC stub method was called.
+ assert len(call.mock_calls)
+ _, args, _ = call.mock_calls[0]
+ assert args[0] == request
+
+ # Establish that the field header was sent.
+ _, _, kw = call.mock_calls[0]
+ assert (
+ 'x-goog-request-params',
+ 'parent=parent_value',
+ ) in kw['metadata']
+
+
+def test_list_partitions_flattened():
+ client = MetadataServiceClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ )
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(
+ type(client.transport.list_partitions),
+ '__call__') as call:
+ # Designate an appropriate return value for the call.
+ call.return_value = metadata_.ListPartitionsResponse()
+ # Call the method with a truthy value for each flattened field,
+ # using the keyword arguments to the method.
+ client.list_partitions(
+ parent='parent_value',
+ )
+
+ # Establish that the underlying call was made with the expected
+ # request object values.
+ assert len(call.mock_calls) == 1
+ _, args, _ = call.mock_calls[0]
+ arg = args[0].parent
+ mock_val = 'parent_value'
+ assert arg == mock_val
+
+
+def test_list_partitions_flattened_error():
+ client = MetadataServiceClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ )
+
+ # Attempting to call a method with both a request object and flattened
+ # fields is an error.
+ with pytest.raises(ValueError):
+ client.list_partitions(
+ metadata_.ListPartitionsRequest(),
+ parent='parent_value',
+ )
+
+@pytest.mark.asyncio
+async def test_list_partitions_flattened_async():
+ client = MetadataServiceAsyncClient(
+ credentials=async_anonymous_credentials(),
+ )
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(
+ type(client.transport.list_partitions),
+ '__call__') as call:
+ # Designate an appropriate return value for the call.
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(metadata_.ListPartitionsResponse())
+ # Call the method with a truthy value for each flattened field,
+ # using the keyword arguments to the method.
+ response = await client.list_partitions(
+ parent='parent_value',
+ )
+
+ # Establish that the underlying call was made with the expected
+ # request object values.
+ assert len(call.mock_calls)
+ _, args, _ = call.mock_calls[0]
+ arg = args[0].parent
+ mock_val = 'parent_value'
+ assert arg == mock_val
+
+@pytest.mark.asyncio
+async def test_list_partitions_flattened_error_async():
+ client = MetadataServiceAsyncClient(
+ credentials=async_anonymous_credentials(),
+ )
+
+ # Attempting to call a method with both a request object and flattened
+ # fields is an error.
+ with pytest.raises(ValueError):
+ await client.list_partitions(
+ metadata_.ListPartitionsRequest(),
+ parent='parent_value',
+ )
+
+
+def test_list_partitions_pager(transport_name: str = "grpc"):
+ client = MetadataServiceClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ transport=transport_name,
+ )
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(
+ type(client.transport.list_partitions),
+ '__call__') as call:
+ # Set the response to a series of pages.
+ call.side_effect = (
+ metadata_.ListPartitionsResponse(
+ partitions=[
+ metadata_.Partition(),
+ metadata_.Partition(),
+ metadata_.Partition(),
+ ],
+ next_page_token='abc',
+ ),
+ metadata_.ListPartitionsResponse(
+ partitions=[],
+ next_page_token='def',
+ ),
+ metadata_.ListPartitionsResponse(
+ partitions=[
+ metadata_.Partition(),
+ ],
+ next_page_token='ghi',
+ ),
+ metadata_.ListPartitionsResponse(
+ partitions=[
+ metadata_.Partition(),
+ metadata_.Partition(),
+ ],
+ ),
+ RuntimeError,
+ )
+
+ retry = retries.Retry()
+ timeout = 5
+ expected_metadata = (
+ gapic_v1.routing_header.to_grpc_metadata((
+ ('parent', ''),
+ )),
+ )
+ pager = client.list_partitions(request={}, retry=retry, timeout=timeout)
+
+ assert pager._metadata == expected_metadata
+ assert pager._retry == retry
+ assert pager._timeout == timeout
+
+ results = list(pager)
+ assert len(results) == 6
+ assert all(isinstance(i, metadata_.Partition)
+ for i in results)
+
+
+def test_list_partitions_pages(transport_name: str = "grpc"):
+ client = MetadataServiceClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ transport=transport_name,
+ )
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(
+ type(client.transport.list_partitions),
+ '__call__') as call:
+ # Set the response to a series of pages.
+ call.side_effect = (
+ metadata_.ListPartitionsResponse(
+ partitions=[
+ metadata_.Partition(),
+ metadata_.Partition(),
+ metadata_.Partition(),
+ ],
+ next_page_token='abc',
+ ),
+ metadata_.ListPartitionsResponse(
+ partitions=[],
+ next_page_token='def',
+ ),
+ metadata_.ListPartitionsResponse(
+ partitions=[
+ metadata_.Partition(),
+ ],
+ next_page_token='ghi',
+ ),
+ metadata_.ListPartitionsResponse(
+ partitions=[
+ metadata_.Partition(),
+ metadata_.Partition(),
+ ],
+ ),
+ RuntimeError,
+ )
+ pages = list(client.list_partitions(request={}).pages)
+ for page_, token in zip(pages, ['abc','def','ghi', '']):
+ assert page_.raw_page.next_page_token == token
+
+@pytest.mark.asyncio
+async def test_list_partitions_async_pager():
+ client = MetadataServiceAsyncClient(
+ credentials=async_anonymous_credentials(),
+ )
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(
+ type(client.transport.list_partitions),
+ '__call__', new_callable=mock.AsyncMock) as call:
+ # Set the response to a series of pages.
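+ # (Each response below becomes one page; the async pager flattens pages
+ # into items, so a hypothetical application snippet would simply write:
+ #     async for partition in await client.list_partitions(request={}):
+ #         ...
+ # and never touch next_page_token directly. The trailing RuntimeError
+ # guards against the pager fetching a page beyond the last one.)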
+ call.side_effect = ( + metadata_.ListPartitionsResponse( + partitions=[ + metadata_.Partition(), + metadata_.Partition(), + metadata_.Partition(), + ], + next_page_token='abc', + ), + metadata_.ListPartitionsResponse( + partitions=[], + next_page_token='def', + ), + metadata_.ListPartitionsResponse( + partitions=[ + metadata_.Partition(), + ], + next_page_token='ghi', + ), + metadata_.ListPartitionsResponse( + partitions=[ + metadata_.Partition(), + metadata_.Partition(), + ], + ), + RuntimeError, + ) + async_pager = await client.list_partitions(request={},) + assert async_pager.next_page_token == 'abc' + responses = [] + async for response in async_pager: # pragma: no branch + responses.append(response) + + assert len(responses) == 6 + assert all(isinstance(i, metadata_.Partition) + for i in responses) + + +@pytest.mark.asyncio +async def test_list_partitions_async_pages(): + client = MetadataServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_partitions), + '__call__', new_callable=mock.AsyncMock) as call: + # Set the response to a series of pages. + call.side_effect = ( + metadata_.ListPartitionsResponse( + partitions=[ + metadata_.Partition(), + metadata_.Partition(), + metadata_.Partition(), + ], + next_page_token='abc', + ), + metadata_.ListPartitionsResponse( + partitions=[], + next_page_token='def', + ), + metadata_.ListPartitionsResponse( + partitions=[ + metadata_.Partition(), + ], + next_page_token='ghi', + ), + metadata_.ListPartitionsResponse( + partitions=[ + metadata_.Partition(), + metadata_.Partition(), + ], + ), + RuntimeError, + ) + pages = [] + # Workaround issue in python 3.9 related to code coverage by adding `# pragma: no branch` + # See https://github.com/googleapis/gapic-generator-python/pull/1174#issuecomment-1025132372 + async for page_ in ( # pragma: no branch + await client.list_partitions(request={}) + ).pages: + pages.append(page_) + for page_, token in zip(pages, ['abc','def','ghi', '']): + assert page_.raw_page.next_page_token == token + + +def test_credentials_transport_error(): + # It is an error to provide credentials and a transport instance. + transport = transports.MetadataServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = MetadataServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # It is an error to provide a credentials file and a transport instance. + transport = transports.MetadataServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = MetadataServiceClient( + client_options={"credentials_file": "credentials.json"}, + transport=transport, + ) + + # It is an error to provide an api_key and a transport instance. + transport = transports.MetadataServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + options = client_options.ClientOptions() + options.api_key = "api_key" + with pytest.raises(ValueError): + client = MetadataServiceClient( + client_options=options, + transport=transport, + ) + + # It is an error to provide an api_key and a credential. 
+ options = client_options.ClientOptions() + options.api_key = "api_key" + with pytest.raises(ValueError): + client = MetadataServiceClient( + client_options=options, + credentials=ga_credentials.AnonymousCredentials() + ) + + # It is an error to provide scopes and a transport instance. + transport = transports.MetadataServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = MetadataServiceClient( + client_options={"scopes": ["1", "2"]}, + transport=transport, + ) + + +def test_transport_instance(): + # A client may be instantiated with a custom transport instance. + transport = transports.MetadataServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + client = MetadataServiceClient(transport=transport) + assert client.transport is transport + +def test_transport_get_channel(): + # A client may be instantiated with a custom transport instance. + transport = transports.MetadataServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + channel = transport.grpc_channel + assert channel + + transport = transports.MetadataServiceGrpcAsyncIOTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + channel = transport.grpc_channel + assert channel + +@pytest.mark.parametrize("transport_class", [ + transports.MetadataServiceGrpcTransport, + transports.MetadataServiceGrpcAsyncIOTransport, +]) +def test_transport_adc(transport_class): + # Test default credentials are used if not provided. + with mock.patch.object(google.auth, 'default') as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport_class() + adc.assert_called_once() + +def test_transport_kind_grpc(): + transport = MetadataServiceClient.get_transport_class("grpc")( + credentials=ga_credentials.AnonymousCredentials() + ) + assert transport.kind == "grpc" + + +def test_initialize_client_w_grpc(): + client = MetadataServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc" + ) + assert client is not None + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_create_entity_empty_call_grpc(): + client = MetadataServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.create_entity), + '__call__') as call: + call.return_value = metadata_.Entity() + client.create_entity(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = metadata_.CreateEntityRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_update_entity_empty_call_grpc(): + client = MetadataServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.update_entity), + '__call__') as call: + call.return_value = metadata_.Entity() + client.update_entity(request=None) + + # Establish that the underlying stub method was called. 
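+ # (With request=None the client synthesizes a default UpdateEntityRequest
+ # before hitting the transport; the assertions that follow pin down that
+ # defaulting behavior rather than any server response.)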
+ call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = metadata_.UpdateEntityRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_delete_entity_empty_call_grpc(): + client = MetadataServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.delete_entity), + '__call__') as call: + call.return_value = None + client.delete_entity(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = metadata_.DeleteEntityRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_get_entity_empty_call_grpc(): + client = MetadataServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.get_entity), + '__call__') as call: + call.return_value = metadata_.Entity() + client.get_entity(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = metadata_.GetEntityRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_list_entities_empty_call_grpc(): + client = MetadataServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.list_entities), + '__call__') as call: + call.return_value = metadata_.ListEntitiesResponse() + client.list_entities(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = metadata_.ListEntitiesRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_create_partition_empty_call_grpc(): + client = MetadataServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.create_partition), + '__call__') as call: + call.return_value = metadata_.Partition() + client.create_partition(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = metadata_.CreatePartitionRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_delete_partition_empty_call_grpc(): + client = MetadataServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.delete_partition), + '__call__') as call: + call.return_value = None + client.delete_partition(request=None) + + # Establish that the underlying stub method was called. 
+ call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = metadata_.DeletePartitionRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_get_partition_empty_call_grpc(): + client = MetadataServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.get_partition), + '__call__') as call: + call.return_value = metadata_.Partition() + client.get_partition(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = metadata_.GetPartitionRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_list_partitions_empty_call_grpc(): + client = MetadataServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.list_partitions), + '__call__') as call: + call.return_value = metadata_.ListPartitionsResponse() + client.list_partitions(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = metadata_.ListPartitionsRequest() + + assert args[0] == request_msg + + +def test_transport_kind_grpc_asyncio(): + transport = MetadataServiceAsyncClient.get_transport_class("grpc_asyncio")( + credentials=async_anonymous_credentials() + ) + assert transport.kind == "grpc_asyncio" + + +def test_initialize_client_w_grpc_asyncio(): + client = MetadataServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio" + ) + assert client is not None + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_create_entity_empty_call_grpc_asyncio(): + client = MetadataServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.create_entity), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(metadata_.Entity( + name='name_value', + display_name='display_name_value', + description='description_value', + id='id_value', + etag='etag_value', + type_=metadata_.Entity.Type.TABLE, + asset='asset_value', + data_path='data_path_value', + data_path_pattern='data_path_pattern_value', + catalog_entry='catalog_entry_value', + system=metadata_.StorageSystem.CLOUD_STORAGE, + uid='uid_value', + )) + await client.create_entity(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = metadata_.CreateEntityRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. 
+@pytest.mark.asyncio +async def test_update_entity_empty_call_grpc_asyncio(): + client = MetadataServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.update_entity), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(metadata_.Entity( + name='name_value', + display_name='display_name_value', + description='description_value', + id='id_value', + etag='etag_value', + type_=metadata_.Entity.Type.TABLE, + asset='asset_value', + data_path='data_path_value', + data_path_pattern='data_path_pattern_value', + catalog_entry='catalog_entry_value', + system=metadata_.StorageSystem.CLOUD_STORAGE, + uid='uid_value', + )) + await client.update_entity(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = metadata_.UpdateEntityRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_delete_entity_empty_call_grpc_asyncio(): + client = MetadataServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.delete_entity), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + await client.delete_entity(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = metadata_.DeleteEntityRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_get_entity_empty_call_grpc_asyncio(): + client = MetadataServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.get_entity), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(metadata_.Entity( + name='name_value', + display_name='display_name_value', + description='description_value', + id='id_value', + etag='etag_value', + type_=metadata_.Entity.Type.TABLE, + asset='asset_value', + data_path='data_path_value', + data_path_pattern='data_path_pattern_value', + catalog_entry='catalog_entry_value', + system=metadata_.StorageSystem.CLOUD_STORAGE, + uid='uid_value', + )) + await client.get_entity(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = metadata_.GetEntityRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_list_entities_empty_call_grpc_asyncio(): + client = MetadataServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. 
+ with mock.patch.object( + type(client.transport.list_entities), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(metadata_.ListEntitiesResponse( + next_page_token='next_page_token_value', + )) + await client.list_entities(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = metadata_.ListEntitiesRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_create_partition_empty_call_grpc_asyncio(): + client = MetadataServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.create_partition), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(metadata_.Partition( + name='name_value', + values=['values_value'], + location='location_value', + etag='etag_value', + )) + await client.create_partition(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = metadata_.CreatePartitionRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_delete_partition_empty_call_grpc_asyncio(): + client = MetadataServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.delete_partition), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + await client.delete_partition(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = metadata_.DeletePartitionRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_get_partition_empty_call_grpc_asyncio(): + client = MetadataServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.get_partition), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(metadata_.Partition( + name='name_value', + values=['values_value'], + location='location_value', + etag='etag_value', + )) + await client.get_partition(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = metadata_.GetPartitionRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. 
+@pytest.mark.asyncio +async def test_list_partitions_empty_call_grpc_asyncio(): + client = MetadataServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.list_partitions), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(metadata_.ListPartitionsResponse( + next_page_token='next_page_token_value', + )) + await client.list_partitions(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = metadata_.ListPartitionsRequest() + + assert args[0] == request_msg + + +def test_transport_grpc_default(): + # A client should use the gRPC transport by default. + client = MetadataServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + assert isinstance( + client.transport, + transports.MetadataServiceGrpcTransport, + ) + +def test_metadata_service_base_transport_error(): + # Passing both a credentials object and credentials_file should raise an error + with pytest.raises(core_exceptions.DuplicateCredentialArgs): + transport = transports.MetadataServiceTransport( + credentials=ga_credentials.AnonymousCredentials(), + credentials_file="credentials.json" + ) + + +def test_metadata_service_base_transport(): + # Instantiate the base transport. + with mock.patch('google.cloud.dataplex_v1.services.metadata_service.transports.MetadataServiceTransport.__init__') as Transport: + Transport.return_value = None + transport = transports.MetadataServiceTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Every method on the transport should just blindly + # raise NotImplementedError. + methods = ( + 'create_entity', + 'update_entity', + 'delete_entity', + 'get_entity', + 'list_entities', + 'create_partition', + 'delete_partition', + 'get_partition', + 'list_partitions', + 'get_location', + 'list_locations', + 'get_operation', + 'cancel_operation', + 'delete_operation', + 'list_operations', + ) + for method in methods: + with pytest.raises(NotImplementedError): + getattr(transport, method)(request=object()) + + with pytest.raises(NotImplementedError): + transport.close() + + # Catch all for all remaining methods and properties + remainder = [ + 'kind', + ] + for r in remainder: + with pytest.raises(NotImplementedError): + getattr(transport, r)() + + +def test_metadata_service_base_transport_with_credentials_file(): + # Instantiate the base transport with a credentials file + with mock.patch.object(google.auth, 'load_credentials_from_file', autospec=True) as load_creds, mock.patch('google.cloud.dataplex_v1.services.metadata_service.transports.MetadataServiceTransport._prep_wrapped_messages') as Transport: + Transport.return_value = None + load_creds.return_value = (ga_credentials.AnonymousCredentials(), None) + transport = transports.MetadataServiceTransport( + credentials_file="credentials.json", + quota_project_id="octopus", + ) + load_creds.assert_called_once_with("credentials.json", + scopes=None, + default_scopes=( + 'https://www.googleapis.com/auth/cloud-platform', +), + quota_project_id="octopus", + ) + + +def test_metadata_service_base_transport_with_adc(): + # Test the default credentials are used if credentials and credentials_file are None. 
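+ # (google.auth.default() is the Application Default Credentials lookup;
+ # outside of tests it resolves from the environment, e.g. the
+ # GOOGLE_APPLICATION_CREDENTIALS variable or gcloud user credentials.
+ # The mock below substitutes anonymous credentials for that lookup.)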
+ with mock.patch.object(google.auth, 'default', autospec=True) as adc, mock.patch('google.cloud.dataplex_v1.services.metadata_service.transports.MetadataServiceTransport._prep_wrapped_messages') as Transport: + Transport.return_value = None + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport = transports.MetadataServiceTransport() + adc.assert_called_once() + + +def test_metadata_service_auth_adc(): + # If no credentials are provided, we should use ADC credentials. + with mock.patch.object(google.auth, 'default', autospec=True) as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + MetadataServiceClient() + adc.assert_called_once_with( + scopes=None, + default_scopes=( + 'https://www.googleapis.com/auth/cloud-platform', +), + quota_project_id=None, + ) + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.MetadataServiceGrpcTransport, + transports.MetadataServiceGrpcAsyncIOTransport, + ], +) +def test_metadata_service_transport_auth_adc(transport_class): + # If credentials and host are not provided, the transport class should use + # ADC credentials. + with mock.patch.object(google.auth, 'default', autospec=True) as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport_class(quota_project_id="octopus", scopes=["1", "2"]) + adc.assert_called_once_with( + scopes=["1", "2"], + default_scopes=( 'https://www.googleapis.com/auth/cloud-platform',), + quota_project_id="octopus", + ) + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.MetadataServiceGrpcTransport, + transports.MetadataServiceGrpcAsyncIOTransport, + ], +) +def test_metadata_service_transport_auth_gdch_credentials(transport_class): + host = 'https://language.com' + api_audience_tests = [None, 'https://language2.com'] + api_audience_expect = [host, 'https://language2.com'] + for t, e in zip(api_audience_tests, api_audience_expect): + with mock.patch.object(google.auth, 'default', autospec=True) as adc: + gdch_mock = mock.MagicMock() + type(gdch_mock).with_gdch_audience = mock.PropertyMock(return_value=gdch_mock) + adc.return_value = (gdch_mock, None) + transport_class(host=host, api_audience=t) + gdch_mock.with_gdch_audience.assert_called_once_with( + e + ) + + +@pytest.mark.parametrize( + "transport_class,grpc_helpers", + [ + (transports.MetadataServiceGrpcTransport, grpc_helpers), + (transports.MetadataServiceGrpcAsyncIOTransport, grpc_helpers_async) + ], +) +def test_metadata_service_transport_create_channel(transport_class, grpc_helpers): + # If credentials and host are not provided, the transport class should use + # ADC credentials. 
+ with mock.patch.object(google.auth, "default", autospec=True) as adc, mock.patch.object( + grpc_helpers, "create_channel", autospec=True + ) as create_channel: + creds = ga_credentials.AnonymousCredentials() + adc.return_value = (creds, None) + transport_class( + quota_project_id="octopus", + scopes=["1", "2"] + ) + + create_channel.assert_called_with( + "dataplex.googleapis.com:443", + credentials=creds, + credentials_file=None, + quota_project_id="octopus", + default_scopes=( + 'https://www.googleapis.com/auth/cloud-platform', +), + scopes=["1", "2"], + default_host="dataplex.googleapis.com", + ssl_credentials=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + +@pytest.mark.parametrize("transport_class", [transports.MetadataServiceGrpcTransport, transports.MetadataServiceGrpcAsyncIOTransport]) +def test_metadata_service_grpc_transport_client_cert_source_for_mtls( + transport_class +): + cred = ga_credentials.AnonymousCredentials() + + # Check ssl_channel_credentials is used if provided. + with mock.patch.object(transport_class, "create_channel") as mock_create_channel: + mock_ssl_channel_creds = mock.Mock() + transport_class( + host="squid.clam.whelk", + credentials=cred, + ssl_channel_credentials=mock_ssl_channel_creds + ) + mock_create_channel.assert_called_once_with( + "squid.clam.whelk:443", + credentials=cred, + credentials_file=None, + scopes=None, + ssl_credentials=mock_ssl_channel_creds, + quota_project_id=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + # Check if ssl_channel_credentials is not provided, then client_cert_source_for_mtls + # is used. + with mock.patch.object(transport_class, "create_channel", return_value=mock.Mock()): + with mock.patch("grpc.ssl_channel_credentials") as mock_ssl_cred: + transport_class( + credentials=cred, + client_cert_source_for_mtls=client_cert_source_callback + ) + expected_cert, expected_key = client_cert_source_callback() + mock_ssl_cred.assert_called_once_with( + certificate_chain=expected_cert, + private_key=expected_key + ) + + +@pytest.mark.parametrize("transport_name", [ + "grpc", + "grpc_asyncio", +]) +def test_metadata_service_host_no_port(transport_name): + client = MetadataServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + client_options=client_options.ClientOptions(api_endpoint='dataplex.googleapis.com'), + transport=transport_name, + ) + assert client.transport._host == ( + 'dataplex.googleapis.com:443' + ) + +@pytest.mark.parametrize("transport_name", [ + "grpc", + "grpc_asyncio", +]) +def test_metadata_service_host_with_port(transport_name): + client = MetadataServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + client_options=client_options.ClientOptions(api_endpoint='dataplex.googleapis.com:8000'), + transport=transport_name, + ) + assert client.transport._host == ( + 'dataplex.googleapis.com:8000' + ) + +def test_metadata_service_grpc_transport_channel(): + channel = grpc.secure_channel('http://localhost/', grpc.local_channel_credentials()) + + # Check that channel is used if provided. 
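+ # (Supplying a prebuilt channel bypasses credential and endpoint
+ # resolution entirely; a hypothetical application-side equivalent:
+ #     channel = grpc.secure_channel('dataplex.googleapis.com:443', creds)
+ #     transport = transports.MetadataServiceGrpcTransport(channel=channel)
+ #     client = MetadataServiceClient(transport=transport))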
+ transport = transports.MetadataServiceGrpcTransport(
+ host="squid.clam.whelk",
+ channel=channel,
+ )
+ assert transport.grpc_channel == channel
+ assert transport._host == "squid.clam.whelk:443"
+ assert transport._ssl_channel_credentials is None
+
+
+def test_metadata_service_grpc_asyncio_transport_channel():
+ channel = aio.secure_channel('http://localhost/', grpc.local_channel_credentials())
+
+ # Check that channel is used if provided.
+ transport = transports.MetadataServiceGrpcAsyncIOTransport(
+ host="squid.clam.whelk",
+ channel=channel,
+ )
+ assert transport.grpc_channel == channel
+ assert transport._host == "squid.clam.whelk:443"
+ assert transport._ssl_channel_credentials is None
+
+
+# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are
+# removed from grpc/grpc_asyncio transport constructor.
+@pytest.mark.parametrize("transport_class", [transports.MetadataServiceGrpcTransport, transports.MetadataServiceGrpcAsyncIOTransport])
+def test_metadata_service_transport_channel_mtls_with_client_cert_source(
+ transport_class
+):
+ with mock.patch("grpc.ssl_channel_credentials", autospec=True) as grpc_ssl_channel_cred:
+ with mock.patch.object(transport_class, "create_channel") as grpc_create_channel:
+ mock_ssl_cred = mock.Mock()
+ grpc_ssl_channel_cred.return_value = mock_ssl_cred
+
+ mock_grpc_channel = mock.Mock()
+ grpc_create_channel.return_value = mock_grpc_channel
+
+ cred = ga_credentials.AnonymousCredentials()
+ with pytest.warns(DeprecationWarning):
+ with mock.patch.object(google.auth, 'default') as adc:
+ adc.return_value = (cred, None)
+ transport = transport_class(
+ host="squid.clam.whelk",
+ api_mtls_endpoint="mtls.squid.clam.whelk",
+ client_cert_source=client_cert_source_callback,
+ )
+ adc.assert_called_once()
+
+ grpc_ssl_channel_cred.assert_called_once_with(
+ certificate_chain=b"cert bytes", private_key=b"key bytes"
+ )
+ grpc_create_channel.assert_called_once_with(
+ "mtls.squid.clam.whelk:443",
+ credentials=cred,
+ credentials_file=None,
+ scopes=None,
+ ssl_credentials=mock_ssl_cred,
+ quota_project_id=None,
+ options=[
+ ("grpc.max_send_message_length", -1),
+ ("grpc.max_receive_message_length", -1),
+ ],
+ )
+ assert transport.grpc_channel == mock_grpc_channel
+ assert transport._ssl_channel_credentials == mock_ssl_cred
+
+
+# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are
+# removed from grpc/grpc_asyncio transport constructor.
+@pytest.mark.parametrize("transport_class", [transports.MetadataServiceGrpcTransport, transports.MetadataServiceGrpcAsyncIOTransport]) +def test_metadata_service_transport_channel_mtls_with_adc( + transport_class +): + mock_ssl_cred = mock.Mock() + with mock.patch.multiple( + "google.auth.transport.grpc.SslCredentials", + __init__=mock.Mock(return_value=None), + ssl_credentials=mock.PropertyMock(return_value=mock_ssl_cred), + ): + with mock.patch.object(transport_class, "create_channel") as grpc_create_channel: + mock_grpc_channel = mock.Mock() + grpc_create_channel.return_value = mock_grpc_channel + mock_cred = mock.Mock() + + with pytest.warns(DeprecationWarning): + transport = transport_class( + host="squid.clam.whelk", + credentials=mock_cred, + api_mtls_endpoint="mtls.squid.clam.whelk", + client_cert_source=None, + ) + + grpc_create_channel.assert_called_once_with( + "mtls.squid.clam.whelk:443", + credentials=mock_cred, + credentials_file=None, + scopes=None, + ssl_credentials=mock_ssl_cred, + quota_project_id=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + assert transport.grpc_channel == mock_grpc_channel + + +def test_entity_path(): + project = "squid" + location = "clam" + lake = "whelk" + zone = "octopus" + entity = "oyster" + expected = "projects/{project}/locations/{location}/lakes/{lake}/zones/{zone}/entities/{entity}".format(project=project, location=location, lake=lake, zone=zone, entity=entity, ) + actual = MetadataServiceClient.entity_path(project, location, lake, zone, entity) + assert expected == actual + + +def test_parse_entity_path(): + expected = { + "project": "nudibranch", + "location": "cuttlefish", + "lake": "mussel", + "zone": "winkle", + "entity": "nautilus", + } + path = MetadataServiceClient.entity_path(**expected) + + # Check that the path construction is reversible. + actual = MetadataServiceClient.parse_entity_path(path) + assert expected == actual + +def test_partition_path(): + project = "scallop" + location = "abalone" + lake = "squid" + zone = "clam" + entity = "whelk" + partition = "octopus" + expected = "projects/{project}/locations/{location}/lakes/{lake}/zones/{zone}/entities/{entity}/partitions/{partition}".format(project=project, location=location, lake=lake, zone=zone, entity=entity, partition=partition, ) + actual = MetadataServiceClient.partition_path(project, location, lake, zone, entity, partition) + assert expected == actual + + +def test_parse_partition_path(): + expected = { + "project": "oyster", + "location": "nudibranch", + "lake": "cuttlefish", + "zone": "mussel", + "entity": "winkle", + "partition": "nautilus", + } + path = MetadataServiceClient.partition_path(**expected) + + # Check that the path construction is reversible. + actual = MetadataServiceClient.parse_partition_path(path) + assert expected == actual + +def test_zone_path(): + project = "scallop" + location = "abalone" + lake = "squid" + zone = "clam" + expected = "projects/{project}/locations/{location}/lakes/{lake}/zones/{zone}".format(project=project, location=location, lake=lake, zone=zone, ) + actual = MetadataServiceClient.zone_path(project, location, lake, zone) + assert expected == actual + + +def test_parse_zone_path(): + expected = { + "project": "whelk", + "location": "octopus", + "lake": "oyster", + "zone": "nudibranch", + } + path = MetadataServiceClient.zone_path(**expected) + + # Check that the path construction is reversible. 
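+ # (The path helpers are simple classmethods; an illustrative round trip:
+ #     path = MetadataServiceClient.zone_path("p", "l", "lake", "z")
+ #     # -> "projects/p/locations/l/lakes/lake/zones/z"
+ #     MetadataServiceClient.parse_zone_path(path)["zone"]  # -> "z"
+ # which is exactly the property asserted below.)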
+ actual = MetadataServiceClient.parse_zone_path(path) + assert expected == actual + +def test_common_billing_account_path(): + billing_account = "cuttlefish" + expected = "billingAccounts/{billing_account}".format(billing_account=billing_account, ) + actual = MetadataServiceClient.common_billing_account_path(billing_account) + assert expected == actual + + +def test_parse_common_billing_account_path(): + expected = { + "billing_account": "mussel", + } + path = MetadataServiceClient.common_billing_account_path(**expected) + + # Check that the path construction is reversible. + actual = MetadataServiceClient.parse_common_billing_account_path(path) + assert expected == actual + +def test_common_folder_path(): + folder = "winkle" + expected = "folders/{folder}".format(folder=folder, ) + actual = MetadataServiceClient.common_folder_path(folder) + assert expected == actual + + +def test_parse_common_folder_path(): + expected = { + "folder": "nautilus", + } + path = MetadataServiceClient.common_folder_path(**expected) + + # Check that the path construction is reversible. + actual = MetadataServiceClient.parse_common_folder_path(path) + assert expected == actual + +def test_common_organization_path(): + organization = "scallop" + expected = "organizations/{organization}".format(organization=organization, ) + actual = MetadataServiceClient.common_organization_path(organization) + assert expected == actual + + +def test_parse_common_organization_path(): + expected = { + "organization": "abalone", + } + path = MetadataServiceClient.common_organization_path(**expected) + + # Check that the path construction is reversible. + actual = MetadataServiceClient.parse_common_organization_path(path) + assert expected == actual + +def test_common_project_path(): + project = "squid" + expected = "projects/{project}".format(project=project, ) + actual = MetadataServiceClient.common_project_path(project) + assert expected == actual + + +def test_parse_common_project_path(): + expected = { + "project": "clam", + } + path = MetadataServiceClient.common_project_path(**expected) + + # Check that the path construction is reversible. + actual = MetadataServiceClient.parse_common_project_path(path) + assert expected == actual + +def test_common_location_path(): + project = "whelk" + location = "octopus" + expected = "projects/{project}/locations/{location}".format(project=project, location=location, ) + actual = MetadataServiceClient.common_location_path(project, location) + assert expected == actual + + +def test_parse_common_location_path(): + expected = { + "project": "oyster", + "location": "nudibranch", + } + path = MetadataServiceClient.common_location_path(**expected) + + # Check that the path construction is reversible. 
+ actual = MetadataServiceClient.parse_common_location_path(path) + assert expected == actual + + +def test_client_with_default_client_info(): + client_info = gapic_v1.client_info.ClientInfo() + + with mock.patch.object(transports.MetadataServiceTransport, '_prep_wrapped_messages') as prep: + client = MetadataServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + client_info=client_info, + ) + prep.assert_called_once_with(client_info) + + with mock.patch.object(transports.MetadataServiceTransport, '_prep_wrapped_messages') as prep: + transport_class = MetadataServiceClient.get_transport_class() + transport = transport_class( + credentials=ga_credentials.AnonymousCredentials(), + client_info=client_info, + ) + prep.assert_called_once_with(client_info) + + +def test_delete_operation(transport: str = "grpc"): + client = MetadataServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.DeleteOperationRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = None + response = client.delete_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert response is None +@pytest.mark.asyncio +async def test_delete_operation_async(transport: str = "grpc_asyncio"): + client = MetadataServiceAsyncClient( + credentials=async_anonymous_credentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.DeleteOperationRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + None + ) + response = await client.delete_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert response is None + +def test_delete_operation_field_headers(): + client = MetadataServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.DeleteOperationRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_operation), "__call__") as call: + call.return_value = None + + client.delete_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. 
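+ # (x-goog-request-params carries implicit routing information: the
+ # resource name from the request is copied into call metadata so the
+ # backend can route the RPC; the membership check below verifies the
+ # header was attached verbatim.)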
+ _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "name=locations",) in kw["metadata"] +@pytest.mark.asyncio +async def test_delete_operation_field_headers_async(): + client = MetadataServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.DeleteOperationRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_operation), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + None + ) + await client.delete_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "name=locations",) in kw["metadata"] + +def test_delete_operation_from_dict(): + client = MetadataServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = None + + response = client.delete_operation( + request={ + "name": "locations", + } + ) + call.assert_called() +@pytest.mark.asyncio +async def test_delete_operation_from_dict_async(): + client = MetadataServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + None + ) + response = await client.delete_operation( + request={ + "name": "locations", + } + ) + call.assert_called() + + +def test_cancel_operation(transport: str = "grpc"): + client = MetadataServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.CancelOperationRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = None + response = client.cancel_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert response is None +@pytest.mark.asyncio +async def test_cancel_operation_async(transport: str = "grpc_asyncio"): + client = MetadataServiceAsyncClient( + credentials=async_anonymous_credentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.CancelOperationRequest() + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + None + ) + response = await client.cancel_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert response is None + +def test_cancel_operation_field_headers(): + client = MetadataServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.CancelOperationRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: + call.return_value = None + + client.cancel_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "name=locations",) in kw["metadata"] +@pytest.mark.asyncio +async def test_cancel_operation_field_headers_async(): + client = MetadataServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.CancelOperationRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + None + ) + await client.cancel_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "name=locations",) in kw["metadata"] + +def test_cancel_operation_from_dict(): + client = MetadataServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = None + + response = client.cancel_operation( + request={ + "name": "locations", + } + ) + call.assert_called() +@pytest.mark.asyncio +async def test_cancel_operation_from_dict_async(): + client = MetadataServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: + # Designate an appropriate return value for the call. 
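+        # (FakeUnaryUnaryCall wraps the value in an awaitable, so the mocked
+        # stub can be awaited like a real async unary-unary gRPC call.)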
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + None + ) + response = await client.cancel_operation( + request={ + "name": "locations", + } + ) + call.assert_called() + + +def test_get_operation(transport: str = "grpc"): + client = MetadataServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.GetOperationRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation() + response = client.get_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, operations_pb2.Operation) +@pytest.mark.asyncio +async def test_get_operation_async(transport: str = "grpc_asyncio"): + client = MetadataServiceAsyncClient( + credentials=async_anonymous_credentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.GetOperationRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation() + ) + response = await client.get_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, operations_pb2.Operation) + +def test_get_operation_field_headers(): + client = MetadataServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.GetOperationRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_operation), "__call__") as call: + call.return_value = operations_pb2.Operation() + + client.get_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "name=locations",) in kw["metadata"] +@pytest.mark.asyncio +async def test_get_operation_field_headers_async(): + client = MetadataServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.GetOperationRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object(type(client.transport.get_operation), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation() + ) + await client.get_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "name=locations",) in kw["metadata"] + +def test_get_operation_from_dict(): + client = MetadataServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation() + + response = client.get_operation( + request={ + "name": "locations", + } + ) + call.assert_called() +@pytest.mark.asyncio +async def test_get_operation_from_dict_async(): + client = MetadataServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation() + ) + response = await client.get_operation( + request={ + "name": "locations", + } + ) + call.assert_called() + + +def test_list_operations(transport: str = "grpc"): + client = MetadataServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.ListOperationsRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_operations), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.ListOperationsResponse() + response = client.list_operations(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, operations_pb2.ListOperationsResponse) +@pytest.mark.asyncio +async def test_list_operations_async(transport: str = "grpc_asyncio"): + client = MetadataServiceAsyncClient( + credentials=async_anonymous_credentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.ListOperationsRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_operations), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.ListOperationsResponse() + ) + response = await client.list_operations(request) + # Establish that the underlying gRPC stub method was called. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, operations_pb2.ListOperationsResponse) + +def test_list_operations_field_headers(): + client = MetadataServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.ListOperationsRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_operations), "__call__") as call: + call.return_value = operations_pb2.ListOperationsResponse() + + client.list_operations(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "name=locations",) in kw["metadata"] +@pytest.mark.asyncio +async def test_list_operations_field_headers_async(): + client = MetadataServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.ListOperationsRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_operations), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.ListOperationsResponse() + ) + await client.list_operations(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "name=locations",) in kw["metadata"] + +def test_list_operations_from_dict(): + client = MetadataServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_operations), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.ListOperationsResponse() + + response = client.list_operations( + request={ + "name": "locations", + } + ) + call.assert_called() +@pytest.mark.asyncio +async def test_list_operations_from_dict_async(): + client = MetadataServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_operations), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.ListOperationsResponse() + ) + response = await client.list_operations( + request={ + "name": "locations", + } + ) + call.assert_called() + + +def test_list_locations(transport: str = "grpc"): + client = MetadataServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. 
+ request = locations_pb2.ListLocationsRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_locations), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = locations_pb2.ListLocationsResponse() + response = client.list_locations(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, locations_pb2.ListLocationsResponse) +@pytest.mark.asyncio +async def test_list_locations_async(transport: str = "grpc_asyncio"): + client = MetadataServiceAsyncClient( + credentials=async_anonymous_credentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = locations_pb2.ListLocationsRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_locations), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + locations_pb2.ListLocationsResponse() + ) + response = await client.list_locations(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, locations_pb2.ListLocationsResponse) + +def test_list_locations_field_headers(): + client = MetadataServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = locations_pb2.ListLocationsRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_locations), "__call__") as call: + call.return_value = locations_pb2.ListLocationsResponse() + + client.list_locations(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "name=locations",) in kw["metadata"] +@pytest.mark.asyncio +async def test_list_locations_field_headers_async(): + client = MetadataServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = locations_pb2.ListLocationsRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_locations), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + locations_pb2.ListLocationsResponse() + ) + await client.list_locations(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. 
+ _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "name=locations",) in kw["metadata"] + +def test_list_locations_from_dict(): + client = MetadataServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_locations), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = locations_pb2.ListLocationsResponse() + + response = client.list_locations( + request={ + "name": "locations", + } + ) + call.assert_called() +@pytest.mark.asyncio +async def test_list_locations_from_dict_async(): + client = MetadataServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_locations), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + locations_pb2.ListLocationsResponse() + ) + response = await client.list_locations( + request={ + "name": "locations", + } + ) + call.assert_called() + + +def test_get_location(transport: str = "grpc"): + client = MetadataServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = locations_pb2.GetLocationRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_location), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = locations_pb2.Location() + response = client.get_location(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, locations_pb2.Location) +@pytest.mark.asyncio +async def test_get_location_async(transport: str = "grpc_asyncio"): + client = MetadataServiceAsyncClient( + credentials=async_anonymous_credentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = locations_pb2.GetLocationRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_location), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + locations_pb2.Location() + ) + response = await client.get_location(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, locations_pb2.Location) + +def test_get_location_field_headers(): + client = MetadataServiceClient( + credentials=ga_credentials.AnonymousCredentials()) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = locations_pb2.GetLocationRequest() + request.name = "locations/abc" + + # Mock the actual call within the gRPC stub, and fake the request. 
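+    # (Patching __call__ on the stub's type intercepts the outbound RPC while
+    # leaving the client-side metadata handling intact.)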
+    with mock.patch.object(type(client.transport.get_location), "__call__") as call:
+        call.return_value = locations_pb2.Location()
+
+        client.get_location(request)
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == request
+
+    # Establish that the field header was sent.
+    _, _, kw = call.mock_calls[0]
+    assert ("x-goog-request-params", "name=locations/abc",) in kw["metadata"]
+@pytest.mark.asyncio
+async def test_get_location_field_headers_async():
+    client = MetadataServiceAsyncClient(
+        credentials=async_anonymous_credentials()
+    )
+
+    # Any value that is part of the HTTP/1.1 URI should be sent as
+    # a field header. Set these to a non-empty value.
+    request = locations_pb2.GetLocationRequest()
+    request.name = "locations/abc"
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client.transport.get_location), "__call__") as call:
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
+            locations_pb2.Location()
+        )
+        await client.get_location(request)
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == request
+
+    # Establish that the field header was sent.
+    _, _, kw = call.mock_calls[0]
+    assert ("x-goog-request-params", "name=locations/abc",) in kw["metadata"]
+
+def test_get_location_from_dict():
+    client = MetadataServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client.transport.get_location), "__call__") as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = locations_pb2.Location()
+
+        response = client.get_location(
+            request={
+                "name": "locations/abc",
+            }
+        )
+        call.assert_called()
+@pytest.mark.asyncio
+async def test_get_location_from_dict_async():
+    client = MetadataServiceAsyncClient(
+        credentials=async_anonymous_credentials(),
+    )
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client.transport.get_location), "__call__") as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
+            locations_pb2.Location()
+        )
+        response = await client.get_location(
+            request={
+                "name": "locations/abc",
+            }
+        )
+        call.assert_called()
+
+
+def test_transport_close_grpc():
+    client = MetadataServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport="grpc"
+    )
+    with mock.patch.object(type(getattr(client.transport, "_grpc_channel")), "close") as close:
+        with client:
+            close.assert_not_called()
+        close.assert_called_once()
+
+
+@pytest.mark.asyncio
+async def test_transport_close_grpc_asyncio():
+    client = MetadataServiceAsyncClient(
+        credentials=async_anonymous_credentials(),
+        transport="grpc_asyncio"
+    )
+    with mock.patch.object(type(getattr(client.transport, "_grpc_channel")), "close") as close:
+        async with client:
+            close.assert_not_called()
+        close.assert_called_once()
+
+
+def test_client_ctx():
+    transports = [
+        'grpc',
+    ]
+    for transport in transports:
+        client = MetadataServiceClient(
+            credentials=ga_credentials.AnonymousCredentials(),
+            transport=transport
+        )
+        # Test client calls underlying transport.
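+        # (close() must not fire while the context manager is active; exiting
+        # the ``with`` block below is what should trigger it.)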
+ with mock.patch.object(type(client.transport), "close") as close: + close.assert_not_called() + with client: + pass + close.assert_called() + +@pytest.mark.parametrize("client_class,transport_class", [ + (MetadataServiceClient, transports.MetadataServiceGrpcTransport), + (MetadataServiceAsyncClient, transports.MetadataServiceGrpcAsyncIOTransport), +]) +def test_api_key_credentials(client_class, transport_class): + with mock.patch.object( + google.auth._default, "get_api_key_credentials", create=True + ) as get_api_key_credentials: + mock_cred = mock.Mock() + get_api_key_credentials.return_value = mock_cred + options = client_options.ClientOptions() + options.api_key = "api_key" + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options) + patched.assert_called_once_with( + credentials=mock_cred, + credentials_file=None, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) From d1cb32f786bc7f462f1b0f3bc0049e7f33713675 Mon Sep 17 00:00:00 2001 From: Anthonios Partheniou Date: Fri, 15 Nov 2024 17:08:36 +0000 Subject: [PATCH 2/3] update replacement for google-cloud-dataplex --- .../client-post-processing/doc-formatting.yaml | 16 ++-------------- 1 file changed, 2 insertions(+), 14 deletions(-) diff --git a/scripts/client-post-processing/doc-formatting.yaml b/scripts/client-post-processing/doc-formatting.yaml index 71b2ebb536c2..effa34a100d7 100644 --- a/scripts/client-post-processing/doc-formatting.yaml +++ b/scripts/client-post-processing/doc-formatting.yaml @@ -223,20 +223,8 @@ replacements: - paths: [ packages/google-cloud-dataplex/google/cloud/dataplex_v1/types/catalog.py, ] - before: | - : - \ \"entry_source.display_name=AnExampleDisplayName\" - \ \"entry_type=projects\/example-project\/locations\/global\/entryTypes\/example-entry_type\" - \ \"entry_type=projects\/example-project\/locations\/us\/entryTypes\/a\* - \ OR entry_type=projects\/another-project\/locations\/\*\" \"NOT - \ entry_source.display_name=AnotherExampleDisplayName\". - after: | - : - `entry_source.display_name=AnExampleDisplayName` - `entry_type=projects/example-project/locations/global/entryTypes/example-entry_type` - `entry_type=projects/example-project/locations/us/entryTypes/a* - OR entry_type=projects/another-project/locations/*` `NOT - entry_source.display_name=AnotherExampleDisplayName`. 
+ before: entry_type=projects\/another-project\/locations\/\* + after: "entry_type=projects/another-project/locations/\\*" count: 1 - paths: [ packages/google-cloud-deploy/google/cloud/deploy_v1/types/cloud_deploy.py, From 3e4ed2ebc2511e7fa3b973408e0e24a980b34fa0 Mon Sep 17 00:00:00 2001 From: Owl Bot Date: Fri, 15 Nov 2024 17:12:49 +0000 Subject: [PATCH 3/3] =?UTF-8?q?=F0=9F=A6=89=20Updates=20from=20OwlBot=20po?= =?UTF-8?q?st-processor?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit See https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md --- .../google-cloud-dataplex/v1/.coveragerc | 13 - .../google-cloud-dataplex/v1/.flake8 | 33 - .../google-cloud-dataplex/v1/MANIFEST.in | 2 - .../google-cloud-dataplex/v1/README.rst | 49 - .../v1/docs/_static/custom.css | 3 - .../google-cloud-dataplex/v1/docs/conf.py | 376 - .../v1/docs/dataplex_v1/catalog_service.rst | 10 - .../v1/docs/dataplex_v1/content_service.rst | 10 - .../v1/docs/dataplex_v1/data_scan_service.rst | 10 - .../dataplex_v1/data_taxonomy_service.rst | 10 - .../v1/docs/dataplex_v1/dataplex_service.rst | 10 - .../v1/docs/dataplex_v1/metadata_service.rst | 10 - .../v1/docs/dataplex_v1/services_.rst | 11 - .../v1/docs/dataplex_v1/types_.rst | 6 - .../google-cloud-dataplex/v1/docs/index.rst | 7 - .../v1/google/cloud/dataplex/__init__.py | 407 - .../v1/google/cloud/dataplex/gapic_version.py | 16 - .../v1/google/cloud/dataplex/py.typed | 2 - .../v1/google/cloud/dataplex_v1/__init__.py | 408 - .../cloud/dataplex_v1/gapic_metadata.json | 1093 - .../google/cloud/dataplex_v1/gapic_version.py | 16 - .../v1/google/cloud/dataplex_v1/py.typed | 2 - .../cloud/dataplex_v1/services/__init__.py | 15 - .../services/catalog_service/__init__.py | 22 - .../services/catalog_service/async_client.py | 3652 ---- .../services/catalog_service/client.py | 3986 ---- .../services/catalog_service/pagers.py | 837 - .../catalog_service/transports/README.rst | 9 - .../catalog_service/transports/__init__.py | 33 - .../catalog_service/transports/base.py | 707 - .../catalog_service/transports/grpc.py | 1076 - .../transports/grpc_asyncio.py | 1362 -- .../services/content_service/__init__.py | 22 - .../services/content_service/async_client.py | 1435 -- .../services/content_service/client.py | 1767 -- .../services/content_service/pagers.py | 163 - .../content_service/transports/README.rst | 9 - .../content_service/transports/__init__.py | 33 - .../content_service/transports/base.py | 377 - .../content_service/transports/grpc.py | 587 - .../transports/grpc_asyncio.py | 709 - .../services/data_scan_service/__init__.py | 22 - .../data_scan_service/async_client.py | 1665 -- .../services/data_scan_service/client.py | 2015 -- .../services/data_scan_service/pagers.py | 297 - .../data_scan_service/transports/README.rst | 9 - .../data_scan_service/transports/__init__.py | 33 - .../data_scan_service/transports/base.py | 358 - .../data_scan_service/transports/grpc.py | 615 - .../transports/grpc_asyncio.py | 706 - .../data_taxonomy_service/__init__.py | 22 - .../data_taxonomy_service/async_client.py | 2420 --- .../services/data_taxonomy_service/client.py | 2746 --- .../services/data_taxonomy_service/pagers.py | 432 - .../transports/README.rst | 9 - .../transports/__init__.py | 33 - .../data_taxonomy_service/transports/base.py | 443 - .../data_taxonomy_service/transports/grpc.py | 773 - .../transports/grpc_asyncio.py | 894 - .../services/dataplex_service/__init__.py | 22 - 
.../services/dataplex_service/async_client.py | 4580 ----- .../services/dataplex_service/client.py | 4933 ----- .../services/dataplex_service/pagers.py | 1380 -- .../dataplex_service/transports/README.rst | 9 - .../dataplex_service/transports/__init__.py | 33 - .../dataplex_service/transports/base.py | 833 - .../dataplex_service/transports/grpc.py | 1247 -- .../transports/grpc_asyncio.py | 1593 -- .../services/metadata_service/__init__.py | 22 - .../services/metadata_service/async_client.py | 1507 -- .../services/metadata_service/client.py | 1840 -- .../services/metadata_service/pagers.py | 297 - .../metadata_service/transports/README.rst | 9 - .../metadata_service/transports/__init__.py | 33 - .../metadata_service/transports/base.py | 389 - .../metadata_service/transports/grpc.py | 593 - .../transports/grpc_asyncio.py | 720 - .../cloud/dataplex_v1/types/__init__.py | 408 - .../google/cloud/dataplex_v1/types/analyze.py | 491 - .../google/cloud/dataplex_v1/types/catalog.py | 2630 --- .../google/cloud/dataplex_v1/types/content.py | 227 - .../cloud/dataplex_v1/types/data_profile.py | 540 - .../cloud/dataplex_v1/types/data_quality.py | 912 - .../cloud/dataplex_v1/types/data_taxonomy.py | 976 - .../cloud/dataplex_v1/types/datascans.py | 905 - .../v1/google/cloud/dataplex_v1/types/logs.py | 1352 -- .../cloud/dataplex_v1/types/metadata_.py | 1182 -- .../cloud/dataplex_v1/types/processing.py | 186 - .../cloud/dataplex_v1/types/resources.py | 1444 -- .../cloud/dataplex_v1/types/security.py | 90 - .../google/cloud/dataplex_v1/types/service.py | 1395 -- .../google/cloud/dataplex_v1/types/tasks.py | 751 - .../google-cloud-dataplex/v1/mypy.ini | 3 - .../google-cloud-dataplex/v1/noxfile.py | 280 - ...atalog_service_create_aspect_type_async.py | 62 - ...catalog_service_create_aspect_type_sync.py | 62 - ...ated_catalog_service_create_entry_async.py | 57 - ...atalog_service_create_entry_group_async.py | 57 - ...catalog_service_create_entry_group_sync.py | 57 - ...rated_catalog_service_create_entry_sync.py | 57 - ...catalog_service_create_entry_type_async.py | 57 - ..._catalog_service_create_entry_type_sync.py | 57 - ...atalog_service_delete_aspect_type_async.py | 56 - ...catalog_service_delete_aspect_type_sync.py | 56 - ...ated_catalog_service_delete_entry_async.py | 52 - ...atalog_service_delete_entry_group_async.py | 56 - ...catalog_service_delete_entry_group_sync.py | 56 - ...rated_catalog_service_delete_entry_sync.py | 52 - ...catalog_service_delete_entry_type_async.py | 56 - ..._catalog_service_delete_entry_type_sync.py | 56 - ...d_catalog_service_get_aspect_type_async.py | 52 - ...ed_catalog_service_get_aspect_type_sync.py | 52 - ...nerated_catalog_service_get_entry_async.py | 52 - ...d_catalog_service_get_entry_group_async.py | 52 - ...ed_catalog_service_get_entry_group_sync.py | 52 - ...enerated_catalog_service_get_entry_sync.py | 52 - ...ed_catalog_service_get_entry_type_async.py | 52 - ...ted_catalog_service_get_entry_type_sync.py | 52 - ...catalog_service_list_aspect_types_async.py | 53 - ..._catalog_service_list_aspect_types_sync.py | 53 - ...ated_catalog_service_list_entries_async.py | 53 - ...rated_catalog_service_list_entries_sync.py | 53 - ...catalog_service_list_entry_groups_async.py | 53 - ..._catalog_service_list_entry_groups_sync.py | 53 - ..._catalog_service_list_entry_types_async.py | 53 - ...d_catalog_service_list_entry_types_sync.py | 53 - ...ated_catalog_service_lookup_entry_async.py | 53 - ...rated_catalog_service_lookup_entry_sync.py | 53 - 
...ed_catalog_service_search_entries_async.py | 54 - ...ted_catalog_service_search_entries_sync.py | 54 - ...atalog_service_update_aspect_type_async.py | 60 - ...catalog_service_update_aspect_type_sync.py | 60 - ...ated_catalog_service_update_entry_async.py | 55 - ...atalog_service_update_entry_group_async.py | 55 - ...catalog_service_update_entry_group_sync.py | 55 - ...rated_catalog_service_update_entry_sync.py | 55 - ...catalog_service_update_entry_type_async.py | 55 - ..._catalog_service_update_entry_type_sync.py | 55 - ...ed_content_service_create_content_async.py | 58 - ...ted_content_service_create_content_sync.py | 58 - ...ed_content_service_delete_content_async.py | 50 - ...ted_content_service_delete_content_sync.py | 50 - ...rated_content_service_get_content_async.py | 52 - ...erated_content_service_get_content_sync.py | 52 - ...ed_content_service_get_iam_policy_async.py | 53 - ...ted_content_service_get_iam_policy_sync.py | 53 - ...ated_content_service_list_content_async.py | 53 - ...rated_content_service_list_content_sync.py | 53 - ...ed_content_service_set_iam_policy_async.py | 53 - ...ted_content_service_set_iam_policy_sync.py | 53 - ...tent_service_test_iam_permissions_async.py | 54 - ...ntent_service_test_iam_permissions_sync.py | 54 - ...ed_content_service_update_content_async.py | 57 - ...ted_content_service_update_content_sync.py | 57 - ...ata_scan_service_create_data_scan_async.py | 62 - ...data_scan_service_create_data_scan_sync.py | 62 - ...ata_scan_service_delete_data_scan_async.py | 56 - ...data_scan_service_delete_data_scan_sync.py | 56 - ...rvice_generate_data_quality_rules_async.py | 52 - ...ervice_generate_data_quality_rules_sync.py | 52 - ...d_data_scan_service_get_data_scan_async.py | 52 - ...ta_scan_service_get_data_scan_job_async.py | 52 - ...ata_scan_service_get_data_scan_job_sync.py | 52 - ...ed_data_scan_service_get_data_scan_sync.py | 52 - ..._scan_service_list_data_scan_jobs_async.py | 53 - ...a_scan_service_list_data_scan_jobs_sync.py | 53 - ...data_scan_service_list_data_scans_async.py | 53 - ..._data_scan_service_list_data_scans_sync.py | 53 - ...d_data_scan_service_run_data_scan_async.py | 52 - ...ed_data_scan_service_run_data_scan_sync.py | 52 - ...ata_scan_service_update_data_scan_async.py | 60 - ...data_scan_service_update_data_scan_sync.py | 60 - ...omy_service_create_data_attribute_async.py | 57 - ...ice_create_data_attribute_binding_async.py | 61 - ...vice_create_data_attribute_binding_sync.py | 61 - ...nomy_service_create_data_attribute_sync.py | 57 - ...nomy_service_create_data_taxonomy_async.py | 57 - ...onomy_service_create_data_taxonomy_sync.py | 57 - ...omy_service_delete_data_attribute_async.py | 56 - ...ice_delete_data_attribute_binding_async.py | 57 - ...vice_delete_data_attribute_binding_sync.py | 57 - ...nomy_service_delete_data_attribute_sync.py | 56 - ...nomy_service_delete_data_taxonomy_async.py | 56 - ...onomy_service_delete_data_taxonomy_sync.py | 56 - ...xonomy_service_get_data_attribute_async.py | 52 - ...ervice_get_data_attribute_binding_async.py | 52 - ...service_get_data_attribute_binding_sync.py | 52 - ...axonomy_service_get_data_attribute_sync.py | 52 - ...axonomy_service_get_data_taxonomy_async.py | 52 - ...taxonomy_service_get_data_taxonomy_sync.py | 52 - ...vice_list_data_attribute_bindings_async.py | 53 - ...rvice_list_data_attribute_bindings_sync.py | 53 - ...nomy_service_list_data_attributes_async.py | 53 - ...onomy_service_list_data_attributes_sync.py | 53 - ...nomy_service_list_data_taxonomies_async.py | 53 - 
...onomy_service_list_data_taxonomies_sync.py | 53 - ...omy_service_update_data_attribute_async.py | 55 - ...ice_update_data_attribute_binding_async.py | 59 - ...vice_update_data_attribute_binding_sync.py | 59 - ...nomy_service_update_data_attribute_sync.py | 55 - ...nomy_service_update_data_taxonomy_async.py | 55 - ...onomy_service_update_data_taxonomy_sync.py | 55 - ...rated_dataplex_service_cancel_job_async.py | 50 - ...erated_dataplex_service_cancel_job_sync.py | 50 - ...ted_dataplex_service_create_asset_async.py | 61 - ...ated_dataplex_service_create_asset_sync.py | 61 - ...taplex_service_create_environment_async.py | 61 - ...ataplex_service_create_environment_sync.py | 61 - ...ated_dataplex_service_create_lake_async.py | 57 - ...rated_dataplex_service_create_lake_sync.py | 57 - ...ated_dataplex_service_create_task_async.py | 64 - ...rated_dataplex_service_create_task_sync.py | 64 - ...ated_dataplex_service_create_zone_async.py | 62 - ...rated_dataplex_service_create_zone_sync.py | 62 - ...ted_dataplex_service_delete_asset_async.py | 56 - ...ated_dataplex_service_delete_asset_sync.py | 56 - ...taplex_service_delete_environment_async.py | 56 - ...ataplex_service_delete_environment_sync.py | 56 - ...ated_dataplex_service_delete_lake_async.py | 56 - ...rated_dataplex_service_delete_lake_sync.py | 56 - ...ated_dataplex_service_delete_task_async.py | 56 - ...rated_dataplex_service_delete_task_sync.py | 56 - ...ated_dataplex_service_delete_zone_async.py | 56 - ...rated_dataplex_service_delete_zone_sync.py | 56 - ...erated_dataplex_service_get_asset_async.py | 52 - ...nerated_dataplex_service_get_asset_sync.py | 52 - ..._dataplex_service_get_environment_async.py | 52 - ...d_dataplex_service_get_environment_sync.py | 52 - ...enerated_dataplex_service_get_job_async.py | 52 - ...generated_dataplex_service_get_job_sync.py | 52 - ...nerated_dataplex_service_get_lake_async.py | 52 - ...enerated_dataplex_service_get_lake_sync.py | 52 - ...nerated_dataplex_service_get_task_async.py | 52 - ...enerated_dataplex_service_get_task_sync.py | 52 - ...nerated_dataplex_service_get_zone_async.py | 52 - ...enerated_dataplex_service_get_zone_sync.py | 52 - ...taplex_service_list_asset_actions_async.py | 53 - ...ataplex_service_list_asset_actions_sync.py | 53 - ...ated_dataplex_service_list_assets_async.py | 53 - ...rated_dataplex_service_list_assets_sync.py | 53 - ...ataplex_service_list_environments_async.py | 53 - ...dataplex_service_list_environments_sync.py | 53 - ...erated_dataplex_service_list_jobs_async.py | 53 - ...nerated_dataplex_service_list_jobs_sync.py | 53 - ...ataplex_service_list_lake_actions_async.py | 53 - ...dataplex_service_list_lake_actions_sync.py | 53 - ...rated_dataplex_service_list_lakes_async.py | 53 - ...erated_dataplex_service_list_lakes_sync.py | 53 - ...ed_dataplex_service_list_sessions_async.py | 53 - ...ted_dataplex_service_list_sessions_sync.py | 53 - ...rated_dataplex_service_list_tasks_async.py | 53 - ...erated_dataplex_service_list_tasks_sync.py | 53 - ...ataplex_service_list_zone_actions_async.py | 53 - ...dataplex_service_list_zone_actions_sync.py | 53 - ...rated_dataplex_service_list_zones_async.py | 53 - ...erated_dataplex_service_list_zones_sync.py | 53 - ...nerated_dataplex_service_run_task_async.py | 52 - ...enerated_dataplex_service_run_task_sync.py | 52 - ...ted_dataplex_service_update_asset_async.py | 59 - ...ated_dataplex_service_update_asset_sync.py | 59 - ...taplex_service_update_environment_async.py | 59 - ...ataplex_service_update_environment_sync.py | 59 - 
...ated_dataplex_service_update_lake_async.py | 55 - ...rated_dataplex_service_update_lake_sync.py | 55 - ...ated_dataplex_service_update_task_async.py | 62 - ...rated_dataplex_service_update_task_sync.py | 62 - ...ated_dataplex_service_update_zone_async.py | 60 - ...rated_dataplex_service_update_zone_sync.py | 60 - ...ed_metadata_service_create_entity_async.py | 62 - ...ted_metadata_service_create_entity_sync.py | 62 - ...metadata_service_create_partition_async.py | 57 - ..._metadata_service_create_partition_sync.py | 57 - ...ed_metadata_service_delete_entity_async.py | 51 - ...ted_metadata_service_delete_entity_sync.py | 51 - ...metadata_service_delete_partition_async.py | 50 - ..._metadata_service_delete_partition_sync.py | 50 - ...rated_metadata_service_get_entity_async.py | 52 - ...erated_metadata_service_get_entity_sync.py | 52 - ...ed_metadata_service_get_partition_async.py | 52 - ...ted_metadata_service_get_partition_sync.py | 52 - ...ed_metadata_service_list_entities_async.py | 54 - ...ted_metadata_service_list_entities_sync.py | 54 - ..._metadata_service_list_partitions_async.py | 53 - ...d_metadata_service_list_partitions_sync.py | 53 - ...ed_metadata_service_update_entity_async.py | 61 - ...ted_metadata_service_update_entity_sync.py | 61 - ...pet_metadata_google.cloud.dataplex.v1.json | 16421 --------------- .../v1/scripts/fixup_dataplex_v1_keywords.py | 275 - .../google-cloud-dataplex/v1/setup.py | 99 - .../v1/testing/constraints-3.10.txt | 7 - .../v1/testing/constraints-3.11.txt | 7 - .../v1/testing/constraints-3.12.txt | 7 - .../v1/testing/constraints-3.13.txt | 7 - .../v1/testing/constraints-3.7.txt | 11 - .../v1/testing/constraints-3.8.txt | 7 - .../v1/testing/constraints-3.9.txt | 7 - .../v1/tests/__init__.py | 16 - .../v1/tests/unit/__init__.py | 16 - .../v1/tests/unit/gapic/__init__.py | 16 - .../tests/unit/gapic/dataplex_v1/__init__.py | 16 - .../gapic/dataplex_v1/test_catalog_service.py | 13193 ------------ .../gapic/dataplex_v1/test_content_service.py | 5202 ----- .../dataplex_v1/test_data_scan_service.py | 6013 ------ .../dataplex_v1/test_data_taxonomy_service.py | 8505 -------- .../dataplex_v1/test_dataplex_service.py | 16649 ---------------- .../dataplex_v1/test_metadata_service.py | 5952 ------ .../google/cloud/dataplex/__init__.py | 20 + .../google/cloud/dataplex/gapic_version.py | 2 +- .../google/cloud/dataplex_v1/__init__.py | 17 + .../cloud/dataplex_v1/gapic_metadata.json | 40 + .../google/cloud/dataplex_v1/gapic_version.py | 2 +- .../services/catalog_service/async_client.py | 645 +- .../services/catalog_service/client.py | 655 +- .../services/catalog_service/pagers.py | 152 + .../catalog_service/transports/base.py | 60 + .../catalog_service/transports/grpc.py | 165 +- .../transports/grpc_asyncio.py | 189 +- .../data_scan_service/async_client.py | 5 + .../services/data_scan_service/client.py | 42 + .../cloud/dataplex_v1/types/__init__.py | 17 + .../google/cloud/dataplex_v1/types/catalog.py | 1318 +- .../cloud/dataplex_v1/types/data_discovery.py | 28 +- .../cloud/dataplex_v1/types/data_profile.py | 10 +- .../cloud/dataplex_v1/types/data_quality.py | 11 +- .../cloud/dataplex_v1/types/datascans.py | 81 +- .../google/cloud/dataplex_v1/types/logs.py | 82 +- .../google/cloud/dataplex_v1/types/tasks.py | 7 +- ...talog_service_cancel_metadata_job_async.py | 0 ...atalog_service_cancel_metadata_job_sync.py | 0 ...talog_service_create_metadata_job_async.py | 0 ...atalog_service_create_metadata_job_sync.py | 0 ..._catalog_service_get_metadata_job_async.py | 0 
...d_catalog_service_get_metadata_job_sync.py | 0 ...atalog_service_list_metadata_jobs_async.py | 0 ...catalog_service_list_metadata_jobs_sync.py | 0 ...pet_metadata_google.cloud.dataplex.v1.json | 790 +- .../scripts/fixup_dataplex_v1_keywords.py | 4 + .../gapic/dataplex_v1/test_catalog_service.py | 1835 +- .../dataplex_v1/test_data_scan_service.py | 122 +- 339 files changed, 5642 insertions(+), 150234 deletions(-) delete mode 100644 owl-bot-staging/google-cloud-dataplex/v1/.coveragerc delete mode 100644 owl-bot-staging/google-cloud-dataplex/v1/.flake8 delete mode 100644 owl-bot-staging/google-cloud-dataplex/v1/MANIFEST.in delete mode 100644 owl-bot-staging/google-cloud-dataplex/v1/README.rst delete mode 100644 owl-bot-staging/google-cloud-dataplex/v1/docs/_static/custom.css delete mode 100644 owl-bot-staging/google-cloud-dataplex/v1/docs/conf.py delete mode 100644 owl-bot-staging/google-cloud-dataplex/v1/docs/dataplex_v1/catalog_service.rst delete mode 100644 owl-bot-staging/google-cloud-dataplex/v1/docs/dataplex_v1/content_service.rst delete mode 100644 owl-bot-staging/google-cloud-dataplex/v1/docs/dataplex_v1/data_scan_service.rst delete mode 100644 owl-bot-staging/google-cloud-dataplex/v1/docs/dataplex_v1/data_taxonomy_service.rst delete mode 100644 owl-bot-staging/google-cloud-dataplex/v1/docs/dataplex_v1/dataplex_service.rst delete mode 100644 owl-bot-staging/google-cloud-dataplex/v1/docs/dataplex_v1/metadata_service.rst delete mode 100644 owl-bot-staging/google-cloud-dataplex/v1/docs/dataplex_v1/services_.rst delete mode 100644 owl-bot-staging/google-cloud-dataplex/v1/docs/dataplex_v1/types_.rst delete mode 100644 owl-bot-staging/google-cloud-dataplex/v1/docs/index.rst delete mode 100644 owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex/__init__.py delete mode 100644 owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex/gapic_version.py delete mode 100644 owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex/py.typed delete mode 100644 owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/__init__.py delete mode 100644 owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/gapic_metadata.json delete mode 100644 owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/gapic_version.py delete mode 100644 owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/py.typed delete mode 100644 owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/__init__.py delete mode 100644 owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/catalog_service/__init__.py delete mode 100644 owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/catalog_service/async_client.py delete mode 100644 owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/catalog_service/client.py delete mode 100644 owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/catalog_service/pagers.py delete mode 100644 owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/catalog_service/transports/README.rst delete mode 100644 owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/catalog_service/transports/__init__.py delete mode 100644 owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/catalog_service/transports/base.py delete mode 100644 owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/catalog_service/transports/grpc.py delete mode 100644 
owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/catalog_service/transports/grpc_asyncio.py delete mode 100644 owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/content_service/__init__.py delete mode 100644 owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/content_service/async_client.py delete mode 100644 owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/content_service/client.py delete mode 100644 owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/content_service/pagers.py delete mode 100644 owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/content_service/transports/README.rst delete mode 100644 owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/content_service/transports/__init__.py delete mode 100644 owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/content_service/transports/base.py delete mode 100644 owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/content_service/transports/grpc.py delete mode 100644 owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/content_service/transports/grpc_asyncio.py delete mode 100644 owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/data_scan_service/__init__.py delete mode 100644 owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/data_scan_service/async_client.py delete mode 100644 owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/data_scan_service/client.py delete mode 100644 owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/data_scan_service/pagers.py delete mode 100644 owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/data_scan_service/transports/README.rst delete mode 100644 owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/data_scan_service/transports/__init__.py delete mode 100644 owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/data_scan_service/transports/base.py delete mode 100644 owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/data_scan_service/transports/grpc.py delete mode 100644 owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/data_scan_service/transports/grpc_asyncio.py delete mode 100644 owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/data_taxonomy_service/__init__.py delete mode 100644 owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/data_taxonomy_service/async_client.py delete mode 100644 owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/data_taxonomy_service/client.py delete mode 100644 owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/data_taxonomy_service/pagers.py delete mode 100644 owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/data_taxonomy_service/transports/README.rst delete mode 100644 owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/data_taxonomy_service/transports/__init__.py delete mode 100644 owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/data_taxonomy_service/transports/base.py delete mode 100644 owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/data_taxonomy_service/transports/grpc.py delete mode 100644 
owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/data_taxonomy_service/transports/grpc_asyncio.py delete mode 100644 owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/dataplex_service/__init__.py delete mode 100644 owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/dataplex_service/async_client.py delete mode 100644 owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/dataplex_service/client.py delete mode 100644 owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/dataplex_service/pagers.py delete mode 100644 owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/dataplex_service/transports/README.rst delete mode 100644 owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/dataplex_service/transports/__init__.py delete mode 100644 owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/dataplex_service/transports/base.py delete mode 100644 owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/dataplex_service/transports/grpc.py delete mode 100644 owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/dataplex_service/transports/grpc_asyncio.py delete mode 100644 owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/metadata_service/__init__.py delete mode 100644 owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/metadata_service/async_client.py delete mode 100644 owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/metadata_service/client.py delete mode 100644 owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/metadata_service/pagers.py delete mode 100644 owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/metadata_service/transports/README.rst delete mode 100644 owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/metadata_service/transports/__init__.py delete mode 100644 owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/metadata_service/transports/base.py delete mode 100644 owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/metadata_service/transports/grpc.py delete mode 100644 owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/metadata_service/transports/grpc_asyncio.py delete mode 100644 owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/types/__init__.py delete mode 100644 owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/types/analyze.py delete mode 100644 owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/types/catalog.py delete mode 100644 owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/types/content.py delete mode 100644 owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/types/data_profile.py delete mode 100644 owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/types/data_quality.py delete mode 100644 owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/types/data_taxonomy.py delete mode 100644 owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/types/datascans.py delete mode 100644 owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/types/logs.py delete mode 100644 owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/types/metadata_.py delete mode 100644 
owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/types/processing.py delete mode 100644 owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/types/resources.py delete mode 100644 owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/types/security.py delete mode 100644 owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/types/service.py delete mode 100644 owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/types/tasks.py delete mode 100644 owl-bot-staging/google-cloud-dataplex/v1/mypy.ini delete mode 100644 owl-bot-staging/google-cloud-dataplex/v1/noxfile.py delete mode 100644 owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_catalog_service_create_aspect_type_async.py delete mode 100644 owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_catalog_service_create_aspect_type_sync.py delete mode 100644 owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_catalog_service_create_entry_async.py delete mode 100644 owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_catalog_service_create_entry_group_async.py delete mode 100644 owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_catalog_service_create_entry_group_sync.py delete mode 100644 owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_catalog_service_create_entry_sync.py delete mode 100644 owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_catalog_service_create_entry_type_async.py delete mode 100644 owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_catalog_service_create_entry_type_sync.py delete mode 100644 owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_catalog_service_delete_aspect_type_async.py delete mode 100644 owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_catalog_service_delete_aspect_type_sync.py delete mode 100644 owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_catalog_service_delete_entry_async.py delete mode 100644 owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_catalog_service_delete_entry_group_async.py delete mode 100644 owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_catalog_service_delete_entry_group_sync.py delete mode 100644 owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_catalog_service_delete_entry_sync.py delete mode 100644 owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_catalog_service_delete_entry_type_async.py delete mode 100644 owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_catalog_service_delete_entry_type_sync.py delete mode 100644 owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_catalog_service_get_aspect_type_async.py delete mode 100644 owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_catalog_service_get_aspect_type_sync.py delete mode 100644 owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_catalog_service_get_entry_async.py delete mode 100644 
owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_catalog_service_get_entry_group_async.py delete mode 100644 owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_catalog_service_get_entry_group_sync.py delete mode 100644 owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_catalog_service_get_entry_sync.py delete mode 100644 owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_catalog_service_get_entry_type_async.py delete mode 100644 owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_catalog_service_get_entry_type_sync.py delete mode 100644 owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_catalog_service_list_aspect_types_async.py delete mode 100644 owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_catalog_service_list_aspect_types_sync.py delete mode 100644 owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_catalog_service_list_entries_async.py delete mode 100644 owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_catalog_service_list_entries_sync.py delete mode 100644 owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_catalog_service_list_entry_groups_async.py delete mode 100644 owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_catalog_service_list_entry_groups_sync.py delete mode 100644 owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_catalog_service_list_entry_types_async.py delete mode 100644 owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_catalog_service_list_entry_types_sync.py delete mode 100644 owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_catalog_service_lookup_entry_async.py delete mode 100644 owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_catalog_service_lookup_entry_sync.py delete mode 100644 owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_catalog_service_search_entries_async.py delete mode 100644 owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_catalog_service_search_entries_sync.py delete mode 100644 owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_catalog_service_update_aspect_type_async.py delete mode 100644 owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_catalog_service_update_aspect_type_sync.py delete mode 100644 owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_catalog_service_update_entry_async.py delete mode 100644 owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_catalog_service_update_entry_group_async.py delete mode 100644 owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_catalog_service_update_entry_group_sync.py delete mode 100644 owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_catalog_service_update_entry_sync.py delete mode 100644 owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_catalog_service_update_entry_type_async.py delete mode 
100644 owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_catalog_service_update_entry_type_sync.py delete mode 100644 owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_content_service_create_content_async.py delete mode 100644 owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_content_service_create_content_sync.py delete mode 100644 owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_content_service_delete_content_async.py delete mode 100644 owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_content_service_delete_content_sync.py delete mode 100644 owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_content_service_get_content_async.py delete mode 100644 owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_content_service_get_content_sync.py delete mode 100644 owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_content_service_get_iam_policy_async.py delete mode 100644 owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_content_service_get_iam_policy_sync.py delete mode 100644 owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_content_service_list_content_async.py delete mode 100644 owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_content_service_list_content_sync.py delete mode 100644 owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_content_service_set_iam_policy_async.py delete mode 100644 owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_content_service_set_iam_policy_sync.py delete mode 100644 owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_content_service_test_iam_permissions_async.py delete mode 100644 owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_content_service_test_iam_permissions_sync.py delete mode 100644 owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_content_service_update_content_async.py delete mode 100644 owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_content_service_update_content_sync.py delete mode 100644 owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_data_scan_service_create_data_scan_async.py delete mode 100644 owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_data_scan_service_create_data_scan_sync.py delete mode 100644 owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_data_scan_service_delete_data_scan_async.py delete mode 100644 owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_data_scan_service_delete_data_scan_sync.py delete mode 100644 owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_data_scan_service_generate_data_quality_rules_async.py delete mode 100644 owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_data_scan_service_generate_data_quality_rules_sync.py delete mode 100644 
owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_data_scan_service_get_data_scan_async.py delete mode 100644 owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_data_scan_service_get_data_scan_job_async.py delete mode 100644 owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_data_scan_service_get_data_scan_job_sync.py delete mode 100644 owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_data_scan_service_get_data_scan_sync.py delete mode 100644 owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_data_scan_service_list_data_scan_jobs_async.py delete mode 100644 owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_data_scan_service_list_data_scan_jobs_sync.py delete mode 100644 owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_data_scan_service_list_data_scans_async.py delete mode 100644 owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_data_scan_service_list_data_scans_sync.py delete mode 100644 owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_data_scan_service_run_data_scan_async.py delete mode 100644 owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_data_scan_service_run_data_scan_sync.py delete mode 100644 owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_data_scan_service_update_data_scan_async.py delete mode 100644 owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_data_scan_service_update_data_scan_sync.py delete mode 100644 owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_data_taxonomy_service_create_data_attribute_async.py delete mode 100644 owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_data_taxonomy_service_create_data_attribute_binding_async.py delete mode 100644 owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_data_taxonomy_service_create_data_attribute_binding_sync.py delete mode 100644 owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_data_taxonomy_service_create_data_attribute_sync.py delete mode 100644 owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_data_taxonomy_service_create_data_taxonomy_async.py delete mode 100644 owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_data_taxonomy_service_create_data_taxonomy_sync.py delete mode 100644 owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_data_taxonomy_service_delete_data_attribute_async.py delete mode 100644 owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_data_taxonomy_service_delete_data_attribute_binding_async.py delete mode 100644 owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_data_taxonomy_service_delete_data_attribute_binding_sync.py delete mode 100644 owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_data_taxonomy_service_delete_data_attribute_sync.py delete mode 100644 
owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_data_taxonomy_service_delete_data_taxonomy_async.py delete mode 100644 owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_data_taxonomy_service_delete_data_taxonomy_sync.py delete mode 100644 owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_data_taxonomy_service_get_data_attribute_async.py delete mode 100644 owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_data_taxonomy_service_get_data_attribute_binding_async.py delete mode 100644 owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_data_taxonomy_service_get_data_attribute_binding_sync.py delete mode 100644 owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_data_taxonomy_service_get_data_attribute_sync.py delete mode 100644 owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_data_taxonomy_service_get_data_taxonomy_async.py delete mode 100644 owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_data_taxonomy_service_get_data_taxonomy_sync.py delete mode 100644 owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_data_taxonomy_service_list_data_attribute_bindings_async.py delete mode 100644 owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_data_taxonomy_service_list_data_attribute_bindings_sync.py delete mode 100644 owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_data_taxonomy_service_list_data_attributes_async.py delete mode 100644 owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_data_taxonomy_service_list_data_attributes_sync.py delete mode 100644 owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_data_taxonomy_service_list_data_taxonomies_async.py delete mode 100644 owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_data_taxonomy_service_list_data_taxonomies_sync.py delete mode 100644 owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_data_taxonomy_service_update_data_attribute_async.py delete mode 100644 owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_data_taxonomy_service_update_data_attribute_binding_async.py delete mode 100644 owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_data_taxonomy_service_update_data_attribute_binding_sync.py delete mode 100644 owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_data_taxonomy_service_update_data_attribute_sync.py delete mode 100644 owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_data_taxonomy_service_update_data_taxonomy_async.py delete mode 100644 owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_data_taxonomy_service_update_data_taxonomy_sync.py delete mode 100644 owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_cancel_job_async.py delete mode 100644 owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_cancel_job_sync.py delete mode 100644 
owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_create_asset_async.py delete mode 100644 owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_create_asset_sync.py delete mode 100644 owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_create_environment_async.py delete mode 100644 owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_create_environment_sync.py delete mode 100644 owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_create_lake_async.py delete mode 100644 owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_create_lake_sync.py delete mode 100644 owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_create_task_async.py delete mode 100644 owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_create_task_sync.py delete mode 100644 owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_create_zone_async.py delete mode 100644 owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_create_zone_sync.py delete mode 100644 owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_delete_asset_async.py delete mode 100644 owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_delete_asset_sync.py delete mode 100644 owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_delete_environment_async.py delete mode 100644 owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_delete_environment_sync.py delete mode 100644 owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_delete_lake_async.py delete mode 100644 owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_delete_lake_sync.py delete mode 100644 owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_delete_task_async.py delete mode 100644 owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_delete_task_sync.py delete mode 100644 owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_delete_zone_async.py delete mode 100644 owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_delete_zone_sync.py delete mode 100644 owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_get_asset_async.py delete mode 100644 owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_get_asset_sync.py delete mode 100644 owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_get_environment_async.py delete mode 100644 owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_get_environment_sync.py delete mode 100644 
owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_get_job_async.py delete mode 100644 owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_get_job_sync.py delete mode 100644 owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_get_lake_async.py delete mode 100644 owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_get_lake_sync.py delete mode 100644 owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_get_task_async.py delete mode 100644 owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_get_task_sync.py delete mode 100644 owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_get_zone_async.py delete mode 100644 owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_get_zone_sync.py delete mode 100644 owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_list_asset_actions_async.py delete mode 100644 owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_list_asset_actions_sync.py delete mode 100644 owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_list_assets_async.py delete mode 100644 owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_list_assets_sync.py delete mode 100644 owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_list_environments_async.py delete mode 100644 owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_list_environments_sync.py delete mode 100644 owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_list_jobs_async.py delete mode 100644 owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_list_jobs_sync.py delete mode 100644 owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_list_lake_actions_async.py delete mode 100644 owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_list_lake_actions_sync.py delete mode 100644 owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_list_lakes_async.py delete mode 100644 owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_list_lakes_sync.py delete mode 100644 owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_list_sessions_async.py delete mode 100644 owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_list_sessions_sync.py delete mode 100644 owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_list_tasks_async.py delete mode 100644 owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_list_tasks_sync.py delete mode 100644 
owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_list_zone_actions_async.py delete mode 100644 owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_list_zone_actions_sync.py delete mode 100644 owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_list_zones_async.py delete mode 100644 owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_list_zones_sync.py delete mode 100644 owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_run_task_async.py delete mode 100644 owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_run_task_sync.py delete mode 100644 owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_update_asset_async.py delete mode 100644 owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_update_asset_sync.py delete mode 100644 owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_update_environment_async.py delete mode 100644 owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_update_environment_sync.py delete mode 100644 owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_update_lake_async.py delete mode 100644 owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_update_lake_sync.py delete mode 100644 owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_update_task_async.py delete mode 100644 owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_update_task_sync.py delete mode 100644 owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_update_zone_async.py delete mode 100644 owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_update_zone_sync.py delete mode 100644 owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_metadata_service_create_entity_async.py delete mode 100644 owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_metadata_service_create_entity_sync.py delete mode 100644 owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_metadata_service_create_partition_async.py delete mode 100644 owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_metadata_service_create_partition_sync.py delete mode 100644 owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_metadata_service_delete_entity_async.py delete mode 100644 owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_metadata_service_delete_entity_sync.py delete mode 100644 owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_metadata_service_delete_partition_async.py delete mode 100644 owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_metadata_service_delete_partition_sync.py delete mode 100644 
owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_metadata_service_get_entity_async.py delete mode 100644 owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_metadata_service_get_entity_sync.py delete mode 100644 owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_metadata_service_get_partition_async.py delete mode 100644 owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_metadata_service_get_partition_sync.py delete mode 100644 owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_metadata_service_list_entities_async.py delete mode 100644 owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_metadata_service_list_entities_sync.py delete mode 100644 owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_metadata_service_list_partitions_async.py delete mode 100644 owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_metadata_service_list_partitions_sync.py delete mode 100644 owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_metadata_service_update_entity_async.py delete mode 100644 owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_metadata_service_update_entity_sync.py delete mode 100644 owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/snippet_metadata_google.cloud.dataplex.v1.json delete mode 100644 owl-bot-staging/google-cloud-dataplex/v1/scripts/fixup_dataplex_v1_keywords.py delete mode 100644 owl-bot-staging/google-cloud-dataplex/v1/setup.py delete mode 100644 owl-bot-staging/google-cloud-dataplex/v1/testing/constraints-3.10.txt delete mode 100644 owl-bot-staging/google-cloud-dataplex/v1/testing/constraints-3.11.txt delete mode 100644 owl-bot-staging/google-cloud-dataplex/v1/testing/constraints-3.12.txt delete mode 100644 owl-bot-staging/google-cloud-dataplex/v1/testing/constraints-3.13.txt delete mode 100644 owl-bot-staging/google-cloud-dataplex/v1/testing/constraints-3.7.txt delete mode 100644 owl-bot-staging/google-cloud-dataplex/v1/testing/constraints-3.8.txt delete mode 100644 owl-bot-staging/google-cloud-dataplex/v1/testing/constraints-3.9.txt delete mode 100644 owl-bot-staging/google-cloud-dataplex/v1/tests/__init__.py delete mode 100644 owl-bot-staging/google-cloud-dataplex/v1/tests/unit/__init__.py delete mode 100644 owl-bot-staging/google-cloud-dataplex/v1/tests/unit/gapic/__init__.py delete mode 100644 owl-bot-staging/google-cloud-dataplex/v1/tests/unit/gapic/dataplex_v1/__init__.py delete mode 100644 owl-bot-staging/google-cloud-dataplex/v1/tests/unit/gapic/dataplex_v1/test_catalog_service.py delete mode 100644 owl-bot-staging/google-cloud-dataplex/v1/tests/unit/gapic/dataplex_v1/test_content_service.py delete mode 100644 owl-bot-staging/google-cloud-dataplex/v1/tests/unit/gapic/dataplex_v1/test_data_scan_service.py delete mode 100644 owl-bot-staging/google-cloud-dataplex/v1/tests/unit/gapic/dataplex_v1/test_data_taxonomy_service.py delete mode 100644 owl-bot-staging/google-cloud-dataplex/v1/tests/unit/gapic/dataplex_v1/test_dataplex_service.py delete mode 100644 owl-bot-staging/google-cloud-dataplex/v1/tests/unit/gapic/dataplex_v1/test_metadata_service.py rename {owl-bot-staging/google-cloud-dataplex/v1 => packages/google-cloud-dataplex}/google/cloud/dataplex_v1/types/data_discovery.py (92%) 
rename {owl-bot-staging/google-cloud-dataplex/v1 => packages/google-cloud-dataplex}/samples/generated_samples/dataplex_v1_generated_catalog_service_cancel_metadata_job_async.py (100%) rename {owl-bot-staging/google-cloud-dataplex/v1 => packages/google-cloud-dataplex}/samples/generated_samples/dataplex_v1_generated_catalog_service_cancel_metadata_job_sync.py (100%) rename {owl-bot-staging/google-cloud-dataplex/v1 => packages/google-cloud-dataplex}/samples/generated_samples/dataplex_v1_generated_catalog_service_create_metadata_job_async.py (100%) rename {owl-bot-staging/google-cloud-dataplex/v1 => packages/google-cloud-dataplex}/samples/generated_samples/dataplex_v1_generated_catalog_service_create_metadata_job_sync.py (100%) rename {owl-bot-staging/google-cloud-dataplex/v1 => packages/google-cloud-dataplex}/samples/generated_samples/dataplex_v1_generated_catalog_service_get_metadata_job_async.py (100%) rename {owl-bot-staging/google-cloud-dataplex/v1 => packages/google-cloud-dataplex}/samples/generated_samples/dataplex_v1_generated_catalog_service_get_metadata_job_sync.py (100%) rename {owl-bot-staging/google-cloud-dataplex/v1 => packages/google-cloud-dataplex}/samples/generated_samples/dataplex_v1_generated_catalog_service_list_metadata_jobs_async.py (100%) rename {owl-bot-staging/google-cloud-dataplex/v1 => packages/google-cloud-dataplex}/samples/generated_samples/dataplex_v1_generated_catalog_service_list_metadata_jobs_sync.py (100%) diff --git a/owl-bot-staging/google-cloud-dataplex/v1/.coveragerc b/owl-bot-staging/google-cloud-dataplex/v1/.coveragerc deleted file mode 100644 index 8df508b38cbc..000000000000 --- a/owl-bot-staging/google-cloud-dataplex/v1/.coveragerc +++ /dev/null @@ -1,13 +0,0 @@ -[run] -branch = True - -[report] -show_missing = True -omit = - google/cloud/dataplex/__init__.py - google/cloud/dataplex/gapic_version.py -exclude_lines = - # Re-enable the standard pragma - pragma: NO COVER - # Ignore debug-only repr - def __repr__ diff --git a/owl-bot-staging/google-cloud-dataplex/v1/.flake8 b/owl-bot-staging/google-cloud-dataplex/v1/.flake8 deleted file mode 100644 index 29227d4cf419..000000000000 --- a/owl-bot-staging/google-cloud-dataplex/v1/.flake8 +++ /dev/null @@ -1,33 +0,0 @@ -# -*- coding: utf-8 -*- -# -# Copyright 2020 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# https://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -# Generated by synthtool. DO NOT EDIT! -[flake8] -ignore = E203, E266, E501, W503 -exclude = - # Exclude generated code. - **/proto/** - **/gapic/** - **/services/** - **/types/** - *_pb2.py - - # Standard linting exemptions. 
- **/.nox/** - __pycache__, - .git, - *.pyc, - conf.py diff --git a/owl-bot-staging/google-cloud-dataplex/v1/MANIFEST.in b/owl-bot-staging/google-cloud-dataplex/v1/MANIFEST.in deleted file mode 100644 index eae3b9346087..000000000000 --- a/owl-bot-staging/google-cloud-dataplex/v1/MANIFEST.in +++ /dev/null @@ -1,2 +0,0 @@ -recursive-include google/cloud/dataplex *.py -recursive-include google/cloud/dataplex_v1 *.py diff --git a/owl-bot-staging/google-cloud-dataplex/v1/README.rst b/owl-bot-staging/google-cloud-dataplex/v1/README.rst deleted file mode 100644 index a1910d929c5f..000000000000 --- a/owl-bot-staging/google-cloud-dataplex/v1/README.rst +++ /dev/null @@ -1,49 +0,0 @@ -Python Client for Google Cloud Dataplex API -================================================= - -Quick Start ------------ - -In order to use this library, you first need to go through the following steps: - -1. `Select or create a Cloud Platform project.`_ -2. `Enable billing for your project.`_ -3. Enable the Google Cloud Dataplex API. -4. `Setup Authentication.`_ - -.. _Select or create a Cloud Platform project.: https://console.cloud.google.com/project -.. _Enable billing for your project.: https://cloud.google.com/billing/docs/how-to/modify-project#enable_billing_for_a_project -.. _Setup Authentication.: https://googleapis.dev/python/google-api-core/latest/auth.html - -Installation -~~~~~~~~~~~~ - -Install this library in a `virtualenv`_ using pip. `virtualenv`_ is a tool to -create isolated Python environments. The basic problem it addresses is one of -dependencies and versions, and indirectly permissions. - -With `virtualenv`_, it's possible to install this library without needing system -install permissions, and without clashing with the installed system -dependencies. - -.. _`virtualenv`: https://virtualenv.pypa.io/en/latest/ - - -Mac/Linux -^^^^^^^^^ - -.. code-block:: console - - python3 -m venv <your-env> - source <your-env>/bin/activate - <your-env>/bin/pip install /path/to/library - - -Windows -^^^^^^^ - -.. code-block:: console - - python3 -m venv <your-env> - <your-env>\Scripts\activate - <your-env>\Scripts\pip.exe install \path\to\library diff --git a/owl-bot-staging/google-cloud-dataplex/v1/docs/_static/custom.css b/owl-bot-staging/google-cloud-dataplex/v1/docs/_static/custom.css deleted file mode 100644 index 06423be0b592..000000000000 --- a/owl-bot-staging/google-cloud-dataplex/v1/docs/_static/custom.css +++ /dev/null @@ -1,3 +0,0 @@ -dl.field-list > dt { - min-width: 100px -} diff --git a/owl-bot-staging/google-cloud-dataplex/v1/docs/conf.py b/owl-bot-staging/google-cloud-dataplex/v1/docs/conf.py deleted file mode 100644 index 56923e180c46..000000000000 --- a/owl-bot-staging/google-cloud-dataplex/v1/docs/conf.py +++ /dev/null @@ -1,376 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# -# google-cloud-dataplex documentation build configuration file -# -# This file is execfile()d with the current directory set to its -# containing dir.
-# -# Note that not all possible configuration values are present in this -# autogenerated file. -# -# All configuration values have a default; values that are commented out -# serve to show the default. - -import sys -import os -import shlex - -# If extensions (or modules to document with autodoc) are in another directory, -# add these directories to sys.path here. If the directory is relative to the -# documentation root, use os.path.abspath to make it absolute, like shown here. -sys.path.insert(0, os.path.abspath("..")) - -__version__ = "0.1.0" - -# -- General configuration ------------------------------------------------ - -# If your documentation needs a minimal Sphinx version, state it here. -needs_sphinx = "4.0.1" - -# Add any Sphinx extension module names here, as strings. They can be -# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom -# ones. -extensions = [ - "sphinx.ext.autodoc", - "sphinx.ext.autosummary", - "sphinx.ext.intersphinx", - "sphinx.ext.coverage", - "sphinx.ext.napoleon", - "sphinx.ext.todo", - "sphinx.ext.viewcode", -] - -# autodoc/autosummary flags -autoclass_content = "both" -autodoc_default_flags = ["members"] -autosummary_generate = True - - -# Add any paths that contain templates here, relative to this directory. -templates_path = ["_templates"] - -# Allow markdown includes (so releases.md can include CHANGELOG.md) -# http://www.sphinx-doc.org/en/master/markdown.html -source_parsers = {".md": "recommonmark.parser.CommonMarkParser"} - -# The suffix(es) of source filenames. -# You can specify multiple suffixes as a list of strings: -source_suffix = [".rst", ".md"] - -# The encoding of source files. -# source_encoding = 'utf-8-sig' - -# The root toctree document. -root_doc = "index" - -# General information about the project. -project = u"google-cloud-dataplex" -copyright = u"2023, Google, LLC" -author = u"Google APIs" # TODO: autogenerate this bit - -# The version info for the project you're documenting, acts as replacement for -# |version| and |release|, also used in various other places throughout the -# built documents. -# -# The full version, including alpha/beta/rc tags. -release = __version__ -# The short X.Y version. -version = ".".join(release.split(".")[0:2]) - -# The language for content autogenerated by Sphinx. Refer to documentation -# for a list of supported languages. -# -# This is also used if you do content translation via gettext catalogs. -# Usually you set "language" from the command line for these cases. -language = 'en' - -# There are two options for replacing |today|: either, you set today to some -# non-false value, then it is used: -# today = '' -# Else, today_fmt is used as the format for a strftime call. -# today_fmt = '%B %d, %Y' - -# List of patterns, relative to source directory, that match files and -# directories to ignore when looking for source files. -exclude_patterns = ["_build"] - -# The reST default role (used for this markup: `text`) to use for all -# documents. -# default_role = None - -# If true, '()' will be appended to :func: etc. cross-reference text. -# add_function_parentheses = True - -# If true, the current module name will be prepended to all description -# unit titles (such as .. function::). -# add_module_names = True - -# If true, sectionauthor and moduleauthor directives will be shown in the -# output. They are ignored by default. -# show_authors = False - -# The name of the Pygments (syntax highlighting) style to use.
-pygments_style = "sphinx" - -# A list of ignored prefixes for module index sorting. -# modindex_common_prefix = [] - -# If true, keep warnings as "system message" paragraphs in the built documents. -# keep_warnings = False - -# If true, `todo` and `todoList` produce output, else they produce nothing. -todo_include_todos = True - - -# -- Options for HTML output ---------------------------------------------- - -# The theme to use for HTML and HTML Help pages. See the documentation for -# a list of builtin themes. -html_theme = "alabaster" - -# Theme options are theme-specific and customize the look and feel of a theme -# further. For a list of options available for each theme, see the -# documentation. -html_theme_options = { - "description": "Google Cloud Client Libraries for Python", - "github_user": "googleapis", - "github_repo": "google-cloud-python", - "github_banner": True, - "font_family": "'Roboto', Georgia, sans", - "head_font_family": "'Roboto', Georgia, serif", - "code_font_family": "'Roboto Mono', 'Consolas', monospace", -} - -# Add any paths that contain custom themes here, relative to this directory. -# html_theme_path = [] - -# The name for this set of Sphinx documents. If None, it defaults to -# "<project> v<release> documentation". -# html_title = None - -# A shorter title for the navigation bar. Default is the same as html_title. -# html_short_title = None - -# The name of an image file (relative to this directory) to place at the top -# of the sidebar. -# html_logo = None - -# The name of an image file (within the static path) to use as favicon of the -# docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32 -# pixels large. -# html_favicon = None - -# Add any paths that contain custom static files (such as style sheets) here, -# relative to this directory. They are copied after the builtin static files, -# so a file named "default.css" will overwrite the builtin "default.css". -html_static_path = ["_static"] - -# Add any extra paths that contain custom files (such as robots.txt or -# .htaccess) here, relative to this directory. These files are copied -# directly to the root of the documentation. -# html_extra_path = [] - -# If not '', a 'Last updated on:' timestamp is inserted at every page bottom, -# using the given strftime format. -# html_last_updated_fmt = '%b %d, %Y' - -# If true, SmartyPants will be used to convert quotes and dashes to -# typographically correct entities. -# html_use_smartypants = True - -# Custom sidebar templates, maps document names to template names. -# html_sidebars = {} - -# Additional templates that should be rendered to pages, maps page names to -# template names. -# html_additional_pages = {} - -# If false, no module index is generated. -# html_domain_indices = True - -# If false, no index is generated. -# html_use_index = True - -# If true, the index is split into individual pages for each letter. -# html_split_index = False - -# If true, links to the reST sources are added to the pages. -# html_show_sourcelink = True - -# If true, "Created using Sphinx" is shown in the HTML footer. Default is True. -# html_show_sphinx = True - -# If true, "(C) Copyright ..." is shown in the HTML footer. Default is True. -# html_show_copyright = True - -# If true, an OpenSearch description file will be output, and all pages will -# contain a <link> tag referring to it. The value of this option must be the -# base URL from which the finished HTML is served. -# html_use_opensearch = '' - -# This is the file name suffix for HTML files (e.g. ".xhtml").
-# html_file_suffix = None - -# Language to be used for generating the HTML full-text search index. -# Sphinx supports the following languages: -# 'da', 'de', 'en', 'es', 'fi', 'fr', 'hu', 'it', 'ja' -# 'nl', 'no', 'pt', 'ro', 'ru', 'sv', 'tr' -# html_search_language = 'en' - -# A dictionary with options for the search language support, empty by default. -# Now only 'ja' uses this config value -# html_search_options = {'type': 'default'} - -# The name of a javascript file (relative to the configuration directory) that -# implements a search results scorer. If empty, the default will be used. -# html_search_scorer = 'scorer.js' - -# Output file base name for HTML help builder. -htmlhelp_basename = "google-cloud-dataplex-doc" - -# -- Options for warnings ------------------------------------------------------ - - -suppress_warnings = [ - # Temporarily suppress this to avoid "more than one target found for - # cross-reference" warning, which are intractable for us to avoid while in - # a mono-repo. - # See https://github.com/sphinx-doc/sphinx/blob - # /2a65ffeef5c107c19084fabdd706cdff3f52d93c/sphinx/domains/python.py#L843 - "ref.python" -] - -# -- Options for LaTeX output --------------------------------------------- - -latex_elements = { - # The paper size ('letterpaper' or 'a4paper'). - # 'papersize': 'letterpaper', - # The font size ('10pt', '11pt' or '12pt'). - # 'pointsize': '10pt', - # Additional stuff for the LaTeX preamble. - # 'preamble': '', - # Latex figure (float) alignment - # 'figure_align': 'htbp', -} - -# Grouping the document tree into LaTeX files. List of tuples -# (source start file, target name, title, -# author, documentclass [howto, manual, or own class]). -latex_documents = [ - ( - root_doc, - "google-cloud-dataplex.tex", - u"google-cloud-dataplex Documentation", - author, - "manual", - ) -] - -# The name of an image file (relative to this directory) to place at the top of -# the title page. -# latex_logo = None - -# For "manual" documents, if this is true, then toplevel headings are parts, -# not chapters. -# latex_use_parts = False - -# If true, show page references after internal links. -# latex_show_pagerefs = False - -# If true, show URL addresses after external links. -# latex_show_urls = False - -# Documents to append as an appendix to all manuals. -# latex_appendices = [] - -# If false, no module index is generated. -# latex_domain_indices = True - - -# -- Options for manual page output --------------------------------------- - -# One entry per manual page. List of tuples -# (source start file, name, description, authors, manual section). -man_pages = [ - ( - root_doc, - "google-cloud-dataplex", - u"Google Cloud Dataplex Documentation", - [author], - 1, - ) -] - -# If true, show URL addresses after external links. -# man_show_urls = False - - -# -- Options for Texinfo output ------------------------------------------- - -# Grouping the document tree into Texinfo files. List of tuples -# (source start file, target name, title, author, -# dir menu entry, description, category) -texinfo_documents = [ - ( - root_doc, - "google-cloud-dataplex", - u"google-cloud-dataplex Documentation", - author, - "google-cloud-dataplex", - "GAPIC library for Google Cloud Dataplex API", - "APIs", - ) -] - -# Documents to append as an appendix to all manuals. -# texinfo_appendices = [] - -# If false, no module index is generated. -# texinfo_domain_indices = True - -# How to display URL addresses: 'footnote', 'no', or 'inline'. 
-# texinfo_show_urls = 'footnote' - -# If true, do not generate a @detailmenu in the "Top" node's menu. -# texinfo_no_detailmenu = False - - -# Example configuration for intersphinx: refer to the Python standard library. -intersphinx_mapping = { - "python": ("http://python.readthedocs.org/en/latest/", None), - "gax": ("https://gax-python.readthedocs.org/en/latest/", None), - "google-auth": ("https://google-auth.readthedocs.io/en/stable", None), - "google-gax": ("https://gax-python.readthedocs.io/en/latest/", None), - "google.api_core": ("https://googleapis.dev/python/google-api-core/latest/", None), - "grpc": ("https://grpc.io/grpc/python/", None), - "requests": ("http://requests.kennethreitz.org/en/stable/", None), - "proto": ("https://proto-plus-python.readthedocs.io/en/stable", None), - "protobuf": ("https://googleapis.dev/python/protobuf/latest/", None), -} - - -# Napoleon settings -napoleon_google_docstring = True -napoleon_numpy_docstring = True -napoleon_include_private_with_doc = False -napoleon_include_special_with_doc = True -napoleon_use_admonition_for_examples = False -napoleon_use_admonition_for_notes = False -napoleon_use_admonition_for_references = False -napoleon_use_ivar = False -napoleon_use_param = True -napoleon_use_rtype = True diff --git a/owl-bot-staging/google-cloud-dataplex/v1/docs/dataplex_v1/catalog_service.rst b/owl-bot-staging/google-cloud-dataplex/v1/docs/dataplex_v1/catalog_service.rst deleted file mode 100644 index ef6306fadb87..000000000000 --- a/owl-bot-staging/google-cloud-dataplex/v1/docs/dataplex_v1/catalog_service.rst +++ /dev/null @@ -1,10 +0,0 @@ -CatalogService --------------------------------- - -.. automodule:: google.cloud.dataplex_v1.services.catalog_service - :members: - :inherited-members: - -.. automodule:: google.cloud.dataplex_v1.services.catalog_service.pagers - :members: - :inherited-members: diff --git a/owl-bot-staging/google-cloud-dataplex/v1/docs/dataplex_v1/content_service.rst b/owl-bot-staging/google-cloud-dataplex/v1/docs/dataplex_v1/content_service.rst deleted file mode 100644 index ce3774365501..000000000000 --- a/owl-bot-staging/google-cloud-dataplex/v1/docs/dataplex_v1/content_service.rst +++ /dev/null @@ -1,10 +0,0 @@ -ContentService --------------------------------- - -.. automodule:: google.cloud.dataplex_v1.services.content_service - :members: - :inherited-members: - -.. automodule:: google.cloud.dataplex_v1.services.content_service.pagers - :members: - :inherited-members: diff --git a/owl-bot-staging/google-cloud-dataplex/v1/docs/dataplex_v1/data_scan_service.rst b/owl-bot-staging/google-cloud-dataplex/v1/docs/dataplex_v1/data_scan_service.rst deleted file mode 100644 index c9281cda5823..000000000000 --- a/owl-bot-staging/google-cloud-dataplex/v1/docs/dataplex_v1/data_scan_service.rst +++ /dev/null @@ -1,10 +0,0 @@ -DataScanService ---------------------------------- - -.. automodule:: google.cloud.dataplex_v1.services.data_scan_service - :members: - :inherited-members: - -.. automodule:: google.cloud.dataplex_v1.services.data_scan_service.pagers - :members: - :inherited-members: diff --git a/owl-bot-staging/google-cloud-dataplex/v1/docs/dataplex_v1/data_taxonomy_service.rst b/owl-bot-staging/google-cloud-dataplex/v1/docs/dataplex_v1/data_taxonomy_service.rst deleted file mode 100644 index b2a185a3c43f..000000000000 --- a/owl-bot-staging/google-cloud-dataplex/v1/docs/dataplex_v1/data_taxonomy_service.rst +++ /dev/null @@ -1,10 +0,0 @@ -DataTaxonomyService -------------------------------------- - -.. 
automodule:: google.cloud.dataplex_v1.services.data_taxonomy_service - :members: - :inherited-members: - -.. automodule:: google.cloud.dataplex_v1.services.data_taxonomy_service.pagers - :members: - :inherited-members: diff --git a/owl-bot-staging/google-cloud-dataplex/v1/docs/dataplex_v1/dataplex_service.rst b/owl-bot-staging/google-cloud-dataplex/v1/docs/dataplex_v1/dataplex_service.rst deleted file mode 100644 index 5ecb20ccef96..000000000000 --- a/owl-bot-staging/google-cloud-dataplex/v1/docs/dataplex_v1/dataplex_service.rst +++ /dev/null @@ -1,10 +0,0 @@ -DataplexService ---------------------------------- - -.. automodule:: google.cloud.dataplex_v1.services.dataplex_service - :members: - :inherited-members: - -.. automodule:: google.cloud.dataplex_v1.services.dataplex_service.pagers - :members: - :inherited-members: diff --git a/owl-bot-staging/google-cloud-dataplex/v1/docs/dataplex_v1/metadata_service.rst b/owl-bot-staging/google-cloud-dataplex/v1/docs/dataplex_v1/metadata_service.rst deleted file mode 100644 index d5bf19660ab5..000000000000 --- a/owl-bot-staging/google-cloud-dataplex/v1/docs/dataplex_v1/metadata_service.rst +++ /dev/null @@ -1,10 +0,0 @@ -MetadataService ---------------------------------- - -.. automodule:: google.cloud.dataplex_v1.services.metadata_service - :members: - :inherited-members: - -.. automodule:: google.cloud.dataplex_v1.services.metadata_service.pagers - :members: - :inherited-members: diff --git a/owl-bot-staging/google-cloud-dataplex/v1/docs/dataplex_v1/services_.rst b/owl-bot-staging/google-cloud-dataplex/v1/docs/dataplex_v1/services_.rst deleted file mode 100644 index 42044fdd76d1..000000000000 --- a/owl-bot-staging/google-cloud-dataplex/v1/docs/dataplex_v1/services_.rst +++ /dev/null @@ -1,11 +0,0 @@ -Services for Google Cloud Dataplex v1 API -========================================= -.. toctree:: - :maxdepth: 2 - - catalog_service - content_service - dataplex_service - data_scan_service - data_taxonomy_service - metadata_service diff --git a/owl-bot-staging/google-cloud-dataplex/v1/docs/dataplex_v1/types_.rst b/owl-bot-staging/google-cloud-dataplex/v1/docs/dataplex_v1/types_.rst deleted file mode 100644 index 391acd51ef80..000000000000 --- a/owl-bot-staging/google-cloud-dataplex/v1/docs/dataplex_v1/types_.rst +++ /dev/null @@ -1,6 +0,0 @@ -Types for Google Cloud Dataplex v1 API -====================================== - -.. automodule:: google.cloud.dataplex_v1.types - :members: - :show-inheritance: diff --git a/owl-bot-staging/google-cloud-dataplex/v1/docs/index.rst b/owl-bot-staging/google-cloud-dataplex/v1/docs/index.rst deleted file mode 100644 index 56aeec907611..000000000000 --- a/owl-bot-staging/google-cloud-dataplex/v1/docs/index.rst +++ /dev/null @@ -1,7 +0,0 @@ -API Reference -------------- -.. toctree:: - :maxdepth: 2 - - dataplex_v1/services_ - dataplex_v1/types_ diff --git a/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex/__init__.py b/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex/__init__.py deleted file mode 100644 index ce1a3bf8386c..000000000000 --- a/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex/__init__.py +++ /dev/null @@ -1,407 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -from google.cloud.dataplex import gapic_version as package_version - -__version__ = package_version.__version__ - - -from google.cloud.dataplex_v1.services.catalog_service.client import CatalogServiceClient -from google.cloud.dataplex_v1.services.catalog_service.async_client import CatalogServiceAsyncClient -from google.cloud.dataplex_v1.services.content_service.client import ContentServiceClient -from google.cloud.dataplex_v1.services.content_service.async_client import ContentServiceAsyncClient -from google.cloud.dataplex_v1.services.dataplex_service.client import DataplexServiceClient -from google.cloud.dataplex_v1.services.dataplex_service.async_client import DataplexServiceAsyncClient -from google.cloud.dataplex_v1.services.data_scan_service.client import DataScanServiceClient -from google.cloud.dataplex_v1.services.data_scan_service.async_client import DataScanServiceAsyncClient -from google.cloud.dataplex_v1.services.data_taxonomy_service.client import DataTaxonomyServiceClient -from google.cloud.dataplex_v1.services.data_taxonomy_service.async_client import DataTaxonomyServiceAsyncClient -from google.cloud.dataplex_v1.services.metadata_service.client import MetadataServiceClient -from google.cloud.dataplex_v1.services.metadata_service.async_client import MetadataServiceAsyncClient - -from google.cloud.dataplex_v1.types.analyze import Content -from google.cloud.dataplex_v1.types.analyze import Environment -from google.cloud.dataplex_v1.types.analyze import Session -from google.cloud.dataplex_v1.types.catalog import Aspect -from google.cloud.dataplex_v1.types.catalog import AspectSource -from google.cloud.dataplex_v1.types.catalog import AspectType -from google.cloud.dataplex_v1.types.catalog import CancelMetadataJobRequest -from google.cloud.dataplex_v1.types.catalog import CreateAspectTypeRequest -from google.cloud.dataplex_v1.types.catalog import CreateEntryGroupRequest -from google.cloud.dataplex_v1.types.catalog import CreateEntryRequest -from google.cloud.dataplex_v1.types.catalog import CreateEntryTypeRequest -from google.cloud.dataplex_v1.types.catalog import CreateMetadataJobRequest -from google.cloud.dataplex_v1.types.catalog import DeleteAspectTypeRequest -from google.cloud.dataplex_v1.types.catalog import DeleteEntryGroupRequest -from google.cloud.dataplex_v1.types.catalog import DeleteEntryRequest -from google.cloud.dataplex_v1.types.catalog import DeleteEntryTypeRequest -from google.cloud.dataplex_v1.types.catalog import Entry -from google.cloud.dataplex_v1.types.catalog import EntryGroup -from google.cloud.dataplex_v1.types.catalog import EntrySource -from google.cloud.dataplex_v1.types.catalog import EntryType -from google.cloud.dataplex_v1.types.catalog import GetAspectTypeRequest -from google.cloud.dataplex_v1.types.catalog import GetEntryGroupRequest -from google.cloud.dataplex_v1.types.catalog import GetEntryRequest -from google.cloud.dataplex_v1.types.catalog import GetEntryTypeRequest -from google.cloud.dataplex_v1.types.catalog import GetMetadataJobRequest -from google.cloud.dataplex_v1.types.catalog import ImportItem -from 
google.cloud.dataplex_v1.types.catalog import ListAspectTypesRequest -from google.cloud.dataplex_v1.types.catalog import ListAspectTypesResponse -from google.cloud.dataplex_v1.types.catalog import ListEntriesRequest -from google.cloud.dataplex_v1.types.catalog import ListEntriesResponse -from google.cloud.dataplex_v1.types.catalog import ListEntryGroupsRequest -from google.cloud.dataplex_v1.types.catalog import ListEntryGroupsResponse -from google.cloud.dataplex_v1.types.catalog import ListEntryTypesRequest -from google.cloud.dataplex_v1.types.catalog import ListEntryTypesResponse -from google.cloud.dataplex_v1.types.catalog import ListMetadataJobsRequest -from google.cloud.dataplex_v1.types.catalog import ListMetadataJobsResponse -from google.cloud.dataplex_v1.types.catalog import LookupEntryRequest -from google.cloud.dataplex_v1.types.catalog import MetadataJob -from google.cloud.dataplex_v1.types.catalog import SearchEntriesRequest -from google.cloud.dataplex_v1.types.catalog import SearchEntriesResponse -from google.cloud.dataplex_v1.types.catalog import SearchEntriesResult -from google.cloud.dataplex_v1.types.catalog import UpdateAspectTypeRequest -from google.cloud.dataplex_v1.types.catalog import UpdateEntryGroupRequest -from google.cloud.dataplex_v1.types.catalog import UpdateEntryRequest -from google.cloud.dataplex_v1.types.catalog import UpdateEntryTypeRequest -from google.cloud.dataplex_v1.types.catalog import EntryView -from google.cloud.dataplex_v1.types.catalog import TransferStatus -from google.cloud.dataplex_v1.types.content import CreateContentRequest -from google.cloud.dataplex_v1.types.content import DeleteContentRequest -from google.cloud.dataplex_v1.types.content import GetContentRequest -from google.cloud.dataplex_v1.types.content import ListContentRequest -from google.cloud.dataplex_v1.types.content import ListContentResponse -from google.cloud.dataplex_v1.types.content import UpdateContentRequest -from google.cloud.dataplex_v1.types.data_discovery import DataDiscoveryResult -from google.cloud.dataplex_v1.types.data_discovery import DataDiscoverySpec -from google.cloud.dataplex_v1.types.data_profile import DataProfileResult -from google.cloud.dataplex_v1.types.data_profile import DataProfileSpec -from google.cloud.dataplex_v1.types.data_quality import DataQualityColumnResult -from google.cloud.dataplex_v1.types.data_quality import DataQualityDimension -from google.cloud.dataplex_v1.types.data_quality import DataQualityDimensionResult -from google.cloud.dataplex_v1.types.data_quality import DataQualityResult -from google.cloud.dataplex_v1.types.data_quality import DataQualityRule -from google.cloud.dataplex_v1.types.data_quality import DataQualityRuleResult -from google.cloud.dataplex_v1.types.data_quality import DataQualitySpec -from google.cloud.dataplex_v1.types.data_taxonomy import CreateDataAttributeBindingRequest -from google.cloud.dataplex_v1.types.data_taxonomy import CreateDataAttributeRequest -from google.cloud.dataplex_v1.types.data_taxonomy import CreateDataTaxonomyRequest -from google.cloud.dataplex_v1.types.data_taxonomy import DataAttribute -from google.cloud.dataplex_v1.types.data_taxonomy import DataAttributeBinding -from google.cloud.dataplex_v1.types.data_taxonomy import DataTaxonomy -from google.cloud.dataplex_v1.types.data_taxonomy import DeleteDataAttributeBindingRequest -from google.cloud.dataplex_v1.types.data_taxonomy import DeleteDataAttributeRequest -from google.cloud.dataplex_v1.types.data_taxonomy import DeleteDataTaxonomyRequest -from 
google.cloud.dataplex_v1.types.data_taxonomy import GetDataAttributeBindingRequest -from google.cloud.dataplex_v1.types.data_taxonomy import GetDataAttributeRequest -from google.cloud.dataplex_v1.types.data_taxonomy import GetDataTaxonomyRequest -from google.cloud.dataplex_v1.types.data_taxonomy import ListDataAttributeBindingsRequest -from google.cloud.dataplex_v1.types.data_taxonomy import ListDataAttributeBindingsResponse -from google.cloud.dataplex_v1.types.data_taxonomy import ListDataAttributesRequest -from google.cloud.dataplex_v1.types.data_taxonomy import ListDataAttributesResponse -from google.cloud.dataplex_v1.types.data_taxonomy import ListDataTaxonomiesRequest -from google.cloud.dataplex_v1.types.data_taxonomy import ListDataTaxonomiesResponse -from google.cloud.dataplex_v1.types.data_taxonomy import UpdateDataAttributeBindingRequest -from google.cloud.dataplex_v1.types.data_taxonomy import UpdateDataAttributeRequest -from google.cloud.dataplex_v1.types.data_taxonomy import UpdateDataTaxonomyRequest -from google.cloud.dataplex_v1.types.datascans import CreateDataScanRequest -from google.cloud.dataplex_v1.types.datascans import DataScan -from google.cloud.dataplex_v1.types.datascans import DataScanJob -from google.cloud.dataplex_v1.types.datascans import DeleteDataScanRequest -from google.cloud.dataplex_v1.types.datascans import GenerateDataQualityRulesRequest -from google.cloud.dataplex_v1.types.datascans import GenerateDataQualityRulesResponse -from google.cloud.dataplex_v1.types.datascans import GetDataScanJobRequest -from google.cloud.dataplex_v1.types.datascans import GetDataScanRequest -from google.cloud.dataplex_v1.types.datascans import ListDataScanJobsRequest -from google.cloud.dataplex_v1.types.datascans import ListDataScanJobsResponse -from google.cloud.dataplex_v1.types.datascans import ListDataScansRequest -from google.cloud.dataplex_v1.types.datascans import ListDataScansResponse -from google.cloud.dataplex_v1.types.datascans import RunDataScanRequest -from google.cloud.dataplex_v1.types.datascans import RunDataScanResponse -from google.cloud.dataplex_v1.types.datascans import UpdateDataScanRequest -from google.cloud.dataplex_v1.types.datascans import DataScanType -from google.cloud.dataplex_v1.types.logs import DataQualityScanRuleResult -from google.cloud.dataplex_v1.types.logs import DataScanEvent -from google.cloud.dataplex_v1.types.logs import DiscoveryEvent -from google.cloud.dataplex_v1.types.logs import GovernanceEvent -from google.cloud.dataplex_v1.types.logs import JobEvent -from google.cloud.dataplex_v1.types.logs import SessionEvent -from google.cloud.dataplex_v1.types.metadata_ import CreateEntityRequest -from google.cloud.dataplex_v1.types.metadata_ import CreatePartitionRequest -from google.cloud.dataplex_v1.types.metadata_ import DeleteEntityRequest -from google.cloud.dataplex_v1.types.metadata_ import DeletePartitionRequest -from google.cloud.dataplex_v1.types.metadata_ import Entity -from google.cloud.dataplex_v1.types.metadata_ import GetEntityRequest -from google.cloud.dataplex_v1.types.metadata_ import GetPartitionRequest -from google.cloud.dataplex_v1.types.metadata_ import ListEntitiesRequest -from google.cloud.dataplex_v1.types.metadata_ import ListEntitiesResponse -from google.cloud.dataplex_v1.types.metadata_ import ListPartitionsRequest -from google.cloud.dataplex_v1.types.metadata_ import ListPartitionsResponse -from google.cloud.dataplex_v1.types.metadata_ import Partition -from google.cloud.dataplex_v1.types.metadata_ import Schema 
-from google.cloud.dataplex_v1.types.metadata_ import StorageAccess -from google.cloud.dataplex_v1.types.metadata_ import StorageFormat -from google.cloud.dataplex_v1.types.metadata_ import UpdateEntityRequest -from google.cloud.dataplex_v1.types.metadata_ import StorageSystem -from google.cloud.dataplex_v1.types.processing import DataSource -from google.cloud.dataplex_v1.types.processing import ScannedData -from google.cloud.dataplex_v1.types.processing import Trigger -from google.cloud.dataplex_v1.types.resources import Action -from google.cloud.dataplex_v1.types.resources import Asset -from google.cloud.dataplex_v1.types.resources import AssetStatus -from google.cloud.dataplex_v1.types.resources import Lake -from google.cloud.dataplex_v1.types.resources import Zone -from google.cloud.dataplex_v1.types.resources import State -from google.cloud.dataplex_v1.types.security import DataAccessSpec -from google.cloud.dataplex_v1.types.security import ResourceAccessSpec -from google.cloud.dataplex_v1.types.service import CancelJobRequest -from google.cloud.dataplex_v1.types.service import CreateAssetRequest -from google.cloud.dataplex_v1.types.service import CreateEnvironmentRequest -from google.cloud.dataplex_v1.types.service import CreateLakeRequest -from google.cloud.dataplex_v1.types.service import CreateTaskRequest -from google.cloud.dataplex_v1.types.service import CreateZoneRequest -from google.cloud.dataplex_v1.types.service import DeleteAssetRequest -from google.cloud.dataplex_v1.types.service import DeleteEnvironmentRequest -from google.cloud.dataplex_v1.types.service import DeleteLakeRequest -from google.cloud.dataplex_v1.types.service import DeleteTaskRequest -from google.cloud.dataplex_v1.types.service import DeleteZoneRequest -from google.cloud.dataplex_v1.types.service import GetAssetRequest -from google.cloud.dataplex_v1.types.service import GetEnvironmentRequest -from google.cloud.dataplex_v1.types.service import GetJobRequest -from google.cloud.dataplex_v1.types.service import GetLakeRequest -from google.cloud.dataplex_v1.types.service import GetTaskRequest -from google.cloud.dataplex_v1.types.service import GetZoneRequest -from google.cloud.dataplex_v1.types.service import ListActionsResponse -from google.cloud.dataplex_v1.types.service import ListAssetActionsRequest -from google.cloud.dataplex_v1.types.service import ListAssetsRequest -from google.cloud.dataplex_v1.types.service import ListAssetsResponse -from google.cloud.dataplex_v1.types.service import ListEnvironmentsRequest -from google.cloud.dataplex_v1.types.service import ListEnvironmentsResponse -from google.cloud.dataplex_v1.types.service import ListJobsRequest -from google.cloud.dataplex_v1.types.service import ListJobsResponse -from google.cloud.dataplex_v1.types.service import ListLakeActionsRequest -from google.cloud.dataplex_v1.types.service import ListLakesRequest -from google.cloud.dataplex_v1.types.service import ListLakesResponse -from google.cloud.dataplex_v1.types.service import ListSessionsRequest -from google.cloud.dataplex_v1.types.service import ListSessionsResponse -from google.cloud.dataplex_v1.types.service import ListTasksRequest -from google.cloud.dataplex_v1.types.service import ListTasksResponse -from google.cloud.dataplex_v1.types.service import ListZoneActionsRequest -from google.cloud.dataplex_v1.types.service import ListZonesRequest -from google.cloud.dataplex_v1.types.service import ListZonesResponse -from google.cloud.dataplex_v1.types.service import OperationMetadata -from 
google.cloud.dataplex_v1.types.service import RunTaskRequest -from google.cloud.dataplex_v1.types.service import RunTaskResponse -from google.cloud.dataplex_v1.types.service import UpdateAssetRequest -from google.cloud.dataplex_v1.types.service import UpdateEnvironmentRequest -from google.cloud.dataplex_v1.types.service import UpdateLakeRequest -from google.cloud.dataplex_v1.types.service import UpdateTaskRequest -from google.cloud.dataplex_v1.types.service import UpdateZoneRequest -from google.cloud.dataplex_v1.types.tasks import Job -from google.cloud.dataplex_v1.types.tasks import Task - -__all__ = ('CatalogServiceClient', - 'CatalogServiceAsyncClient', - 'ContentServiceClient', - 'ContentServiceAsyncClient', - 'DataplexServiceClient', - 'DataplexServiceAsyncClient', - 'DataScanServiceClient', - 'DataScanServiceAsyncClient', - 'DataTaxonomyServiceClient', - 'DataTaxonomyServiceAsyncClient', - 'MetadataServiceClient', - 'MetadataServiceAsyncClient', - 'Content', - 'Environment', - 'Session', - 'Aspect', - 'AspectSource', - 'AspectType', - 'CancelMetadataJobRequest', - 'CreateAspectTypeRequest', - 'CreateEntryGroupRequest', - 'CreateEntryRequest', - 'CreateEntryTypeRequest', - 'CreateMetadataJobRequest', - 'DeleteAspectTypeRequest', - 'DeleteEntryGroupRequest', - 'DeleteEntryRequest', - 'DeleteEntryTypeRequest', - 'Entry', - 'EntryGroup', - 'EntrySource', - 'EntryType', - 'GetAspectTypeRequest', - 'GetEntryGroupRequest', - 'GetEntryRequest', - 'GetEntryTypeRequest', - 'GetMetadataJobRequest', - 'ImportItem', - 'ListAspectTypesRequest', - 'ListAspectTypesResponse', - 'ListEntriesRequest', - 'ListEntriesResponse', - 'ListEntryGroupsRequest', - 'ListEntryGroupsResponse', - 'ListEntryTypesRequest', - 'ListEntryTypesResponse', - 'ListMetadataJobsRequest', - 'ListMetadataJobsResponse', - 'LookupEntryRequest', - 'MetadataJob', - 'SearchEntriesRequest', - 'SearchEntriesResponse', - 'SearchEntriesResult', - 'UpdateAspectTypeRequest', - 'UpdateEntryGroupRequest', - 'UpdateEntryRequest', - 'UpdateEntryTypeRequest', - 'EntryView', - 'TransferStatus', - 'CreateContentRequest', - 'DeleteContentRequest', - 'GetContentRequest', - 'ListContentRequest', - 'ListContentResponse', - 'UpdateContentRequest', - 'DataDiscoveryResult', - 'DataDiscoverySpec', - 'DataProfileResult', - 'DataProfileSpec', - 'DataQualityColumnResult', - 'DataQualityDimension', - 'DataQualityDimensionResult', - 'DataQualityResult', - 'DataQualityRule', - 'DataQualityRuleResult', - 'DataQualitySpec', - 'CreateDataAttributeBindingRequest', - 'CreateDataAttributeRequest', - 'CreateDataTaxonomyRequest', - 'DataAttribute', - 'DataAttributeBinding', - 'DataTaxonomy', - 'DeleteDataAttributeBindingRequest', - 'DeleteDataAttributeRequest', - 'DeleteDataTaxonomyRequest', - 'GetDataAttributeBindingRequest', - 'GetDataAttributeRequest', - 'GetDataTaxonomyRequest', - 'ListDataAttributeBindingsRequest', - 'ListDataAttributeBindingsResponse', - 'ListDataAttributesRequest', - 'ListDataAttributesResponse', - 'ListDataTaxonomiesRequest', - 'ListDataTaxonomiesResponse', - 'UpdateDataAttributeBindingRequest', - 'UpdateDataAttributeRequest', - 'UpdateDataTaxonomyRequest', - 'CreateDataScanRequest', - 'DataScan', - 'DataScanJob', - 'DeleteDataScanRequest', - 'GenerateDataQualityRulesRequest', - 'GenerateDataQualityRulesResponse', - 'GetDataScanJobRequest', - 'GetDataScanRequest', - 'ListDataScanJobsRequest', - 'ListDataScanJobsResponse', - 'ListDataScansRequest', - 'ListDataScansResponse', - 'RunDataScanRequest', - 'RunDataScanResponse', - 
'UpdateDataScanRequest', - 'DataScanType', - 'DataQualityScanRuleResult', - 'DataScanEvent', - 'DiscoveryEvent', - 'GovernanceEvent', - 'JobEvent', - 'SessionEvent', - 'CreateEntityRequest', - 'CreatePartitionRequest', - 'DeleteEntityRequest', - 'DeletePartitionRequest', - 'Entity', - 'GetEntityRequest', - 'GetPartitionRequest', - 'ListEntitiesRequest', - 'ListEntitiesResponse', - 'ListPartitionsRequest', - 'ListPartitionsResponse', - 'Partition', - 'Schema', - 'StorageAccess', - 'StorageFormat', - 'UpdateEntityRequest', - 'StorageSystem', - 'DataSource', - 'ScannedData', - 'Trigger', - 'Action', - 'Asset', - 'AssetStatus', - 'Lake', - 'Zone', - 'State', - 'DataAccessSpec', - 'ResourceAccessSpec', - 'CancelJobRequest', - 'CreateAssetRequest', - 'CreateEnvironmentRequest', - 'CreateLakeRequest', - 'CreateTaskRequest', - 'CreateZoneRequest', - 'DeleteAssetRequest', - 'DeleteEnvironmentRequest', - 'DeleteLakeRequest', - 'DeleteTaskRequest', - 'DeleteZoneRequest', - 'GetAssetRequest', - 'GetEnvironmentRequest', - 'GetJobRequest', - 'GetLakeRequest', - 'GetTaskRequest', - 'GetZoneRequest', - 'ListActionsResponse', - 'ListAssetActionsRequest', - 'ListAssetsRequest', - 'ListAssetsResponse', - 'ListEnvironmentsRequest', - 'ListEnvironmentsResponse', - 'ListJobsRequest', - 'ListJobsResponse', - 'ListLakeActionsRequest', - 'ListLakesRequest', - 'ListLakesResponse', - 'ListSessionsRequest', - 'ListSessionsResponse', - 'ListTasksRequest', - 'ListTasksResponse', - 'ListZoneActionsRequest', - 'ListZonesRequest', - 'ListZonesResponse', - 'OperationMetadata', - 'RunTaskRequest', - 'RunTaskResponse', - 'UpdateAssetRequest', - 'UpdateEnvironmentRequest', - 'UpdateLakeRequest', - 'UpdateTaskRequest', - 'UpdateZoneRequest', - 'Job', - 'Task', -) diff --git a/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex/gapic_version.py b/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex/gapic_version.py deleted file mode 100644 index 558c8aab67c5..000000000000 --- a/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex/gapic_version.py +++ /dev/null @@ -1,16 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -__version__ = "0.0.0" # {x-release-please-version} diff --git a/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex/py.typed b/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex/py.typed deleted file mode 100644 index c932c263028e..000000000000 --- a/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex/py.typed +++ /dev/null @@ -1,2 +0,0 @@ -# Marker file for PEP 561. -# The google-cloud-dataplex package uses inline types. 
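The deleted ``__init__.py`` above defines the import surface of the thin ``google.cloud.dataplex`` wrapper: it re-exports every client and message type from the versioned ``google.cloud.dataplex_v1`` package, and the accompanying ``py.typed`` marker opts the package into PEP 561 inline typing. A minimal sketch of what that surface gives callers, assuming the released (non-staging) package is installed; the resource name below is a placeholder:

.. code-block:: python

    from google.cloud import dataplex       # version-less alias
    from google.cloud import dataplex_v1    # explicit v1 surface

    # The wrapper re-exports the same objects, so both names resolve to the
    # identical generated class.
    assert dataplex.CatalogServiceClient is dataplex_v1.CatalogServiceClient

    # Because of the PEP 561 ``py.typed`` marker, type checkers such as mypy
    # or pyright read the inline annotations on these classes without stubs.
    request = dataplex.GetEntryRequest(
        # Placeholder entry name, for illustration only.
        name="projects/my-project/locations/us-central1/entryGroups/my-group/entries/my-entry",
    )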
diff --git a/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/__init__.py b/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/__init__.py deleted file mode 100644 index 7fd5f0814f43..000000000000 --- a/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/__init__.py +++ /dev/null @@ -1,408 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -from google.cloud.dataplex_v1 import gapic_version as package_version - -__version__ = package_version.__version__ - - -from .services.catalog_service import CatalogServiceClient -from .services.catalog_service import CatalogServiceAsyncClient -from .services.content_service import ContentServiceClient -from .services.content_service import ContentServiceAsyncClient -from .services.dataplex_service import DataplexServiceClient -from .services.dataplex_service import DataplexServiceAsyncClient -from .services.data_scan_service import DataScanServiceClient -from .services.data_scan_service import DataScanServiceAsyncClient -from .services.data_taxonomy_service import DataTaxonomyServiceClient -from .services.data_taxonomy_service import DataTaxonomyServiceAsyncClient -from .services.metadata_service import MetadataServiceClient -from .services.metadata_service import MetadataServiceAsyncClient - -from .types.analyze import Content -from .types.analyze import Environment -from .types.analyze import Session -from .types.catalog import Aspect -from .types.catalog import AspectSource -from .types.catalog import AspectType -from .types.catalog import CancelMetadataJobRequest -from .types.catalog import CreateAspectTypeRequest -from .types.catalog import CreateEntryGroupRequest -from .types.catalog import CreateEntryRequest -from .types.catalog import CreateEntryTypeRequest -from .types.catalog import CreateMetadataJobRequest -from .types.catalog import DeleteAspectTypeRequest -from .types.catalog import DeleteEntryGroupRequest -from .types.catalog import DeleteEntryRequest -from .types.catalog import DeleteEntryTypeRequest -from .types.catalog import Entry -from .types.catalog import EntryGroup -from .types.catalog import EntrySource -from .types.catalog import EntryType -from .types.catalog import GetAspectTypeRequest -from .types.catalog import GetEntryGroupRequest -from .types.catalog import GetEntryRequest -from .types.catalog import GetEntryTypeRequest -from .types.catalog import GetMetadataJobRequest -from .types.catalog import ImportItem -from .types.catalog import ListAspectTypesRequest -from .types.catalog import ListAspectTypesResponse -from .types.catalog import ListEntriesRequest -from .types.catalog import ListEntriesResponse -from .types.catalog import ListEntryGroupsRequest -from .types.catalog import ListEntryGroupsResponse -from .types.catalog import ListEntryTypesRequest -from .types.catalog import ListEntryTypesResponse -from .types.catalog import ListMetadataJobsRequest -from .types.catalog import ListMetadataJobsResponse -from 
.types.catalog import LookupEntryRequest -from .types.catalog import MetadataJob -from .types.catalog import SearchEntriesRequest -from .types.catalog import SearchEntriesResponse -from .types.catalog import SearchEntriesResult -from .types.catalog import UpdateAspectTypeRequest -from .types.catalog import UpdateEntryGroupRequest -from .types.catalog import UpdateEntryRequest -from .types.catalog import UpdateEntryTypeRequest -from .types.catalog import EntryView -from .types.catalog import TransferStatus -from .types.content import CreateContentRequest -from .types.content import DeleteContentRequest -from .types.content import GetContentRequest -from .types.content import ListContentRequest -from .types.content import ListContentResponse -from .types.content import UpdateContentRequest -from .types.data_discovery import DataDiscoveryResult -from .types.data_discovery import DataDiscoverySpec -from .types.data_profile import DataProfileResult -from .types.data_profile import DataProfileSpec -from .types.data_quality import DataQualityColumnResult -from .types.data_quality import DataQualityDimension -from .types.data_quality import DataQualityDimensionResult -from .types.data_quality import DataQualityResult -from .types.data_quality import DataQualityRule -from .types.data_quality import DataQualityRuleResult -from .types.data_quality import DataQualitySpec -from .types.data_taxonomy import CreateDataAttributeBindingRequest -from .types.data_taxonomy import CreateDataAttributeRequest -from .types.data_taxonomy import CreateDataTaxonomyRequest -from .types.data_taxonomy import DataAttribute -from .types.data_taxonomy import DataAttributeBinding -from .types.data_taxonomy import DataTaxonomy -from .types.data_taxonomy import DeleteDataAttributeBindingRequest -from .types.data_taxonomy import DeleteDataAttributeRequest -from .types.data_taxonomy import DeleteDataTaxonomyRequest -from .types.data_taxonomy import GetDataAttributeBindingRequest -from .types.data_taxonomy import GetDataAttributeRequest -from .types.data_taxonomy import GetDataTaxonomyRequest -from .types.data_taxonomy import ListDataAttributeBindingsRequest -from .types.data_taxonomy import ListDataAttributeBindingsResponse -from .types.data_taxonomy import ListDataAttributesRequest -from .types.data_taxonomy import ListDataAttributesResponse -from .types.data_taxonomy import ListDataTaxonomiesRequest -from .types.data_taxonomy import ListDataTaxonomiesResponse -from .types.data_taxonomy import UpdateDataAttributeBindingRequest -from .types.data_taxonomy import UpdateDataAttributeRequest -from .types.data_taxonomy import UpdateDataTaxonomyRequest -from .types.datascans import CreateDataScanRequest -from .types.datascans import DataScan -from .types.datascans import DataScanJob -from .types.datascans import DeleteDataScanRequest -from .types.datascans import GenerateDataQualityRulesRequest -from .types.datascans import GenerateDataQualityRulesResponse -from .types.datascans import GetDataScanJobRequest -from .types.datascans import GetDataScanRequest -from .types.datascans import ListDataScanJobsRequest -from .types.datascans import ListDataScanJobsResponse -from .types.datascans import ListDataScansRequest -from .types.datascans import ListDataScansResponse -from .types.datascans import RunDataScanRequest -from .types.datascans import RunDataScanResponse -from .types.datascans import UpdateDataScanRequest -from .types.datascans import DataScanType -from .types.logs import DataQualityScanRuleResult -from .types.logs import 
DataScanEvent -from .types.logs import DiscoveryEvent -from .types.logs import GovernanceEvent -from .types.logs import JobEvent -from .types.logs import SessionEvent -from .types.metadata_ import CreateEntityRequest -from .types.metadata_ import CreatePartitionRequest -from .types.metadata_ import DeleteEntityRequest -from .types.metadata_ import DeletePartitionRequest -from .types.metadata_ import Entity -from .types.metadata_ import GetEntityRequest -from .types.metadata_ import GetPartitionRequest -from .types.metadata_ import ListEntitiesRequest -from .types.metadata_ import ListEntitiesResponse -from .types.metadata_ import ListPartitionsRequest -from .types.metadata_ import ListPartitionsResponse -from .types.metadata_ import Partition -from .types.metadata_ import Schema -from .types.metadata_ import StorageAccess -from .types.metadata_ import StorageFormat -from .types.metadata_ import UpdateEntityRequest -from .types.metadata_ import StorageSystem -from .types.processing import DataSource -from .types.processing import ScannedData -from .types.processing import Trigger -from .types.resources import Action -from .types.resources import Asset -from .types.resources import AssetStatus -from .types.resources import Lake -from .types.resources import Zone -from .types.resources import State -from .types.security import DataAccessSpec -from .types.security import ResourceAccessSpec -from .types.service import CancelJobRequest -from .types.service import CreateAssetRequest -from .types.service import CreateEnvironmentRequest -from .types.service import CreateLakeRequest -from .types.service import CreateTaskRequest -from .types.service import CreateZoneRequest -from .types.service import DeleteAssetRequest -from .types.service import DeleteEnvironmentRequest -from .types.service import DeleteLakeRequest -from .types.service import DeleteTaskRequest -from .types.service import DeleteZoneRequest -from .types.service import GetAssetRequest -from .types.service import GetEnvironmentRequest -from .types.service import GetJobRequest -from .types.service import GetLakeRequest -from .types.service import GetTaskRequest -from .types.service import GetZoneRequest -from .types.service import ListActionsResponse -from .types.service import ListAssetActionsRequest -from .types.service import ListAssetsRequest -from .types.service import ListAssetsResponse -from .types.service import ListEnvironmentsRequest -from .types.service import ListEnvironmentsResponse -from .types.service import ListJobsRequest -from .types.service import ListJobsResponse -from .types.service import ListLakeActionsRequest -from .types.service import ListLakesRequest -from .types.service import ListLakesResponse -from .types.service import ListSessionsRequest -from .types.service import ListSessionsResponse -from .types.service import ListTasksRequest -from .types.service import ListTasksResponse -from .types.service import ListZoneActionsRequest -from .types.service import ListZonesRequest -from .types.service import ListZonesResponse -from .types.service import OperationMetadata -from .types.service import RunTaskRequest -from .types.service import RunTaskResponse -from .types.service import UpdateAssetRequest -from .types.service import UpdateEnvironmentRequest -from .types.service import UpdateLakeRequest -from .types.service import UpdateTaskRequest -from .types.service import UpdateZoneRequest -from .types.tasks import Job -from .types.tasks import Task - -__all__ = ( - 'CatalogServiceAsyncClient', - 
'ContentServiceAsyncClient', - 'DataScanServiceAsyncClient', - 'DataTaxonomyServiceAsyncClient', - 'DataplexServiceAsyncClient', - 'MetadataServiceAsyncClient', -'Action', -'Aspect', -'AspectSource', -'AspectType', -'Asset', -'AssetStatus', -'CancelJobRequest', -'CancelMetadataJobRequest', -'CatalogServiceClient', -'Content', -'ContentServiceClient', -'CreateAspectTypeRequest', -'CreateAssetRequest', -'CreateContentRequest', -'CreateDataAttributeBindingRequest', -'CreateDataAttributeRequest', -'CreateDataScanRequest', -'CreateDataTaxonomyRequest', -'CreateEntityRequest', -'CreateEntryGroupRequest', -'CreateEntryRequest', -'CreateEntryTypeRequest', -'CreateEnvironmentRequest', -'CreateLakeRequest', -'CreateMetadataJobRequest', -'CreatePartitionRequest', -'CreateTaskRequest', -'CreateZoneRequest', -'DataAccessSpec', -'DataAttribute', -'DataAttributeBinding', -'DataDiscoveryResult', -'DataDiscoverySpec', -'DataProfileResult', -'DataProfileSpec', -'DataQualityColumnResult', -'DataQualityDimension', -'DataQualityDimensionResult', -'DataQualityResult', -'DataQualityRule', -'DataQualityRuleResult', -'DataQualityScanRuleResult', -'DataQualitySpec', -'DataScan', -'DataScanEvent', -'DataScanJob', -'DataScanServiceClient', -'DataScanType', -'DataSource', -'DataTaxonomy', -'DataTaxonomyServiceClient', -'DataplexServiceClient', -'DeleteAspectTypeRequest', -'DeleteAssetRequest', -'DeleteContentRequest', -'DeleteDataAttributeBindingRequest', -'DeleteDataAttributeRequest', -'DeleteDataScanRequest', -'DeleteDataTaxonomyRequest', -'DeleteEntityRequest', -'DeleteEntryGroupRequest', -'DeleteEntryRequest', -'DeleteEntryTypeRequest', -'DeleteEnvironmentRequest', -'DeleteLakeRequest', -'DeletePartitionRequest', -'DeleteTaskRequest', -'DeleteZoneRequest', -'DiscoveryEvent', -'Entity', -'Entry', -'EntryGroup', -'EntrySource', -'EntryType', -'EntryView', -'Environment', -'GenerateDataQualityRulesRequest', -'GenerateDataQualityRulesResponse', -'GetAspectTypeRequest', -'GetAssetRequest', -'GetContentRequest', -'GetDataAttributeBindingRequest', -'GetDataAttributeRequest', -'GetDataScanJobRequest', -'GetDataScanRequest', -'GetDataTaxonomyRequest', -'GetEntityRequest', -'GetEntryGroupRequest', -'GetEntryRequest', -'GetEntryTypeRequest', -'GetEnvironmentRequest', -'GetJobRequest', -'GetLakeRequest', -'GetMetadataJobRequest', -'GetPartitionRequest', -'GetTaskRequest', -'GetZoneRequest', -'GovernanceEvent', -'ImportItem', -'Job', -'JobEvent', -'Lake', -'ListActionsResponse', -'ListAspectTypesRequest', -'ListAspectTypesResponse', -'ListAssetActionsRequest', -'ListAssetsRequest', -'ListAssetsResponse', -'ListContentRequest', -'ListContentResponse', -'ListDataAttributeBindingsRequest', -'ListDataAttributeBindingsResponse', -'ListDataAttributesRequest', -'ListDataAttributesResponse', -'ListDataScanJobsRequest', -'ListDataScanJobsResponse', -'ListDataScansRequest', -'ListDataScansResponse', -'ListDataTaxonomiesRequest', -'ListDataTaxonomiesResponse', -'ListEntitiesRequest', -'ListEntitiesResponse', -'ListEntriesRequest', -'ListEntriesResponse', -'ListEntryGroupsRequest', -'ListEntryGroupsResponse', -'ListEntryTypesRequest', -'ListEntryTypesResponse', -'ListEnvironmentsRequest', -'ListEnvironmentsResponse', -'ListJobsRequest', -'ListJobsResponse', -'ListLakeActionsRequest', -'ListLakesRequest', -'ListLakesResponse', -'ListMetadataJobsRequest', -'ListMetadataJobsResponse', -'ListPartitionsRequest', -'ListPartitionsResponse', -'ListSessionsRequest', -'ListSessionsResponse', -'ListTasksRequest', -'ListTasksResponse', 
-'ListZoneActionsRequest', -'ListZonesRequest', -'ListZonesResponse', -'LookupEntryRequest', -'MetadataJob', -'MetadataServiceClient', -'OperationMetadata', -'Partition', -'ResourceAccessSpec', -'RunDataScanRequest', -'RunDataScanResponse', -'RunTaskRequest', -'RunTaskResponse', -'ScannedData', -'Schema', -'SearchEntriesRequest', -'SearchEntriesResponse', -'SearchEntriesResult', -'Session', -'SessionEvent', -'State', -'StorageAccess', -'StorageFormat', -'StorageSystem', -'Task', -'TransferStatus', -'Trigger', -'UpdateAspectTypeRequest', -'UpdateAssetRequest', -'UpdateContentRequest', -'UpdateDataAttributeBindingRequest', -'UpdateDataAttributeRequest', -'UpdateDataScanRequest', -'UpdateDataTaxonomyRequest', -'UpdateEntityRequest', -'UpdateEntryGroupRequest', -'UpdateEntryRequest', -'UpdateEntryTypeRequest', -'UpdateEnvironmentRequest', -'UpdateLakeRequest', -'UpdateTaskRequest', -'UpdateZoneRequest', -'Zone', -) diff --git a/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/gapic_metadata.json b/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/gapic_metadata.json deleted file mode 100644 index 9fb1150241c8..000000000000 --- a/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/gapic_metadata.json +++ /dev/null @@ -1,1093 +0,0 @@ - { - "comment": "This file maps proto services/RPCs to the corresponding library clients/methods", - "language": "python", - "libraryPackage": "google.cloud.dataplex_v1", - "protoPackage": "google.cloud.dataplex.v1", - "schema": "1.0", - "services": { - "CatalogService": { - "clients": { - "grpc": { - "libraryClient": "CatalogServiceClient", - "rpcs": { - "CancelMetadataJob": { - "methods": [ - "cancel_metadata_job" - ] - }, - "CreateAspectType": { - "methods": [ - "create_aspect_type" - ] - }, - "CreateEntry": { - "methods": [ - "create_entry" - ] - }, - "CreateEntryGroup": { - "methods": [ - "create_entry_group" - ] - }, - "CreateEntryType": { - "methods": [ - "create_entry_type" - ] - }, - "CreateMetadataJob": { - "methods": [ - "create_metadata_job" - ] - }, - "DeleteAspectType": { - "methods": [ - "delete_aspect_type" - ] - }, - "DeleteEntry": { - "methods": [ - "delete_entry" - ] - }, - "DeleteEntryGroup": { - "methods": [ - "delete_entry_group" - ] - }, - "DeleteEntryType": { - "methods": [ - "delete_entry_type" - ] - }, - "GetAspectType": { - "methods": [ - "get_aspect_type" - ] - }, - "GetEntry": { - "methods": [ - "get_entry" - ] - }, - "GetEntryGroup": { - "methods": [ - "get_entry_group" - ] - }, - "GetEntryType": { - "methods": [ - "get_entry_type" - ] - }, - "GetMetadataJob": { - "methods": [ - "get_metadata_job" - ] - }, - "ListAspectTypes": { - "methods": [ - "list_aspect_types" - ] - }, - "ListEntries": { - "methods": [ - "list_entries" - ] - }, - "ListEntryGroups": { - "methods": [ - "list_entry_groups" - ] - }, - "ListEntryTypes": { - "methods": [ - "list_entry_types" - ] - }, - "ListMetadataJobs": { - "methods": [ - "list_metadata_jobs" - ] - }, - "LookupEntry": { - "methods": [ - "lookup_entry" - ] - }, - "SearchEntries": { - "methods": [ - "search_entries" - ] - }, - "UpdateAspectType": { - "methods": [ - "update_aspect_type" - ] - }, - "UpdateEntry": { - "methods": [ - "update_entry" - ] - }, - "UpdateEntryGroup": { - "methods": [ - "update_entry_group" - ] - }, - "UpdateEntryType": { - "methods": [ - "update_entry_type" - ] - } - } - }, - "grpc-async": { - "libraryClient": "CatalogServiceAsyncClient", - "rpcs": { - "CancelMetadataJob": { - "methods": [ - "cancel_metadata_job" - ] - }, - 
"CreateAspectType": { - "methods": [ - "create_aspect_type" - ] - }, - "CreateEntry": { - "methods": [ - "create_entry" - ] - }, - "CreateEntryGroup": { - "methods": [ - "create_entry_group" - ] - }, - "CreateEntryType": { - "methods": [ - "create_entry_type" - ] - }, - "CreateMetadataJob": { - "methods": [ - "create_metadata_job" - ] - }, - "DeleteAspectType": { - "methods": [ - "delete_aspect_type" - ] - }, - "DeleteEntry": { - "methods": [ - "delete_entry" - ] - }, - "DeleteEntryGroup": { - "methods": [ - "delete_entry_group" - ] - }, - "DeleteEntryType": { - "methods": [ - "delete_entry_type" - ] - }, - "GetAspectType": { - "methods": [ - "get_aspect_type" - ] - }, - "GetEntry": { - "methods": [ - "get_entry" - ] - }, - "GetEntryGroup": { - "methods": [ - "get_entry_group" - ] - }, - "GetEntryType": { - "methods": [ - "get_entry_type" - ] - }, - "GetMetadataJob": { - "methods": [ - "get_metadata_job" - ] - }, - "ListAspectTypes": { - "methods": [ - "list_aspect_types" - ] - }, - "ListEntries": { - "methods": [ - "list_entries" - ] - }, - "ListEntryGroups": { - "methods": [ - "list_entry_groups" - ] - }, - "ListEntryTypes": { - "methods": [ - "list_entry_types" - ] - }, - "ListMetadataJobs": { - "methods": [ - "list_metadata_jobs" - ] - }, - "LookupEntry": { - "methods": [ - "lookup_entry" - ] - }, - "SearchEntries": { - "methods": [ - "search_entries" - ] - }, - "UpdateAspectType": { - "methods": [ - "update_aspect_type" - ] - }, - "UpdateEntry": { - "methods": [ - "update_entry" - ] - }, - "UpdateEntryGroup": { - "methods": [ - "update_entry_group" - ] - }, - "UpdateEntryType": { - "methods": [ - "update_entry_type" - ] - } - } - } - } - }, - "ContentService": { - "clients": { - "grpc": { - "libraryClient": "ContentServiceClient", - "rpcs": { - "CreateContent": { - "methods": [ - "create_content" - ] - }, - "DeleteContent": { - "methods": [ - "delete_content" - ] - }, - "GetContent": { - "methods": [ - "get_content" - ] - }, - "GetIamPolicy": { - "methods": [ - "get_iam_policy" - ] - }, - "ListContent": { - "methods": [ - "list_content" - ] - }, - "SetIamPolicy": { - "methods": [ - "set_iam_policy" - ] - }, - "TestIamPermissions": { - "methods": [ - "test_iam_permissions" - ] - }, - "UpdateContent": { - "methods": [ - "update_content" - ] - } - } - }, - "grpc-async": { - "libraryClient": "ContentServiceAsyncClient", - "rpcs": { - "CreateContent": { - "methods": [ - "create_content" - ] - }, - "DeleteContent": { - "methods": [ - "delete_content" - ] - }, - "GetContent": { - "methods": [ - "get_content" - ] - }, - "GetIamPolicy": { - "methods": [ - "get_iam_policy" - ] - }, - "ListContent": { - "methods": [ - "list_content" - ] - }, - "SetIamPolicy": { - "methods": [ - "set_iam_policy" - ] - }, - "TestIamPermissions": { - "methods": [ - "test_iam_permissions" - ] - }, - "UpdateContent": { - "methods": [ - "update_content" - ] - } - } - } - } - }, - "DataScanService": { - "clients": { - "grpc": { - "libraryClient": "DataScanServiceClient", - "rpcs": { - "CreateDataScan": { - "methods": [ - "create_data_scan" - ] - }, - "DeleteDataScan": { - "methods": [ - "delete_data_scan" - ] - }, - "GenerateDataQualityRules": { - "methods": [ - "generate_data_quality_rules" - ] - }, - "GetDataScan": { - "methods": [ - "get_data_scan" - ] - }, - "GetDataScanJob": { - "methods": [ - "get_data_scan_job" - ] - }, - "ListDataScanJobs": { - "methods": [ - "list_data_scan_jobs" - ] - }, - "ListDataScans": { - "methods": [ - "list_data_scans" - ] - }, - "RunDataScan": { - "methods": [ - "run_data_scan" - ] - 
}, - "UpdateDataScan": { - "methods": [ - "update_data_scan" - ] - } - } - }, - "grpc-async": { - "libraryClient": "DataScanServiceAsyncClient", - "rpcs": { - "CreateDataScan": { - "methods": [ - "create_data_scan" - ] - }, - "DeleteDataScan": { - "methods": [ - "delete_data_scan" - ] - }, - "GenerateDataQualityRules": { - "methods": [ - "generate_data_quality_rules" - ] - }, - "GetDataScan": { - "methods": [ - "get_data_scan" - ] - }, - "GetDataScanJob": { - "methods": [ - "get_data_scan_job" - ] - }, - "ListDataScanJobs": { - "methods": [ - "list_data_scan_jobs" - ] - }, - "ListDataScans": { - "methods": [ - "list_data_scans" - ] - }, - "RunDataScan": { - "methods": [ - "run_data_scan" - ] - }, - "UpdateDataScan": { - "methods": [ - "update_data_scan" - ] - } - } - } - } - }, - "DataTaxonomyService": { - "clients": { - "grpc": { - "libraryClient": "DataTaxonomyServiceClient", - "rpcs": { - "CreateDataAttribute": { - "methods": [ - "create_data_attribute" - ] - }, - "CreateDataAttributeBinding": { - "methods": [ - "create_data_attribute_binding" - ] - }, - "CreateDataTaxonomy": { - "methods": [ - "create_data_taxonomy" - ] - }, - "DeleteDataAttribute": { - "methods": [ - "delete_data_attribute" - ] - }, - "DeleteDataAttributeBinding": { - "methods": [ - "delete_data_attribute_binding" - ] - }, - "DeleteDataTaxonomy": { - "methods": [ - "delete_data_taxonomy" - ] - }, - "GetDataAttribute": { - "methods": [ - "get_data_attribute" - ] - }, - "GetDataAttributeBinding": { - "methods": [ - "get_data_attribute_binding" - ] - }, - "GetDataTaxonomy": { - "methods": [ - "get_data_taxonomy" - ] - }, - "ListDataAttributeBindings": { - "methods": [ - "list_data_attribute_bindings" - ] - }, - "ListDataAttributes": { - "methods": [ - "list_data_attributes" - ] - }, - "ListDataTaxonomies": { - "methods": [ - "list_data_taxonomies" - ] - }, - "UpdateDataAttribute": { - "methods": [ - "update_data_attribute" - ] - }, - "UpdateDataAttributeBinding": { - "methods": [ - "update_data_attribute_binding" - ] - }, - "UpdateDataTaxonomy": { - "methods": [ - "update_data_taxonomy" - ] - } - } - }, - "grpc-async": { - "libraryClient": "DataTaxonomyServiceAsyncClient", - "rpcs": { - "CreateDataAttribute": { - "methods": [ - "create_data_attribute" - ] - }, - "CreateDataAttributeBinding": { - "methods": [ - "create_data_attribute_binding" - ] - }, - "CreateDataTaxonomy": { - "methods": [ - "create_data_taxonomy" - ] - }, - "DeleteDataAttribute": { - "methods": [ - "delete_data_attribute" - ] - }, - "DeleteDataAttributeBinding": { - "methods": [ - "delete_data_attribute_binding" - ] - }, - "DeleteDataTaxonomy": { - "methods": [ - "delete_data_taxonomy" - ] - }, - "GetDataAttribute": { - "methods": [ - "get_data_attribute" - ] - }, - "GetDataAttributeBinding": { - "methods": [ - "get_data_attribute_binding" - ] - }, - "GetDataTaxonomy": { - "methods": [ - "get_data_taxonomy" - ] - }, - "ListDataAttributeBindings": { - "methods": [ - "list_data_attribute_bindings" - ] - }, - "ListDataAttributes": { - "methods": [ - "list_data_attributes" - ] - }, - "ListDataTaxonomies": { - "methods": [ - "list_data_taxonomies" - ] - }, - "UpdateDataAttribute": { - "methods": [ - "update_data_attribute" - ] - }, - "UpdateDataAttributeBinding": { - "methods": [ - "update_data_attribute_binding" - ] - }, - "UpdateDataTaxonomy": { - "methods": [ - "update_data_taxonomy" - ] - } - } - } - } - }, - "DataplexService": { - "clients": { - "grpc": { - "libraryClient": "DataplexServiceClient", - "rpcs": { - "CancelJob": { - "methods": [ - 
"cancel_job" - ] - }, - "CreateAsset": { - "methods": [ - "create_asset" - ] - }, - "CreateEnvironment": { - "methods": [ - "create_environment" - ] - }, - "CreateLake": { - "methods": [ - "create_lake" - ] - }, - "CreateTask": { - "methods": [ - "create_task" - ] - }, - "CreateZone": { - "methods": [ - "create_zone" - ] - }, - "DeleteAsset": { - "methods": [ - "delete_asset" - ] - }, - "DeleteEnvironment": { - "methods": [ - "delete_environment" - ] - }, - "DeleteLake": { - "methods": [ - "delete_lake" - ] - }, - "DeleteTask": { - "methods": [ - "delete_task" - ] - }, - "DeleteZone": { - "methods": [ - "delete_zone" - ] - }, - "GetAsset": { - "methods": [ - "get_asset" - ] - }, - "GetEnvironment": { - "methods": [ - "get_environment" - ] - }, - "GetJob": { - "methods": [ - "get_job" - ] - }, - "GetLake": { - "methods": [ - "get_lake" - ] - }, - "GetTask": { - "methods": [ - "get_task" - ] - }, - "GetZone": { - "methods": [ - "get_zone" - ] - }, - "ListAssetActions": { - "methods": [ - "list_asset_actions" - ] - }, - "ListAssets": { - "methods": [ - "list_assets" - ] - }, - "ListEnvironments": { - "methods": [ - "list_environments" - ] - }, - "ListJobs": { - "methods": [ - "list_jobs" - ] - }, - "ListLakeActions": { - "methods": [ - "list_lake_actions" - ] - }, - "ListLakes": { - "methods": [ - "list_lakes" - ] - }, - "ListSessions": { - "methods": [ - "list_sessions" - ] - }, - "ListTasks": { - "methods": [ - "list_tasks" - ] - }, - "ListZoneActions": { - "methods": [ - "list_zone_actions" - ] - }, - "ListZones": { - "methods": [ - "list_zones" - ] - }, - "RunTask": { - "methods": [ - "run_task" - ] - }, - "UpdateAsset": { - "methods": [ - "update_asset" - ] - }, - "UpdateEnvironment": { - "methods": [ - "update_environment" - ] - }, - "UpdateLake": { - "methods": [ - "update_lake" - ] - }, - "UpdateTask": { - "methods": [ - "update_task" - ] - }, - "UpdateZone": { - "methods": [ - "update_zone" - ] - } - } - }, - "grpc-async": { - "libraryClient": "DataplexServiceAsyncClient", - "rpcs": { - "CancelJob": { - "methods": [ - "cancel_job" - ] - }, - "CreateAsset": { - "methods": [ - "create_asset" - ] - }, - "CreateEnvironment": { - "methods": [ - "create_environment" - ] - }, - "CreateLake": { - "methods": [ - "create_lake" - ] - }, - "CreateTask": { - "methods": [ - "create_task" - ] - }, - "CreateZone": { - "methods": [ - "create_zone" - ] - }, - "DeleteAsset": { - "methods": [ - "delete_asset" - ] - }, - "DeleteEnvironment": { - "methods": [ - "delete_environment" - ] - }, - "DeleteLake": { - "methods": [ - "delete_lake" - ] - }, - "DeleteTask": { - "methods": [ - "delete_task" - ] - }, - "DeleteZone": { - "methods": [ - "delete_zone" - ] - }, - "GetAsset": { - "methods": [ - "get_asset" - ] - }, - "GetEnvironment": { - "methods": [ - "get_environment" - ] - }, - "GetJob": { - "methods": [ - "get_job" - ] - }, - "GetLake": { - "methods": [ - "get_lake" - ] - }, - "GetTask": { - "methods": [ - "get_task" - ] - }, - "GetZone": { - "methods": [ - "get_zone" - ] - }, - "ListAssetActions": { - "methods": [ - "list_asset_actions" - ] - }, - "ListAssets": { - "methods": [ - "list_assets" - ] - }, - "ListEnvironments": { - "methods": [ - "list_environments" - ] - }, - "ListJobs": { - "methods": [ - "list_jobs" - ] - }, - "ListLakeActions": { - "methods": [ - "list_lake_actions" - ] - }, - "ListLakes": { - "methods": [ - "list_lakes" - ] - }, - "ListSessions": { - "methods": [ - "list_sessions" - ] - }, - "ListTasks": { - "methods": [ - "list_tasks" - ] - }, - "ListZoneActions": { - "methods": [ 
- "list_zone_actions" - ] - }, - "ListZones": { - "methods": [ - "list_zones" - ] - }, - "RunTask": { - "methods": [ - "run_task" - ] - }, - "UpdateAsset": { - "methods": [ - "update_asset" - ] - }, - "UpdateEnvironment": { - "methods": [ - "update_environment" - ] - }, - "UpdateLake": { - "methods": [ - "update_lake" - ] - }, - "UpdateTask": { - "methods": [ - "update_task" - ] - }, - "UpdateZone": { - "methods": [ - "update_zone" - ] - } - } - } - } - }, - "MetadataService": { - "clients": { - "grpc": { - "libraryClient": "MetadataServiceClient", - "rpcs": { - "CreateEntity": { - "methods": [ - "create_entity" - ] - }, - "CreatePartition": { - "methods": [ - "create_partition" - ] - }, - "DeleteEntity": { - "methods": [ - "delete_entity" - ] - }, - "DeletePartition": { - "methods": [ - "delete_partition" - ] - }, - "GetEntity": { - "methods": [ - "get_entity" - ] - }, - "GetPartition": { - "methods": [ - "get_partition" - ] - }, - "ListEntities": { - "methods": [ - "list_entities" - ] - }, - "ListPartitions": { - "methods": [ - "list_partitions" - ] - }, - "UpdateEntity": { - "methods": [ - "update_entity" - ] - } - } - }, - "grpc-async": { - "libraryClient": "MetadataServiceAsyncClient", - "rpcs": { - "CreateEntity": { - "methods": [ - "create_entity" - ] - }, - "CreatePartition": { - "methods": [ - "create_partition" - ] - }, - "DeleteEntity": { - "methods": [ - "delete_entity" - ] - }, - "DeletePartition": { - "methods": [ - "delete_partition" - ] - }, - "GetEntity": { - "methods": [ - "get_entity" - ] - }, - "GetPartition": { - "methods": [ - "get_partition" - ] - }, - "ListEntities": { - "methods": [ - "list_entities" - ] - }, - "ListPartitions": { - "methods": [ - "list_partitions" - ] - }, - "UpdateEntity": { - "methods": [ - "update_entity" - ] - } - } - } - } - } - } -} diff --git a/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/gapic_version.py b/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/gapic_version.py deleted file mode 100644 index 558c8aab67c5..000000000000 --- a/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/gapic_version.py +++ /dev/null @@ -1,16 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -__version__ = "0.0.0" # {x-release-please-version} diff --git a/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/py.typed b/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/py.typed deleted file mode 100644 index c932c263028e..000000000000 --- a/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/py.typed +++ /dev/null @@ -1,2 +0,0 @@ -# Marker file for PEP 561. -# The google-cloud-dataplex package uses inline types. 
diff --git a/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/__init__.py b/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/__init__.py deleted file mode 100644 index 8f6cf068242c..000000000000 --- a/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/__init__.py +++ /dev/null @@ -1,15 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# diff --git a/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/catalog_service/__init__.py b/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/catalog_service/__init__.py deleted file mode 100644 index 91f4e026ba8c..000000000000 --- a/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/catalog_service/__init__.py +++ /dev/null @@ -1,22 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -from .client import CatalogServiceClient -from .async_client import CatalogServiceAsyncClient - -__all__ = ( - 'CatalogServiceClient', - 'CatalogServiceAsyncClient', -) diff --git a/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/catalog_service/async_client.py b/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/catalog_service/async_client.py deleted file mode 100644 index dfe5af31637d..000000000000 --- a/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/catalog_service/async_client.py +++ /dev/null @@ -1,3652 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# -from collections import OrderedDict -import re -from typing import Dict, Callable, Mapping, MutableMapping, MutableSequence, Optional, Sequence, Tuple, Type, Union - -from google.cloud.dataplex_v1 import gapic_version as package_version - -from google.api_core.client_options import ClientOptions -from google.api_core import exceptions as core_exceptions -from google.api_core import gapic_v1 -from google.api_core import retry_async as retries -from google.auth import credentials as ga_credentials # type: ignore -from google.oauth2 import service_account # type: ignore - - -try: - OptionalRetry = Union[retries.AsyncRetry, gapic_v1.method._MethodDefault, None] -except AttributeError: # pragma: NO COVER - OptionalRetry = Union[retries.AsyncRetry, object, None] # type: ignore - -from google.api_core import operation # type: ignore -from google.api_core import operation_async # type: ignore -from google.cloud.dataplex_v1.services.catalog_service import pagers -from google.cloud.dataplex_v1.types import catalog -from google.cloud.dataplex_v1.types import service -from google.cloud.location import locations_pb2 # type: ignore -from google.iam.v1 import iam_policy_pb2 # type: ignore -from google.iam.v1 import policy_pb2 # type: ignore -from google.longrunning import operations_pb2 # type: ignore -from google.protobuf import empty_pb2 # type: ignore -from google.protobuf import field_mask_pb2 # type: ignore -from google.protobuf import timestamp_pb2 # type: ignore -from .transports.base import CatalogServiceTransport, DEFAULT_CLIENT_INFO -from .transports.grpc_asyncio import CatalogServiceGrpcAsyncIOTransport -from .client import CatalogServiceClient - - -class CatalogServiceAsyncClient: - """The primary resources offered by this service are - EntryGroups, EntryTypes, AspectTypes, and Entries. They - collectively let data administrators organize, manage, secure, - and catalog data located across cloud projects in their - organization in a variety of storage systems, including Cloud - Storage and BigQuery. - """ - - _client: CatalogServiceClient - - # Copy defaults from the synchronous client for use here. - # Note: DEFAULT_ENDPOINT is deprecated. Use _DEFAULT_ENDPOINT_TEMPLATE instead. 
- DEFAULT_ENDPOINT = CatalogServiceClient.DEFAULT_ENDPOINT - DEFAULT_MTLS_ENDPOINT = CatalogServiceClient.DEFAULT_MTLS_ENDPOINT - _DEFAULT_ENDPOINT_TEMPLATE = CatalogServiceClient._DEFAULT_ENDPOINT_TEMPLATE - _DEFAULT_UNIVERSE = CatalogServiceClient._DEFAULT_UNIVERSE - - aspect_type_path = staticmethod(CatalogServiceClient.aspect_type_path) - parse_aspect_type_path = staticmethod(CatalogServiceClient.parse_aspect_type_path) - entry_path = staticmethod(CatalogServiceClient.entry_path) - parse_entry_path = staticmethod(CatalogServiceClient.parse_entry_path) - entry_group_path = staticmethod(CatalogServiceClient.entry_group_path) - parse_entry_group_path = staticmethod(CatalogServiceClient.parse_entry_group_path) - entry_type_path = staticmethod(CatalogServiceClient.entry_type_path) - parse_entry_type_path = staticmethod(CatalogServiceClient.parse_entry_type_path) - metadata_job_path = staticmethod(CatalogServiceClient.metadata_job_path) - parse_metadata_job_path = staticmethod(CatalogServiceClient.parse_metadata_job_path) - common_billing_account_path = staticmethod(CatalogServiceClient.common_billing_account_path) - parse_common_billing_account_path = staticmethod(CatalogServiceClient.parse_common_billing_account_path) - common_folder_path = staticmethod(CatalogServiceClient.common_folder_path) - parse_common_folder_path = staticmethod(CatalogServiceClient.parse_common_folder_path) - common_organization_path = staticmethod(CatalogServiceClient.common_organization_path) - parse_common_organization_path = staticmethod(CatalogServiceClient.parse_common_organization_path) - common_project_path = staticmethod(CatalogServiceClient.common_project_path) - parse_common_project_path = staticmethod(CatalogServiceClient.parse_common_project_path) - common_location_path = staticmethod(CatalogServiceClient.common_location_path) - parse_common_location_path = staticmethod(CatalogServiceClient.parse_common_location_path) - - @classmethod - def from_service_account_info(cls, info: dict, *args, **kwargs): - """Creates an instance of this client using the provided credentials - info. - - Args: - info (dict): The service account private key info. - args: Additional arguments to pass to the constructor. - kwargs: Additional arguments to pass to the constructor. - - Returns: - CatalogServiceAsyncClient: The constructed client. - """ - return CatalogServiceClient.from_service_account_info.__func__(CatalogServiceAsyncClient, info, *args, **kwargs) # type: ignore - - @classmethod - def from_service_account_file(cls, filename: str, *args, **kwargs): - """Creates an instance of this client using the provided credentials - file. - - Args: - filename (str): The path to the service account private key json - file. - args: Additional arguments to pass to the constructor. - kwargs: Additional arguments to pass to the constructor. - - Returns: - CatalogServiceAsyncClient: The constructed client. - """ - return CatalogServiceClient.from_service_account_file.__func__(CatalogServiceAsyncClient, filename, *args, **kwargs) # type: ignore - - from_service_account_json = from_service_account_file - - @classmethod - def get_mtls_endpoint_and_cert_source(cls, client_options: Optional[ClientOptions] = None): - """Return the API endpoint and client cert source for mutual TLS. - - The client cert source is determined in the following order: - (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the - client cert source is None. 
- (2) if `client_options.client_cert_source` is provided, use the provided one; if the
- default client cert source exists, use the default one; otherwise the client cert
- source is None.
-
- The API endpoint is determined in the following order:
- (1) if `client_options.api_endpoint` is provided, use the provided one.
- (2) if `GOOGLE_API_USE_MTLS_ENDPOINT` environment variable is "always", use the
- default mTLS endpoint; if the environment variable is "never", use the default API
- endpoint; otherwise, if a client cert source exists, use the default mTLS endpoint;
- otherwise use the default API endpoint.
-
- More details can be found at https://google.aip.dev/auth/4114.
-
- Args:
- client_options (google.api_core.client_options.ClientOptions): Custom options for the
- client. Only the `api_endpoint` and `client_cert_source` properties may be used
- in this method.
-
- Returns:
- Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the
- client cert source to use.
-
- Raises:
- google.auth.exceptions.MutualTLSChannelError: If any errors happen.
- """
- return CatalogServiceClient.get_mtls_endpoint_and_cert_source(client_options) # type: ignore
-
- @property
- def transport(self) -> CatalogServiceTransport:
- """Returns the transport used by the client instance.
-
- Returns:
- CatalogServiceTransport: The transport used by the client instance.
- """
- return self._client.transport
-
- @property
- def api_endpoint(self):
- """Return the API endpoint used by the client instance.
-
- Returns:
- str: The API endpoint used by the client instance.
- """
- return self._client._api_endpoint
-
- @property
- def universe_domain(self) -> str:
- """Return the universe domain used by the client instance.
-
- Returns:
- str: The universe domain used
- by the client instance.
- """
- return self._client._universe_domain
-
- get_transport_class = CatalogServiceClient.get_transport_class
-
- def __init__(self, *,
- credentials: Optional[ga_credentials.Credentials] = None,
- transport: Optional[Union[str, CatalogServiceTransport, Callable[..., CatalogServiceTransport]]] = "grpc_asyncio",
- client_options: Optional[ClientOptions] = None,
- client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO,
- ) -> None:
- """Instantiates the catalog service async client.
-
- Args:
- credentials (Optional[google.auth.credentials.Credentials]): The
- authorization credentials to attach to requests. These
- credentials identify the application to the service; if none
- are specified, the client will attempt to ascertain the
- credentials from the environment.
- transport (Optional[Union[str,CatalogServiceTransport,Callable[..., CatalogServiceTransport]]]):
- The transport to use, or a Callable that constructs and returns a new transport to use.
- If a Callable is given, it will be called with the same set of initialization
- arguments as used in the CatalogServiceTransport constructor.
- If set to None, a transport is chosen automatically.
- client_options (Optional[Union[google.api_core.client_options.ClientOptions, dict]]):
- Custom options for the client.
-
- 1. The ``api_endpoint`` property can be used to override the
- default endpoint provided by the client when ``transport`` is
- not explicitly provided. If neither this property nor
- ``transport`` is explicitly provided, the endpoint is
- determined by the GOOGLE_API_USE_MTLS_ENDPOINT environment
- variable, which has one of the following values:
- "always" (always use the default mTLS endpoint), "never" (always
- use the default regular endpoint) and "auto" (auto-switch to the
- default mTLS endpoint if client certificate is present; this is
- the default value).
-
- 2. If the GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable
- is "true", then the ``client_cert_source`` property can be used
- to provide a client certificate for mTLS transport. If
- not provided, the default SSL client certificate will be used if
- present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not
- set, no client certificate will be used.
-
- 3. The ``universe_domain`` property can be used to override the
- default "googleapis.com" universe. Note that the ``api_endpoint``
- property still takes precedence, and ``universe_domain`` is
- currently not supported for mTLS.
-
- client_info (google.api_core.gapic_v1.client_info.ClientInfo):
- The client info used to send a user-agent string along with
- API requests. If ``None``, then default info will be used.
- Generally, you only need to set this if you're developing
- your own client library.
-
- Raises:
- google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport
- creation failed for any reason.
- """
- self._client = CatalogServiceClient(
- credentials=credentials,
- transport=transport,
- client_options=client_options,
- client_info=client_info,
-
- )
-
- async def create_entry_type(self,
- request: Optional[Union[catalog.CreateEntryTypeRequest, dict]] = None,
- *,
- parent: Optional[str] = None,
- entry_type: Optional[catalog.EntryType] = None,
- entry_type_id: Optional[str] = None,
- retry: OptionalRetry = gapic_v1.method.DEFAULT,
- timeout: Union[float, object] = gapic_v1.method.DEFAULT,
- metadata: Sequence[Tuple[str, str]] = (),
- ) -> operation_async.AsyncOperation:
- r"""Creates an EntryType.
-
- .. code-block:: python
-
- # This snippet has been automatically generated and should be regarded as a
- # code template only.
- # It will require modifications to work:
- # - It may require correct/in-range values for request initialization.
- # - It may require specifying regional endpoints when creating the service
- # client as shown in:
- # https://googleapis.dev/python/google-api-core/latest/client_options.html
- from google.cloud import dataplex_v1
-
- async def sample_create_entry_type():
- # Create a client
- client = dataplex_v1.CatalogServiceAsyncClient()
-
- # Initialize request argument(s)
- request = dataplex_v1.CreateEntryTypeRequest(
- parent="parent_value",
- entry_type_id="entry_type_id_value",
- )
-
- # Make the request
- operation = client.create_entry_type(request=request)
-
- print("Waiting for operation to complete...")
-
- response = (await operation).result()
-
- # Handle the response
- print(response)
-
- Args:
- request (Optional[Union[google.cloud.dataplex_v1.types.CreateEntryTypeRequest, dict]]):
- The request object. Create EntryType Request.
- parent (:class:`str`):
- Required. The resource name of the EntryType, of the
- form: projects/{project_number}/locations/{location_id}
- where ``location_id`` refers to a Google Cloud region.
-
- This corresponds to the ``parent`` field
- on the ``request`` instance; if ``request`` is provided, this
- should not be set.
- entry_type (:class:`google.cloud.dataplex_v1.types.EntryType`):
- Required.
EntryType Resource. - This corresponds to the ``entry_type`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - entry_type_id (:class:`str`): - Required. EntryType identifier. - This corresponds to the ``entry_type_id`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.api_core.operation_async.AsyncOperation: - An object representing a long-running operation. - - The result type for the operation will be - :class:`google.cloud.dataplex_v1.types.EntryType` Entry - Type is a template for creating Entries. - - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([parent, entry_type, entry_type_id]) - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, catalog.CreateEntryTypeRequest): - request = catalog.CreateEntryTypeRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if parent is not None: - request.parent = parent - if entry_type is not None: - request.entry_type = entry_type - if entry_type_id is not None: - request.entry_type_id = entry_type_id - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._client._transport._wrapped_methods[self._client._transport.create_entry_type] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Wrap the response in an operation future. - response = operation_async.from_gapic( - response, - self._client._transport.operations_client, - catalog.EntryType, - metadata_type=service.OperationMetadata, - ) - - # Done; return the response. - return response - - async def update_entry_type(self, - request: Optional[Union[catalog.UpdateEntryTypeRequest, dict]] = None, - *, - entry_type: Optional[catalog.EntryType] = None, - update_mask: Optional[field_mask_pb2.FieldMask] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> operation_async.AsyncOperation: - r"""Updates an EntryType. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. 
- # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import dataplex_v1 - - async def sample_update_entry_type(): - # Create a client - client = dataplex_v1.CatalogServiceAsyncClient() - - # Initialize request argument(s) - request = dataplex_v1.UpdateEntryTypeRequest( - ) - - # Make the request - operation = client.update_entry_type(request=request) - - print("Waiting for operation to complete...") - - response = (await operation).result() - - # Handle the response - print(response) - - Args: - request (Optional[Union[google.cloud.dataplex_v1.types.UpdateEntryTypeRequest, dict]]): - The request object. Update EntryType Request. - entry_type (:class:`google.cloud.dataplex_v1.types.EntryType`): - Required. EntryType Resource. - This corresponds to the ``entry_type`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - update_mask (:class:`google.protobuf.field_mask_pb2.FieldMask`): - Required. Mask of fields to update. - This corresponds to the ``update_mask`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.api_core.operation_async.AsyncOperation: - An object representing a long-running operation. - - The result type for the operation will be - :class:`google.cloud.dataplex_v1.types.EntryType` Entry - Type is a template for creating Entries. - - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([entry_type, update_mask]) - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, catalog.UpdateEntryTypeRequest): - request = catalog.UpdateEntryTypeRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if entry_type is not None: - request.entry_type = entry_type - if update_mask is not None: - request.update_mask = update_mask - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._client._transport._wrapped_methods[self._client._transport.update_entry_type] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("entry_type.name", request.entry_type.name), - )), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Wrap the response in an operation future. - response = operation_async.from_gapic( - response, - self._client._transport.operations_client, - catalog.EntryType, - metadata_type=service.OperationMetadata, - ) - - # Done; return the response. 
- return response
-
- async def delete_entry_type(self,
- request: Optional[Union[catalog.DeleteEntryTypeRequest, dict]] = None,
- *,
- name: Optional[str] = None,
- retry: OptionalRetry = gapic_v1.method.DEFAULT,
- timeout: Union[float, object] = gapic_v1.method.DEFAULT,
- metadata: Sequence[Tuple[str, str]] = (),
- ) -> operation_async.AsyncOperation:
- r"""Deletes an EntryType.
-
- .. code-block:: python
-
- # This snippet has been automatically generated and should be regarded as a
- # code template only.
- # It will require modifications to work:
- # - It may require correct/in-range values for request initialization.
- # - It may require specifying regional endpoints when creating the service
- # client as shown in:
- # https://googleapis.dev/python/google-api-core/latest/client_options.html
- from google.cloud import dataplex_v1
-
- async def sample_delete_entry_type():
- # Create a client
- client = dataplex_v1.CatalogServiceAsyncClient()
-
- # Initialize request argument(s)
- request = dataplex_v1.DeleteEntryTypeRequest(
- name="name_value",
- )
-
- # Make the request
- operation = client.delete_entry_type(request=request)
-
- print("Waiting for operation to complete...")
-
- response = (await operation).result()
-
- # Handle the response
- print(response)
-
- Args:
- request (Optional[Union[google.cloud.dataplex_v1.types.DeleteEntryTypeRequest, dict]]):
- The request object. Delete EntryType Request.
- name (:class:`str`):
- Required. The resource name of the EntryType:
- ``projects/{project_number}/locations/{location_id}/entryTypes/{entry_type_id}``.
-
- This corresponds to the ``name`` field
- on the ``request`` instance; if ``request`` is provided, this
- should not be set.
- retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any,
- should be retried.
- timeout (float): The timeout for this request.
- metadata (Sequence[Tuple[str, str]]): Strings which should be
- sent along with the request as metadata.
-
- Returns:
- google.api_core.operation_async.AsyncOperation:
- An object representing a long-running operation.
-
- The result type for the operation will be :class:`google.protobuf.empty_pb2.Empty` A generic empty message that you can re-use to avoid defining duplicated
- empty messages in your APIs. A typical example is to
- use it as the request or the response type of an API
- method. For instance:
-
- service Foo {
- rpc Bar(google.protobuf.Empty) returns
- (google.protobuf.Empty);
-
- }
-
- """
- # Create or coerce a protobuf request object.
- # - Quick check: If we got a request object, we should *not* have
- # gotten any keyword arguments that map to the request.
- has_flattened_params = any([name])
- if request is not None and has_flattened_params:
- raise ValueError("If the `request` argument is set, then none of "
- "the individual field arguments should be set.")
-
- # - Use the request object if provided (there's no risk of modifying the input as
- # there are no flattened fields), or create one.
- if not isinstance(request, catalog.DeleteEntryTypeRequest):
- request = catalog.DeleteEntryTypeRequest(request)
-
- # If we have keyword arguments corresponding to fields on the
- # request, apply these.
- if name is not None:
- request.name = name
-
- # Wrap the RPC method; this adds retry and timeout information,
- # and friendly error handling.
- rpc = self._client._transport._wrapped_methods[self._client._transport.delete_entry_type]
-
- # Certain fields should be provided within the metadata header;
- # add these here.
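- # The routing header is sent as "x-goog-request-params" so the backend can
- # route the call to the region that owns the resource.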
- metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Wrap the response in an operation future. - response = operation_async.from_gapic( - response, - self._client._transport.operations_client, - empty_pb2.Empty, - metadata_type=service.OperationMetadata, - ) - - # Done; return the response. - return response - - async def list_entry_types(self, - request: Optional[Union[catalog.ListEntryTypesRequest, dict]] = None, - *, - parent: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> pagers.ListEntryTypesAsyncPager: - r"""Lists EntryType resources in a project and location. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import dataplex_v1 - - async def sample_list_entry_types(): - # Create a client - client = dataplex_v1.CatalogServiceAsyncClient() - - # Initialize request argument(s) - request = dataplex_v1.ListEntryTypesRequest( - parent="parent_value", - ) - - # Make the request - page_result = client.list_entry_types(request=request) - - # Handle the response - async for response in page_result: - print(response) - - Args: - request (Optional[Union[google.cloud.dataplex_v1.types.ListEntryTypesRequest, dict]]): - The request object. List EntryTypes request - parent (:class:`str`): - Required. The resource name of the EntryType location, - of the form: - ``projects/{project_number}/locations/{location_id}`` - where ``location_id`` refers to a Google Cloud region. - - This corresponds to the ``parent`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.cloud.dataplex_v1.services.catalog_service.pagers.ListEntryTypesAsyncPager: - List EntryTypes response. - - Iterating over this object will yield - results and resolve additional pages - automatically. - - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([parent]) - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, catalog.ListEntryTypesRequest): - request = catalog.ListEntryTypesRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. 
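- # Only `parent` is flattened for this method; paging fields such as
- # `page_size` and `page_token` are carried on the request object itself.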
- if parent is not None: - request.parent = parent - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._client._transport._wrapped_methods[self._client._transport.list_entry_types] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # This method is paged; wrap the response in a pager, which provides - # an `__aiter__` convenience method. - response = pagers.ListEntryTypesAsyncPager( - method=rpc, - request=request, - response=response, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - async def get_entry_type(self, - request: Optional[Union[catalog.GetEntryTypeRequest, dict]] = None, - *, - name: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> catalog.EntryType: - r"""Gets an EntryType. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import dataplex_v1 - - async def sample_get_entry_type(): - # Create a client - client = dataplex_v1.CatalogServiceAsyncClient() - - # Initialize request argument(s) - request = dataplex_v1.GetEntryTypeRequest( - name="name_value", - ) - - # Make the request - response = await client.get_entry_type(request=request) - - # Handle the response - print(response) - - Args: - request (Optional[Union[google.cloud.dataplex_v1.types.GetEntryTypeRequest, dict]]): - The request object. Get EntryType request. - name (:class:`str`): - Required. The resource name of the EntryType: - ``projects/{project_number}/locations/{location_id}/entryTypes/{entry_type_id}``. - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.cloud.dataplex_v1.types.EntryType: - Entry Type is a template for creating - Entries. - - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([name]) - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. 
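- # A plain dict is also accepted here; the proto-plus constructor coerces
- # it into a GetEntryTypeRequest.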
- if not isinstance(request, catalog.GetEntryTypeRequest): - request = catalog.GetEntryTypeRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._client._transport._wrapped_methods[self._client._transport.get_entry_type] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - async def create_aspect_type(self, - request: Optional[Union[catalog.CreateAspectTypeRequest, dict]] = None, - *, - parent: Optional[str] = None, - aspect_type: Optional[catalog.AspectType] = None, - aspect_type_id: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> operation_async.AsyncOperation: - r"""Creates an AspectType. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import dataplex_v1 - - async def sample_create_aspect_type(): - # Create a client - client = dataplex_v1.CatalogServiceAsyncClient() - - # Initialize request argument(s) - aspect_type = dataplex_v1.AspectType() - aspect_type.metadata_template.name = "name_value" - aspect_type.metadata_template.type_ = "type__value" - - request = dataplex_v1.CreateAspectTypeRequest( - parent="parent_value", - aspect_type_id="aspect_type_id_value", - aspect_type=aspect_type, - ) - - # Make the request - operation = client.create_aspect_type(request=request) - - print("Waiting for operation to complete...") - - response = (await operation).result() - - # Handle the response - print(response) - - Args: - request (Optional[Union[google.cloud.dataplex_v1.types.CreateAspectTypeRequest, dict]]): - The request object. Create AspectType Request. - parent (:class:`str`): - Required. The resource name of the AspectType, of the - form: projects/{project_number}/locations/{location_id} - where ``location_id`` refers to a Google Cloud region. - - This corresponds to the ``parent`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - aspect_type (:class:`google.cloud.dataplex_v1.types.AspectType`): - Required. AspectType Resource. - This corresponds to the ``aspect_type`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - aspect_type_id (:class:`str`): - Required. AspectType identifier. - This corresponds to the ``aspect_type_id`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. 
- metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.api_core.operation_async.AsyncOperation: - An object representing a long-running operation. - - The result type for the operation will be :class:`google.cloud.dataplex_v1.types.AspectType` AspectType is a template for creating Aspects, and represents the - JSON-schema for a given Entry, for example, BigQuery - Table Schema. - - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([parent, aspect_type, aspect_type_id]) - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, catalog.CreateAspectTypeRequest): - request = catalog.CreateAspectTypeRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if parent is not None: - request.parent = parent - if aspect_type is not None: - request.aspect_type = aspect_type - if aspect_type_id is not None: - request.aspect_type_id = aspect_type_id - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._client._transport._wrapped_methods[self._client._transport.create_aspect_type] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Wrap the response in an operation future. - response = operation_async.from_gapic( - response, - self._client._transport.operations_client, - catalog.AspectType, - metadata_type=service.OperationMetadata, - ) - - # Done; return the response. - return response - - async def update_aspect_type(self, - request: Optional[Union[catalog.UpdateAspectTypeRequest, dict]] = None, - *, - aspect_type: Optional[catalog.AspectType] = None, - update_mask: Optional[field_mask_pb2.FieldMask] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> operation_async.AsyncOperation: - r"""Updates an AspectType. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. 
- # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import dataplex_v1 - - async def sample_update_aspect_type(): - # Create a client - client = dataplex_v1.CatalogServiceAsyncClient() - - # Initialize request argument(s) - aspect_type = dataplex_v1.AspectType() - aspect_type.metadata_template.name = "name_value" - aspect_type.metadata_template.type_ = "type__value" - - request = dataplex_v1.UpdateAspectTypeRequest( - aspect_type=aspect_type, - ) - - # Make the request - operation = client.update_aspect_type(request=request) - - print("Waiting for operation to complete...") - - response = (await operation).result() - - # Handle the response - print(response) - - Args: - request (Optional[Union[google.cloud.dataplex_v1.types.UpdateAspectTypeRequest, dict]]): - The request object. Update AspectType Request - aspect_type (:class:`google.cloud.dataplex_v1.types.AspectType`): - Required. AspectType Resource - This corresponds to the ``aspect_type`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - update_mask (:class:`google.protobuf.field_mask_pb2.FieldMask`): - Required. Mask of fields to update. - This corresponds to the ``update_mask`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.api_core.operation_async.AsyncOperation: - An object representing a long-running operation. - - The result type for the operation will be :class:`google.cloud.dataplex_v1.types.AspectType` AspectType is a template for creating Aspects, and represents the - JSON-schema for a given Entry, for example, BigQuery - Table Schema. - - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([aspect_type, update_mask]) - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, catalog.UpdateAspectTypeRequest): - request = catalog.UpdateAspectTypeRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if aspect_type is not None: - request.aspect_type = aspect_type - if update_mask is not None: - request.update_mask = update_mask - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._client._transport._wrapped_methods[self._client._transport.update_aspect_type] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("aspect_type.name", request.aspect_type.name), - )), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. 
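- # The RPC returns a raw long-running Operation; it is wrapped into an
- # awaitable AsyncOperation future after the call below.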
- response = await rpc(
- request,
- retry=retry,
- timeout=timeout,
- metadata=metadata,
- )
-
- # Wrap the response in an operation future.
- response = operation_async.from_gapic(
- response,
- self._client._transport.operations_client,
- catalog.AspectType,
- metadata_type=service.OperationMetadata,
- )
-
- # Done; return the response.
- return response
-
- async def delete_aspect_type(self,
- request: Optional[Union[catalog.DeleteAspectTypeRequest, dict]] = None,
- *,
- name: Optional[str] = None,
- retry: OptionalRetry = gapic_v1.method.DEFAULT,
- timeout: Union[float, object] = gapic_v1.method.DEFAULT,
- metadata: Sequence[Tuple[str, str]] = (),
- ) -> operation_async.AsyncOperation:
- r"""Deletes an AspectType.
-
- .. code-block:: python
-
- # This snippet has been automatically generated and should be regarded as a
- # code template only.
- # It will require modifications to work:
- # - It may require correct/in-range values for request initialization.
- # - It may require specifying regional endpoints when creating the service
- # client as shown in:
- # https://googleapis.dev/python/google-api-core/latest/client_options.html
- from google.cloud import dataplex_v1
-
- async def sample_delete_aspect_type():
- # Create a client
- client = dataplex_v1.CatalogServiceAsyncClient()
-
- # Initialize request argument(s)
- request = dataplex_v1.DeleteAspectTypeRequest(
- name="name_value",
- )
-
- # Make the request
- operation = client.delete_aspect_type(request=request)
-
- print("Waiting for operation to complete...")
-
- response = (await operation).result()
-
- # Handle the response
- print(response)
-
- Args:
- request (Optional[Union[google.cloud.dataplex_v1.types.DeleteAspectTypeRequest, dict]]):
- The request object. Delete AspectType Request.
- name (:class:`str`):
- Required. The resource name of the AspectType:
- ``projects/{project_number}/locations/{location_id}/aspectTypes/{aspect_type_id}``.
-
- This corresponds to the ``name`` field
- on the ``request`` instance; if ``request`` is provided, this
- should not be set.
- retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any,
- should be retried.
- timeout (float): The timeout for this request.
- metadata (Sequence[Tuple[str, str]]): Strings which should be
- sent along with the request as metadata.
-
- Returns:
- google.api_core.operation_async.AsyncOperation:
- An object representing a long-running operation.
-
- The result type for the operation will be :class:`google.protobuf.empty_pb2.Empty` A generic empty message that you can re-use to avoid defining duplicated
- empty messages in your APIs. A typical example is to
- use it as the request or the response type of an API
- method. For instance:
-
- service Foo {
- rpc Bar(google.protobuf.Empty) returns
- (google.protobuf.Empty);
-
- }
-
- """
- # Create or coerce a protobuf request object.
- # - Quick check: If we got a request object, we should *not* have
- # gotten any keyword arguments that map to the request.
- has_flattened_params = any([name])
- if request is not None and has_flattened_params:
- raise ValueError("If the `request` argument is set, then none of "
- "the individual field arguments should be set.")
-
- # - Use the request object if provided (there's no risk of modifying the input as
- # there are no flattened fields), or create one.
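- # Deletion is long-running; per the docstring above, the operation
- # resolves to google.protobuf.Empty once the AspectType is removed.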
- if not isinstance(request, catalog.DeleteAspectTypeRequest): - request = catalog.DeleteAspectTypeRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._client._transport._wrapped_methods[self._client._transport.delete_aspect_type] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Wrap the response in an operation future. - response = operation_async.from_gapic( - response, - self._client._transport.operations_client, - empty_pb2.Empty, - metadata_type=service.OperationMetadata, - ) - - # Done; return the response. - return response - - async def list_aspect_types(self, - request: Optional[Union[catalog.ListAspectTypesRequest, dict]] = None, - *, - parent: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> pagers.ListAspectTypesAsyncPager: - r"""Lists AspectType resources in a project and location. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import dataplex_v1 - - async def sample_list_aspect_types(): - # Create a client - client = dataplex_v1.CatalogServiceAsyncClient() - - # Initialize request argument(s) - request = dataplex_v1.ListAspectTypesRequest( - parent="parent_value", - ) - - # Make the request - page_result = client.list_aspect_types(request=request) - - # Handle the response - async for response in page_result: - print(response) - - Args: - request (Optional[Union[google.cloud.dataplex_v1.types.ListAspectTypesRequest, dict]]): - The request object. List AspectTypes request. - parent (:class:`str`): - Required. The resource name of the AspectType location, - of the form: - ``projects/{project_number}/locations/{location_id}`` - where ``location_id`` refers to a Google Cloud region. - - This corresponds to the ``parent`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.cloud.dataplex_v1.services.catalog_service.pagers.ListAspectTypesAsyncPager: - List AspectTypes response. - - Iterating over this object will yield - results and resolve additional pages - automatically. - - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. 
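- # Mixing `request=` with flattened arguments is ambiguous, so it is
- # rejected below with a ValueError.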
- has_flattened_params = any([parent]) - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, catalog.ListAspectTypesRequest): - request = catalog.ListAspectTypesRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if parent is not None: - request.parent = parent - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._client._transport._wrapped_methods[self._client._transport.list_aspect_types] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # This method is paged; wrap the response in a pager, which provides - # an `__aiter__` convenience method. - response = pagers.ListAspectTypesAsyncPager( - method=rpc, - request=request, - response=response, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - async def get_aspect_type(self, - request: Optional[Union[catalog.GetAspectTypeRequest, dict]] = None, - *, - name: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> catalog.AspectType: - r"""Gets an AspectType. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import dataplex_v1 - - async def sample_get_aspect_type(): - # Create a client - client = dataplex_v1.CatalogServiceAsyncClient() - - # Initialize request argument(s) - request = dataplex_v1.GetAspectTypeRequest( - name="name_value", - ) - - # Make the request - response = await client.get_aspect_type(request=request) - - # Handle the response - print(response) - - Args: - request (Optional[Union[google.cloud.dataplex_v1.types.GetAspectTypeRequest, dict]]): - The request object. Get AspectType request. - name (:class:`str`): - Required. The resource name of the AspectType: - ``projects/{project_number}/locations/{location_id}/aspectTypes/{aspect_type_id}``. - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. 
-
- Returns:
- google.cloud.dataplex_v1.types.AspectType:
- AspectType is a template for creating
- Aspects, and represents the JSON-schema
- for a given Entry, for example, BigQuery
- Table Schema.
-
- """
- # Create or coerce a protobuf request object.
- # - Quick check: If we got a request object, we should *not* have
- # gotten any keyword arguments that map to the request.
- has_flattened_params = any([name])
- if request is not None and has_flattened_params:
- raise ValueError("If the `request` argument is set, then none of "
- "the individual field arguments should be set.")
-
- # - Use the request object if provided (there's no risk of modifying the input as
- # there are no flattened fields), or create one.
- if not isinstance(request, catalog.GetAspectTypeRequest):
- request = catalog.GetAspectTypeRequest(request)
-
- # If we have keyword arguments corresponding to fields on the
- # request, apply these.
- if name is not None:
- request.name = name
-
- # Wrap the RPC method; this adds retry and timeout information,
- # and friendly error handling.
- rpc = self._client._transport._wrapped_methods[self._client._transport.get_aspect_type]
-
- # Certain fields should be provided within the metadata header;
- # add these here.
- metadata = tuple(metadata) + (
- gapic_v1.routing_header.to_grpc_metadata((
- ("name", request.name),
- )),
- )
-
- # Validate the universe domain.
- self._client._validate_universe_domain()
-
- # Send the request.
- response = await rpc(
- request,
- retry=retry,
- timeout=timeout,
- metadata=metadata,
- )
-
- # Done; return the response.
- return response
-
- async def create_entry_group(self,
- request: Optional[Union[catalog.CreateEntryGroupRequest, dict]] = None,
- *,
- parent: Optional[str] = None,
- entry_group: Optional[catalog.EntryGroup] = None,
- entry_group_id: Optional[str] = None,
- retry: OptionalRetry = gapic_v1.method.DEFAULT,
- timeout: Union[float, object] = gapic_v1.method.DEFAULT,
- metadata: Sequence[Tuple[str, str]] = (),
- ) -> operation_async.AsyncOperation:
- r"""Creates an EntryGroup.
-
- .. code-block:: python
-
- # This snippet has been automatically generated and should be regarded as a
- # code template only.
- # It will require modifications to work:
- # - It may require correct/in-range values for request initialization.
- # - It may require specifying regional endpoints when creating the service
- # client as shown in:
- # https://googleapis.dev/python/google-api-core/latest/client_options.html
- from google.cloud import dataplex_v1
-
- async def sample_create_entry_group():
- # Create a client
- client = dataplex_v1.CatalogServiceAsyncClient()
-
- # Initialize request argument(s)
- request = dataplex_v1.CreateEntryGroupRequest(
- parent="parent_value",
- entry_group_id="entry_group_id_value",
- )
-
- # Make the request
- operation = client.create_entry_group(request=request)
-
- print("Waiting for operation to complete...")
-
- response = (await operation).result()
-
- # Handle the response
- print(response)
-
- Args:
- request (Optional[Union[google.cloud.dataplex_v1.types.CreateEntryGroupRequest, dict]]):
- The request object. Create EntryGroup Request.
- parent (:class:`str`):
- Required. The resource name of the entryGroup, of the
- form: projects/{project_number}/locations/{location_id}
- where ``location_id`` refers to a Google Cloud region.
-
- This corresponds to the ``parent`` field
- on the ``request`` instance; if ``request`` is provided, this
- should not be set.
- entry_group (:class:`google.cloud.dataplex_v1.types.EntryGroup`): - Required. EntryGroup Resource. - This corresponds to the ``entry_group`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - entry_group_id (:class:`str`): - Required. EntryGroup identifier. - This corresponds to the ``entry_group_id`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.api_core.operation_async.AsyncOperation: - An object representing a long-running operation. - - The result type for the operation will be - :class:`google.cloud.dataplex_v1.types.EntryGroup` An - Entry Group represents a logical grouping of one or more - Entries. - - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([parent, entry_group, entry_group_id]) - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, catalog.CreateEntryGroupRequest): - request = catalog.CreateEntryGroupRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if parent is not None: - request.parent = parent - if entry_group is not None: - request.entry_group = entry_group - if entry_group_id is not None: - request.entry_group_id = entry_group_id - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._client._transport._wrapped_methods[self._client._transport.create_entry_group] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Wrap the response in an operation future. - response = operation_async.from_gapic( - response, - self._client._transport.operations_client, - catalog.EntryGroup, - metadata_type=service.OperationMetadata, - ) - - # Done; return the response. - return response - - async def update_entry_group(self, - request: Optional[Union[catalog.UpdateEntryGroupRequest, dict]] = None, - *, - entry_group: Optional[catalog.EntryGroup] = None, - update_mask: Optional[field_mask_pb2.FieldMask] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> operation_async.AsyncOperation: - r"""Updates an EntryGroup. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. 
- # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import dataplex_v1 - - async def sample_update_entry_group(): - # Create a client - client = dataplex_v1.CatalogServiceAsyncClient() - - # Initialize request argument(s) - request = dataplex_v1.UpdateEntryGroupRequest( - ) - - # Make the request - operation = client.update_entry_group(request=request) - - print("Waiting for operation to complete...") - - response = (await operation).result() - - # Handle the response - print(response) - - Args: - request (Optional[Union[google.cloud.dataplex_v1.types.UpdateEntryGroupRequest, dict]]): - The request object. Update EntryGroup Request. - entry_group (:class:`google.cloud.dataplex_v1.types.EntryGroup`): - Required. EntryGroup Resource. - This corresponds to the ``entry_group`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - update_mask (:class:`google.protobuf.field_mask_pb2.FieldMask`): - Required. Mask of fields to update. - This corresponds to the ``update_mask`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.api_core.operation_async.AsyncOperation: - An object representing a long-running operation. - - The result type for the operation will be - :class:`google.cloud.dataplex_v1.types.EntryGroup` An - Entry Group represents a logical grouping of one or more - Entries. - - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([entry_group, update_mask]) - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, catalog.UpdateEntryGroupRequest): - request = catalog.UpdateEntryGroupRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if entry_group is not None: - request.entry_group = entry_group - if update_mask is not None: - request.update_mask = update_mask - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._client._transport._wrapped_methods[self._client._transport.update_entry_group] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("entry_group.name", request.entry_group.name), - )), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Wrap the response in an operation future. 
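- # The caller obtains the final EntryGroup from this future's result()
- # once the server-side update completes.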
- response = operation_async.from_gapic( - response, - self._client._transport.operations_client, - catalog.EntryGroup, - metadata_type=service.OperationMetadata, - ) - - # Done; return the response. - return response - - async def delete_entry_group(self, - request: Optional[Union[catalog.DeleteEntryGroupRequest, dict]] = None, - *, - name: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> operation_async.AsyncOperation: - r"""Deletes an EntryGroup. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import dataplex_v1 - - async def sample_delete_entry_group(): - # Create a client - client = dataplex_v1.CatalogServiceAsyncClient() - - # Initialize request argument(s) - request = dataplex_v1.DeleteEntryGroupRequest( - name="name_value", - ) - - # Make the request - operation = client.delete_entry_group(request=request) - - print("Waiting for operation to complete...") - - response = (await operation).result() - - # Handle the response - print(response) - - Args: - request (Optional[Union[google.cloud.dataplex_v1.types.DeleteEntryGroupRequest, dict]]): - The request object. Delete EntryGroup Request. - name (:class:`str`): - Required. The resource name of the EntryGroup: - ``projects/{project_number}/locations/{location_id}/entryGroups/{entry_group_id}``. - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.api_core.operation_async.AsyncOperation: - An object representing a long-running operation. - - The result type for the operation will be :class:`google.protobuf.empty_pb2.Empty` A generic empty message that you can re-use to avoid defining duplicated - empty messages in your APIs. A typical example is to - use it as the request or the response type of an API - method. For instance: - - service Foo { - rpc Bar(google.protobuf.Empty) returns - (google.protobuf.Empty); - - } - - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([name]) - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, catalog.DeleteEntryGroupRequest): - request = catalog.DeleteEntryGroupRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. 
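- # `name` must be the fully qualified resource name, e.g.
- # projects/{project_number}/locations/{location_id}/entryGroups/{entry_group_id}.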
- if name is not None: - request.name = name - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._client._transport._wrapped_methods[self._client._transport.delete_entry_group] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Wrap the response in an operation future. - response = operation_async.from_gapic( - response, - self._client._transport.operations_client, - empty_pb2.Empty, - metadata_type=service.OperationMetadata, - ) - - # Done; return the response. - return response - - async def list_entry_groups(self, - request: Optional[Union[catalog.ListEntryGroupsRequest, dict]] = None, - *, - parent: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> pagers.ListEntryGroupsAsyncPager: - r"""Lists EntryGroup resources in a project and location. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import dataplex_v1 - - async def sample_list_entry_groups(): - # Create a client - client = dataplex_v1.CatalogServiceAsyncClient() - - # Initialize request argument(s) - request = dataplex_v1.ListEntryGroupsRequest( - parent="parent_value", - ) - - # Make the request - page_result = client.list_entry_groups(request=request) - - # Handle the response - async for response in page_result: - print(response) - - Args: - request (Optional[Union[google.cloud.dataplex_v1.types.ListEntryGroupsRequest, dict]]): - The request object. List entryGroups request. - parent (:class:`str`): - Required. The resource name of the entryGroup location, - of the form: - ``projects/{project_number}/locations/{location_id}`` - where ``location_id`` refers to a Google Cloud region. - - This corresponds to the ``parent`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.cloud.dataplex_v1.services.catalog_service.pagers.ListEntryGroupsAsyncPager: - List entry groups response. - - Iterating over this object will yield - results and resolve additional pages - automatically. - - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. 
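-        # Flattened keyword fields (here, ``parent``) are a convenience
-        # alternative to passing a fully-formed request object; the check
-        # below rejects mixing the two styles.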
- has_flattened_params = any([parent]) - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, catalog.ListEntryGroupsRequest): - request = catalog.ListEntryGroupsRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if parent is not None: - request.parent = parent - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._client._transport._wrapped_methods[self._client._transport.list_entry_groups] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # This method is paged; wrap the response in a pager, which provides - # an `__aiter__` convenience method. - response = pagers.ListEntryGroupsAsyncPager( - method=rpc, - request=request, - response=response, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - async def get_entry_group(self, - request: Optional[Union[catalog.GetEntryGroupRequest, dict]] = None, - *, - name: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> catalog.EntryGroup: - r"""Gets an EntryGroup. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import dataplex_v1 - - async def sample_get_entry_group(): - # Create a client - client = dataplex_v1.CatalogServiceAsyncClient() - - # Initialize request argument(s) - request = dataplex_v1.GetEntryGroupRequest( - name="name_value", - ) - - # Make the request - response = await client.get_entry_group(request=request) - - # Handle the response - print(response) - - Args: - request (Optional[Union[google.cloud.dataplex_v1.types.GetEntryGroupRequest, dict]]): - The request object. Get EntryGroup request. - name (:class:`str`): - Required. The resource name of the EntryGroup: - ``projects/{project_number}/locations/{location_id}/entryGroups/{entry_group_id}``. - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.cloud.dataplex_v1.types.EntryGroup: - An Entry Group represents a logical - grouping of one or more Entries. 
- - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([name]) - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, catalog.GetEntryGroupRequest): - request = catalog.GetEntryGroupRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._client._transport._wrapped_methods[self._client._transport.get_entry_group] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - async def create_entry(self, - request: Optional[Union[catalog.CreateEntryRequest, dict]] = None, - *, - parent: Optional[str] = None, - entry: Optional[catalog.Entry] = None, - entry_id: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> catalog.Entry: - r"""Creates an Entry. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import dataplex_v1 - - async def sample_create_entry(): - # Create a client - client = dataplex_v1.CatalogServiceAsyncClient() - - # Initialize request argument(s) - entry = dataplex_v1.Entry() - entry.entry_type = "entry_type_value" - - request = dataplex_v1.CreateEntryRequest( - parent="parent_value", - entry_id="entry_id_value", - entry=entry, - ) - - # Make the request - response = await client.create_entry(request=request) - - # Handle the response - print(response) - - Args: - request (Optional[Union[google.cloud.dataplex_v1.types.CreateEntryRequest, dict]]): - The request object. Create Entry request. - parent (:class:`str`): - Required. The resource name of the parent Entry Group: - ``projects/{project}/locations/{location}/entryGroups/{entry_group}``. - - This corresponds to the ``parent`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - entry (:class:`google.cloud.dataplex_v1.types.Entry`): - Required. Entry resource. - This corresponds to the ``entry`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - entry_id (:class:`str`): - Required. Entry identifier. It has to be unique within - an Entry Group. 
- 
-                Entries corresponding to Google Cloud resources use an
-                Entry ID format based on `full resource
-                names <https://cloud.google.com/apis/design/resource_names#full_resource_name>`__.
-                The format is a full resource name of the resource
-                without the prefix double slashes in the API service
-                name part of the full resource name. This allows
-                retrieval of entries using their associated resource
-                name.
- 
-                For example, if the full resource name of a resource is
-                ``//library.googleapis.com/shelves/shelf1/books/book2``,
-                then the suggested entry_id is
-                ``library.googleapis.com/shelves/shelf1/books/book2``.
- 
-                It is also suggested to follow the same convention for
-                entries corresponding to resources from providers or
-                systems other than Google Cloud.
- 
-                The maximum size of the field is 4000 characters.
- 
-                This corresponds to the ``entry_id`` field
-                on the ``request`` instance; if ``request`` is provided, this
-                should not be set.
-            retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any,
-                should be retried.
-            timeout (float): The timeout for this request.
-            metadata (Sequence[Tuple[str, str]]): Strings which should be
-                sent along with the request as metadata.
- 
-        Returns:
-            google.cloud.dataplex_v1.types.Entry:
-                An entry is a representation of a
-                data resource that can be described by
-                various metadata.
- 
-        """
-        # Create or coerce a protobuf request object.
-        # - Quick check: If we got a request object, we should *not* have
-        #   gotten any keyword arguments that map to the request.
-        has_flattened_params = any([parent, entry, entry_id])
-        if request is not None and has_flattened_params:
-            raise ValueError("If the `request` argument is set, then none of "
-                             "the individual field arguments should be set.")
- 
-        # - Use the request object if provided (there's no risk of modifying the input as
-        #   there are no flattened fields), or create one.
-        if not isinstance(request, catalog.CreateEntryRequest):
-            request = catalog.CreateEntryRequest(request)
- 
-        # If we have keyword arguments corresponding to fields on the
-        # request, apply these.
-        if parent is not None:
-            request.parent = parent
-        if entry is not None:
-            request.entry = entry
-        if entry_id is not None:
-            request.entry_id = entry_id
- 
-        # Wrap the RPC method; this adds retry and timeout information,
-        # and friendly error handling.
-        rpc = self._client._transport._wrapped_methods[self._client._transport.create_entry]
- 
-        # Certain fields should be provided within the metadata header;
-        # add these here.
-        metadata = tuple(metadata) + (
-            gapic_v1.routing_header.to_grpc_metadata((
-                ("parent", request.parent),
-            )),
-        )
- 
-        # Validate the universe domain.
-        self._client._validate_universe_domain()
- 
-        # Send the request.
-        response = await rpc(
-            request,
-            retry=retry,
-            timeout=timeout,
-            metadata=metadata,
-        )
- 
-        # Done; return the response.
-        return response
- 
-    async def update_entry(self,
-            request: Optional[Union[catalog.UpdateEntryRequest, dict]] = None,
-            *,
-            entry: Optional[catalog.Entry] = None,
-            update_mask: Optional[field_mask_pb2.FieldMask] = None,
-            retry: OptionalRetry = gapic_v1.method.DEFAULT,
-            timeout: Union[float, object] = gapic_v1.method.DEFAULT,
-            metadata: Sequence[Tuple[str, str]] = (),
-            ) -> catalog.Entry:
-        r"""Updates an Entry.
- 
-        .. code-block:: python
- 
-            # This snippet has been automatically generated and should be regarded as a
-            # code template only.
-            # It will require modifications to work:
-            # - It may require correct/in-range values for request initialization.
- # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import dataplex_v1 - - async def sample_update_entry(): - # Create a client - client = dataplex_v1.CatalogServiceAsyncClient() - - # Initialize request argument(s) - entry = dataplex_v1.Entry() - entry.entry_type = "entry_type_value" - - request = dataplex_v1.UpdateEntryRequest( - entry=entry, - ) - - # Make the request - response = await client.update_entry(request=request) - - # Handle the response - print(response) - - Args: - request (Optional[Union[google.cloud.dataplex_v1.types.UpdateEntryRequest, dict]]): - The request object. Update Entry request. - entry (:class:`google.cloud.dataplex_v1.types.Entry`): - Required. Entry resource. - This corresponds to the ``entry`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - update_mask (:class:`google.protobuf.field_mask_pb2.FieldMask`): - Optional. Mask of fields to update. To update Aspects, - the update_mask must contain the value "aspects". - - If the update_mask is empty, the service will update all - modifiable fields present in the request. - - This corresponds to the ``update_mask`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.cloud.dataplex_v1.types.Entry: - An entry is a representation of a - data resource that can be described by - various metadata. - - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([entry, update_mask]) - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, catalog.UpdateEntryRequest): - request = catalog.UpdateEntryRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if entry is not None: - request.entry = entry - if update_mask is not None: - request.update_mask = update_mask - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._client._transport._wrapped_methods[self._client._transport.update_entry] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("entry.name", request.entry.name), - )), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. 
- return response - - async def delete_entry(self, - request: Optional[Union[catalog.DeleteEntryRequest, dict]] = None, - *, - name: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> catalog.Entry: - r"""Deletes an Entry. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import dataplex_v1 - - async def sample_delete_entry(): - # Create a client - client = dataplex_v1.CatalogServiceAsyncClient() - - # Initialize request argument(s) - request = dataplex_v1.DeleteEntryRequest( - name="name_value", - ) - - # Make the request - response = await client.delete_entry(request=request) - - # Handle the response - print(response) - - Args: - request (Optional[Union[google.cloud.dataplex_v1.types.DeleteEntryRequest, dict]]): - The request object. Delete Entry request. - name (:class:`str`): - Required. The resource name of the Entry: - ``projects/{project}/locations/{location}/entryGroups/{entry_group}/entries/{entry}``. - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.cloud.dataplex_v1.types.Entry: - An entry is a representation of a - data resource that can be described by - various metadata. - - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([name]) - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, catalog.DeleteEntryRequest): - request = catalog.DeleteEntryRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._client._transport._wrapped_methods[self._client._transport.delete_entry] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. 
- return response - - async def list_entries(self, - request: Optional[Union[catalog.ListEntriesRequest, dict]] = None, - *, - parent: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> pagers.ListEntriesAsyncPager: - r"""Lists Entries within an EntryGroup. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import dataplex_v1 - - async def sample_list_entries(): - # Create a client - client = dataplex_v1.CatalogServiceAsyncClient() - - # Initialize request argument(s) - request = dataplex_v1.ListEntriesRequest( - parent="parent_value", - ) - - # Make the request - page_result = client.list_entries(request=request) - - # Handle the response - async for response in page_result: - print(response) - - Args: - request (Optional[Union[google.cloud.dataplex_v1.types.ListEntriesRequest, dict]]): - The request object. List Entries request. - parent (:class:`str`): - Required. The resource name of the parent Entry Group: - ``projects/{project}/locations/{location}/entryGroups/{entry_group}``. - - This corresponds to the ``parent`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.cloud.dataplex_v1.services.catalog_service.pagers.ListEntriesAsyncPager: - List Entries response. - - Iterating over this object will yield - results and resolve additional pages - automatically. - - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([parent]) - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, catalog.ListEntriesRequest): - request = catalog.ListEntriesRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if parent is not None: - request.parent = parent - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._client._transport._wrapped_methods[self._client._transport.list_entries] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. 
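-        # The transport call returns only the first page of results; the
-        # pager constructed below re-invokes the RPC lazily to resolve
-        # subsequent pages.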
-        response = await rpc(
-            request,
-            retry=retry,
-            timeout=timeout,
-            metadata=metadata,
-        )
- 
-        # This method is paged; wrap the response in a pager, which provides
-        # an `__aiter__` convenience method.
-        response = pagers.ListEntriesAsyncPager(
-            method=rpc,
-            request=request,
-            response=response,
-            retry=retry,
-            timeout=timeout,
-            metadata=metadata,
-        )
- 
-        # Done; return the response.
-        return response
- 
-    async def get_entry(self,
-            request: Optional[Union[catalog.GetEntryRequest, dict]] = None,
-            *,
-            name: Optional[str] = None,
-            retry: OptionalRetry = gapic_v1.method.DEFAULT,
-            timeout: Union[float, object] = gapic_v1.method.DEFAULT,
-            metadata: Sequence[Tuple[str, str]] = (),
-            ) -> catalog.Entry:
-        r"""Gets an Entry.
- 
-        **Caution**: The BigQuery metadata that is stored in Dataplex
-        Catalog is changing. For more information, see `Changes to
-        BigQuery metadata stored in Dataplex
-        Catalog <https://cloud.google.com/dataplex/docs/biqquery-metadata-changes>`__.
- 
-        .. code-block:: python
- 
-            # This snippet has been automatically generated and should be regarded as a
-            # code template only.
-            # It will require modifications to work:
-            # - It may require correct/in-range values for request initialization.
-            # - It may require specifying regional endpoints when creating the service
-            #   client as shown in:
-            #   https://googleapis.dev/python/google-api-core/latest/client_options.html
-            from google.cloud import dataplex_v1
- 
-            async def sample_get_entry():
-                # Create a client
-                client = dataplex_v1.CatalogServiceAsyncClient()
- 
-                # Initialize request argument(s)
-                request = dataplex_v1.GetEntryRequest(
-                    name="name_value",
-                )
- 
-                # Make the request
-                response = await client.get_entry(request=request)
- 
-                # Handle the response
-                print(response)
- 
-        Args:
-            request (Optional[Union[google.cloud.dataplex_v1.types.GetEntryRequest, dict]]):
-                The request object. Get Entry request.
-            name (:class:`str`):
-                Required. The resource name of the Entry:
-                ``projects/{project}/locations/{location}/entryGroups/{entry_group}/entries/{entry}``.
- 
-                This corresponds to the ``name`` field
-                on the ``request`` instance; if ``request`` is provided, this
-                should not be set.
-            retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any,
-                should be retried.
-            timeout (float): The timeout for this request.
-            metadata (Sequence[Tuple[str, str]]): Strings which should be
-                sent along with the request as metadata.
- 
-        Returns:
-            google.cloud.dataplex_v1.types.Entry:
-                An entry is a representation of a
-                data resource that can be described by
-                various metadata.
- 
-        """
-        # Create or coerce a protobuf request object.
-        # - Quick check: If we got a request object, we should *not* have
-        #   gotten any keyword arguments that map to the request.
-        has_flattened_params = any([name])
-        if request is not None and has_flattened_params:
-            raise ValueError("If the `request` argument is set, then none of "
-                             "the individual field arguments should be set.")
- 
-        # - Use the request object if provided (there's no risk of modifying the input as
-        #   there are no flattened fields), or create one.
-        if not isinstance(request, catalog.GetEntryRequest):
-            request = catalog.GetEntryRequest(request)
- 
-        # If we have keyword arguments corresponding to fields on the
-        # request, apply these.
-        if name is not None:
-            request.name = name
- 
-        # Wrap the RPC method; this adds retry and timeout information,
-        # and friendly error handling.
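-        # _wrapped_methods maps each transport method to a callable that is
-        # preconfigured with the service's default retry policy and timeout.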
-        rpc = self._client._transport._wrapped_methods[self._client._transport.get_entry]
- 
-        # Certain fields should be provided within the metadata header;
-        # add these here.
-        metadata = tuple(metadata) + (
-            gapic_v1.routing_header.to_grpc_metadata((
-                ("name", request.name),
-            )),
-        )
- 
-        # Validate the universe domain.
-        self._client._validate_universe_domain()
- 
-        # Send the request.
-        response = await rpc(
-            request,
-            retry=retry,
-            timeout=timeout,
-            metadata=metadata,
-        )
- 
-        # Done; return the response.
-        return response
- 
-    async def lookup_entry(self,
-            request: Optional[Union[catalog.LookupEntryRequest, dict]] = None,
-            *,
-            retry: OptionalRetry = gapic_v1.method.DEFAULT,
-            timeout: Union[float, object] = gapic_v1.method.DEFAULT,
-            metadata: Sequence[Tuple[str, str]] = (),
-            ) -> catalog.Entry:
-        r"""Looks up a single Entry by name using the permission on the
-        source system.
- 
-        **Caution**: The BigQuery metadata that is stored in Dataplex
-        Catalog is changing. For more information, see `Changes to
-        BigQuery metadata stored in Dataplex
-        Catalog <https://cloud.google.com/dataplex/docs/biqquery-metadata-changes>`__.
- 
-        .. code-block:: python
- 
-            # This snippet has been automatically generated and should be regarded as a
-            # code template only.
-            # It will require modifications to work:
-            # - It may require correct/in-range values for request initialization.
-            # - It may require specifying regional endpoints when creating the service
-            #   client as shown in:
-            #   https://googleapis.dev/python/google-api-core/latest/client_options.html
-            from google.cloud import dataplex_v1
- 
-            async def sample_lookup_entry():
-                # Create a client
-                client = dataplex_v1.CatalogServiceAsyncClient()
- 
-                # Initialize request argument(s)
-                request = dataplex_v1.LookupEntryRequest(
-                    name="name_value",
-                    entry="entry_value",
-                )
- 
-                # Make the request
-                response = await client.lookup_entry(request=request)
- 
-                # Handle the response
-                print(response)
- 
-        Args:
-            request (Optional[Union[google.cloud.dataplex_v1.types.LookupEntryRequest, dict]]):
-                The request object. Lookup Entry request using
-                permissions in the source system.
-            retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any,
-                should be retried.
-            timeout (float): The timeout for this request.
-            metadata (Sequence[Tuple[str, str]]): Strings which should be
-                sent along with the request as metadata.
- 
-        Returns:
-            google.cloud.dataplex_v1.types.Entry:
-                An entry is a representation of a
-                data resource that can be described by
-                various metadata.
- 
-        """
-        # Create or coerce a protobuf request object.
-        # - Use the request object if provided (there's no risk of modifying the input as
-        #   there are no flattened fields), or create one.
-        if not isinstance(request, catalog.LookupEntryRequest):
-            request = catalog.LookupEntryRequest(request)
- 
-        # Wrap the RPC method; this adds retry and timeout information,
-        # and friendly error handling.
-        rpc = self._client._transport._wrapped_methods[self._client._transport.lookup_entry]
- 
-        # Certain fields should be provided within the metadata header;
-        # add these here.
-        metadata = tuple(metadata) + (
-            gapic_v1.routing_header.to_grpc_metadata((
-                ("name", request.name),
-            )),
-        )
- 
-        # Validate the universe domain.
-        self._client._validate_universe_domain()
- 
-        # Send the request.
-        response = await rpc(
-            request,
-            retry=retry,
-            timeout=timeout,
-            metadata=metadata,
-        )
- 
-        # Done; return the response.
- return response - - async def search_entries(self, - request: Optional[Union[catalog.SearchEntriesRequest, dict]] = None, - *, - name: Optional[str] = None, - query: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> pagers.SearchEntriesAsyncPager: - r"""Searches for Entries matching the given query and - scope. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import dataplex_v1 - - async def sample_search_entries(): - # Create a client - client = dataplex_v1.CatalogServiceAsyncClient() - - # Initialize request argument(s) - request = dataplex_v1.SearchEntriesRequest( - name="name_value", - query="query_value", - ) - - # Make the request - page_result = client.search_entries(request=request) - - # Handle the response - async for response in page_result: - print(response) - - Args: - request (Optional[Union[google.cloud.dataplex_v1.types.SearchEntriesRequest, dict]]): - The request object. - name (:class:`str`): - Required. The project to which the request should be - attributed in the following form: - ``projects/{project}/locations/{location}``. - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - query (:class:`str`): - Required. The query against which - entries in scope should be matched. - - This corresponds to the ``query`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.cloud.dataplex_v1.services.catalog_service.pagers.SearchEntriesAsyncPager: - Iterating over this object will yield - results and resolve additional pages - automatically. - - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([name, query]) - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, catalog.SearchEntriesRequest): - request = catalog.SearchEntriesRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - if query is not None: - request.query = query - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._client._transport._wrapped_methods[self._client._transport.search_entries] - - # Certain fields should be provided within the metadata header; - # add these here. 
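-        # to_grpc_metadata() renders the routing parameters as an
-        # ``x-goog-request-params`` header so the backend can route the
-        # request by resource name.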
- metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # This method is paged; wrap the response in a pager, which provides - # an `__aiter__` convenience method. - response = pagers.SearchEntriesAsyncPager( - method=rpc, - request=request, - response=response, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - async def create_metadata_job(self, - request: Optional[Union[catalog.CreateMetadataJobRequest, dict]] = None, - *, - parent: Optional[str] = None, - metadata_job: Optional[catalog.MetadataJob] = None, - metadata_job_id: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> operation_async.AsyncOperation: - r"""Creates a metadata job. For example, use a metadata - job to import Dataplex Catalog entries and aspects from - a third-party system into Dataplex. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import dataplex_v1 - - async def sample_create_metadata_job(): - # Create a client - client = dataplex_v1.CatalogServiceAsyncClient() - - # Initialize request argument(s) - metadata_job = dataplex_v1.MetadataJob() - metadata_job.import_spec.scope.entry_groups = ['entry_groups_value1', 'entry_groups_value2'] - metadata_job.import_spec.scope.entry_types = ['entry_types_value1', 'entry_types_value2'] - metadata_job.import_spec.entry_sync_mode = "INCREMENTAL" - metadata_job.import_spec.aspect_sync_mode = "INCREMENTAL" - metadata_job.type_ = "IMPORT" - - request = dataplex_v1.CreateMetadataJobRequest( - parent="parent_value", - metadata_job=metadata_job, - ) - - # Make the request - operation = client.create_metadata_job(request=request) - - print("Waiting for operation to complete...") - - response = (await operation).result() - - # Handle the response - print(response) - - Args: - request (Optional[Union[google.cloud.dataplex_v1.types.CreateMetadataJobRequest, dict]]): - The request object. Create metadata job request. - parent (:class:`str`): - Required. The resource name of the parent location, in - the format - ``projects/{project_id_or_number}/locations/{location_id}`` - - This corresponds to the ``parent`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - metadata_job (:class:`google.cloud.dataplex_v1.types.MetadataJob`): - Required. The metadata job resource. - This corresponds to the ``metadata_job`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - metadata_job_id (:class:`str`): - Optional. The metadata job ID. If not provided, a unique - ID is generated with the prefix ``metadata-job-``. - - This corresponds to the ``metadata_job_id`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. 
- retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.api_core.operation_async.AsyncOperation: - An object representing a long-running operation. - - The result type for the operation will be - :class:`google.cloud.dataplex_v1.types.MetadataJob` A - metadata job resource. - - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([parent, metadata_job, metadata_job_id]) - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, catalog.CreateMetadataJobRequest): - request = catalog.CreateMetadataJobRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if parent is not None: - request.parent = parent - if metadata_job is not None: - request.metadata_job = metadata_job - if metadata_job_id is not None: - request.metadata_job_id = metadata_job_id - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._client._transport._wrapped_methods[self._client._transport.create_metadata_job] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Wrap the response in an operation future. - response = operation_async.from_gapic( - response, - self._client._transport.operations_client, - catalog.MetadataJob, - metadata_type=service.OperationMetadata, - ) - - # Done; return the response. - return response - - async def get_metadata_job(self, - request: Optional[Union[catalog.GetMetadataJobRequest, dict]] = None, - *, - name: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> catalog.MetadataJob: - r"""Gets a metadata job. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. 
- # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import dataplex_v1 - - async def sample_get_metadata_job(): - # Create a client - client = dataplex_v1.CatalogServiceAsyncClient() - - # Initialize request argument(s) - request = dataplex_v1.GetMetadataJobRequest( - name="name_value", - ) - - # Make the request - response = await client.get_metadata_job(request=request) - - # Handle the response - print(response) - - Args: - request (Optional[Union[google.cloud.dataplex_v1.types.GetMetadataJobRequest, dict]]): - The request object. Get metadata job request. - name (:class:`str`): - Required. The resource name of the metadata job, in the - format - ``projects/{project_id_or_number}/locations/{location_id}/metadataJobs/{metadata_job_id}``. - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.cloud.dataplex_v1.types.MetadataJob: - A metadata job resource. - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([name]) - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, catalog.GetMetadataJobRequest): - request = catalog.GetMetadataJobRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._client._transport._wrapped_methods[self._client._transport.get_metadata_job] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - async def list_metadata_jobs(self, - request: Optional[Union[catalog.ListMetadataJobsRequest, dict]] = None, - *, - parent: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> pagers.ListMetadataJobsAsyncPager: - r"""Lists metadata jobs. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. 
- # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import dataplex_v1 - - async def sample_list_metadata_jobs(): - # Create a client - client = dataplex_v1.CatalogServiceAsyncClient() - - # Initialize request argument(s) - request = dataplex_v1.ListMetadataJobsRequest( - parent="parent_value", - ) - - # Make the request - page_result = client.list_metadata_jobs(request=request) - - # Handle the response - async for response in page_result: - print(response) - - Args: - request (Optional[Union[google.cloud.dataplex_v1.types.ListMetadataJobsRequest, dict]]): - The request object. List metadata jobs request. - parent (:class:`str`): - Required. The resource name of the parent location, in - the format - ``projects/{project_id_or_number}/locations/{location_id}`` - - This corresponds to the ``parent`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.cloud.dataplex_v1.services.catalog_service.pagers.ListMetadataJobsAsyncPager: - List metadata jobs response. - - Iterating over this object will yield - results and resolve additional pages - automatically. - - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([parent]) - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, catalog.ListMetadataJobsRequest): - request = catalog.ListMetadataJobsRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if parent is not None: - request.parent = parent - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._client._transport._wrapped_methods[self._client._transport.list_metadata_jobs] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # This method is paged; wrap the response in a pager, which provides - # an `__aiter__` convenience method. - response = pagers.ListMetadataJobsAsyncPager( - method=rpc, - request=request, - response=response, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. 
- return response - - async def cancel_metadata_job(self, - request: Optional[Union[catalog.CancelMetadataJobRequest, dict]] = None, - *, - name: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> None: - r"""Cancels a metadata job. - - If you cancel a metadata import job that is in progress, - the changes in the job might be partially applied. We - recommend that you reset the state of the entry groups - in your project by running another metadata job that - reverts the changes from the canceled job. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import dataplex_v1 - - async def sample_cancel_metadata_job(): - # Create a client - client = dataplex_v1.CatalogServiceAsyncClient() - - # Initialize request argument(s) - request = dataplex_v1.CancelMetadataJobRequest( - name="name_value", - ) - - # Make the request - await client.cancel_metadata_job(request=request) - - Args: - request (Optional[Union[google.cloud.dataplex_v1.types.CancelMetadataJobRequest, dict]]): - The request object. Cancel metadata job request. - name (:class:`str`): - Required. The resource name of the job, in the format - ``projects/{project_id_or_number}/locations/{location_id}/metadataJobs/{metadata_job_id}`` - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([name]) - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, catalog.CancelMetadataJobRequest): - request = catalog.CancelMetadataJobRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._client._transport._wrapped_methods[self._client._transport.cancel_metadata_job] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. 
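-        # The RPC returns google.protobuf.Empty, so the result is discarded
-        # and the method returns None.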
- await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - async def list_operations( - self, - request: Optional[operations_pb2.ListOperationsRequest] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> operations_pb2.ListOperationsResponse: - r"""Lists operations that match the specified filter in the request. - - Args: - request (:class:`~.operations_pb2.ListOperationsRequest`): - The request object. Request message for - `ListOperations` method. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, - if any, should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - Returns: - ~.operations_pb2.ListOperationsResponse: - Response message for ``ListOperations`` method. - """ - # Create or coerce a protobuf request object. - # The request isn't a proto-plus wrapped type, - # so it must be constructed via keyword expansion. - if isinstance(request, dict): - request = operations_pb2.ListOperationsRequest(**request) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self.transport._wrapped_methods[self._client._transport.list_operations] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata( - (("name", request.name),)), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. - response = await rpc( - request, retry=retry, timeout=timeout, metadata=metadata,) - - # Done; return the response. - return response - - async def get_operation( - self, - request: Optional[operations_pb2.GetOperationRequest] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> operations_pb2.Operation: - r"""Gets the latest state of a long-running operation. - - Args: - request (:class:`~.operations_pb2.GetOperationRequest`): - The request object. Request message for - `GetOperation` method. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, - if any, should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - Returns: - ~.operations_pb2.Operation: - An ``Operation`` object. - """ - # Create or coerce a protobuf request object. - # The request isn't a proto-plus wrapped type, - # so it must be constructed via keyword expansion. - if isinstance(request, dict): - request = operations_pb2.GetOperationRequest(**request) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self.transport._wrapped_methods[self._client._transport.get_operation] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata( - (("name", request.name),)), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. - response = await rpc( - request, retry=retry, timeout=timeout, metadata=metadata,) - - # Done; return the response. 
- return response - - async def delete_operation( - self, - request: Optional[operations_pb2.DeleteOperationRequest] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> None: - r"""Deletes a long-running operation. - - This method indicates that the client is no longer interested - in the operation result. It does not cancel the operation. - If the server doesn't support this method, it returns - `google.rpc.Code.UNIMPLEMENTED`. - - Args: - request (:class:`~.operations_pb2.DeleteOperationRequest`): - The request object. Request message for - `DeleteOperation` method. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, - if any, should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - Returns: - None - """ - # Create or coerce a protobuf request object. - # The request isn't a proto-plus wrapped type, - # so it must be constructed via keyword expansion. - if isinstance(request, dict): - request = operations_pb2.DeleteOperationRequest(**request) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self.transport._wrapped_methods[self._client._transport.delete_operation] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata( - (("name", request.name),)), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. - await rpc(request, retry=retry, timeout=timeout, metadata=metadata,) - - async def cancel_operation( - self, - request: Optional[operations_pb2.CancelOperationRequest] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> None: - r"""Starts asynchronous cancellation on a long-running operation. - - The server makes a best effort to cancel the operation, but success - is not guaranteed. If the server doesn't support this method, it returns - `google.rpc.Code.UNIMPLEMENTED`. - - Args: - request (:class:`~.operations_pb2.CancelOperationRequest`): - The request object. Request message for - `CancelOperation` method. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, - if any, should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - Returns: - None - """ - # Create or coerce a protobuf request object. - # The request isn't a proto-plus wrapped type, - # so it must be constructed via keyword expansion. - if isinstance(request, dict): - request = operations_pb2.CancelOperationRequest(**request) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self.transport._wrapped_methods[self._client._transport.cancel_operation] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata( - (("name", request.name),)), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. 
- await rpc(request, retry=retry, timeout=timeout, metadata=metadata,) - - async def get_location( - self, - request: Optional[locations_pb2.GetLocationRequest] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> locations_pb2.Location: - r"""Gets information about a location. - - Args: - request (:class:`~.location_pb2.GetLocationRequest`): - The request object. Request message for - `GetLocation` method. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, - if any, should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - Returns: - ~.location_pb2.Location: - Location object. - """ - # Create or coerce a protobuf request object. - # The request isn't a proto-plus wrapped type, - # so it must be constructed via keyword expansion. - if isinstance(request, dict): - request = locations_pb2.GetLocationRequest(**request) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self.transport._wrapped_methods[self._client._transport.get_location] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata( - (("name", request.name),)), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. - response = await rpc( - request, retry=retry, timeout=timeout, metadata=metadata,) - - # Done; return the response. - return response - - async def list_locations( - self, - request: Optional[locations_pb2.ListLocationsRequest] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> locations_pb2.ListLocationsResponse: - r"""Lists information about the supported locations for this service. - - Args: - request (:class:`~.location_pb2.ListLocationsRequest`): - The request object. Request message for - `ListLocations` method. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, - if any, should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - Returns: - ~.location_pb2.ListLocationsResponse: - Response message for ``ListLocations`` method. - """ - # Create or coerce a protobuf request object. - # The request isn't a proto-plus wrapped type, - # so it must be constructed via keyword expansion. - if isinstance(request, dict): - request = locations_pb2.ListLocationsRequest(**request) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self.transport._wrapped_methods[self._client._transport.list_locations] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata( - (("name", request.name),)), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. - response = await rpc( - request, retry=retry, timeout=timeout, metadata=metadata,) - - # Done; return the response. 
- return response - - async def __aenter__(self) -> "CatalogServiceAsyncClient": - return self - - async def __aexit__(self, exc_type, exc, tb): - await self.transport.close() - -DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(gapic_version=package_version.__version__) - - -__all__ = ( - "CatalogServiceAsyncClient", -) diff --git a/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/catalog_service/client.py b/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/catalog_service/client.py deleted file mode 100644 index 1c5c93d92538..000000000000 --- a/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/catalog_service/client.py +++ /dev/null @@ -1,3986 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -from collections import OrderedDict -import os -import re -from typing import Dict, Callable, Mapping, MutableMapping, MutableSequence, Optional, Sequence, Tuple, Type, Union, cast -import warnings - -from google.cloud.dataplex_v1 import gapic_version as package_version - -from google.api_core import client_options as client_options_lib -from google.api_core import exceptions as core_exceptions -from google.api_core import gapic_v1 -from google.api_core import retry as retries -from google.auth import credentials as ga_credentials # type: ignore -from google.auth.transport import mtls # type: ignore -from google.auth.transport.grpc import SslCredentials # type: ignore -from google.auth.exceptions import MutualTLSChannelError # type: ignore -from google.oauth2 import service_account # type: ignore - -try: - OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault, None] -except AttributeError: # pragma: NO COVER - OptionalRetry = Union[retries.Retry, object, None] # type: ignore - -from google.api_core import operation # type: ignore -from google.api_core import operation_async # type: ignore -from google.cloud.dataplex_v1.services.catalog_service import pagers -from google.cloud.dataplex_v1.types import catalog -from google.cloud.dataplex_v1.types import service -from google.cloud.location import locations_pb2 # type: ignore -from google.iam.v1 import iam_policy_pb2 # type: ignore -from google.iam.v1 import policy_pb2 # type: ignore -from google.longrunning import operations_pb2 # type: ignore -from google.protobuf import empty_pb2 # type: ignore -from google.protobuf import field_mask_pb2 # type: ignore -from google.protobuf import timestamp_pb2 # type: ignore -from .transports.base import CatalogServiceTransport, DEFAULT_CLIENT_INFO -from .transports.grpc import CatalogServiceGrpcTransport -from .transports.grpc_asyncio import CatalogServiceGrpcAsyncIOTransport - - -class CatalogServiceClientMeta(type): - """Metaclass for the CatalogService client. - - This provides class-level methods for building and retrieving - support objects (e.g. transport) without polluting the client instance - objects. 
- """ - _transport_registry = OrderedDict() # type: Dict[str, Type[CatalogServiceTransport]] - _transport_registry["grpc"] = CatalogServiceGrpcTransport - _transport_registry["grpc_asyncio"] = CatalogServiceGrpcAsyncIOTransport - - def get_transport_class(cls, - label: Optional[str] = None, - ) -> Type[CatalogServiceTransport]: - """Returns an appropriate transport class. - - Args: - label: The name of the desired transport. If none is - provided, then the first transport in the registry is used. - - Returns: - The transport class to use. - """ - # If a specific transport is requested, return that one. - if label: - return cls._transport_registry[label] - - # No transport is requested; return the default (that is, the first one - # in the dictionary). - return next(iter(cls._transport_registry.values())) - - -class CatalogServiceClient(metaclass=CatalogServiceClientMeta): - """The primary resources offered by this service are - EntryGroups, EntryTypes, AspectTypes, and Entries. They - collectively let data administrators organize, manage, secure, - and catalog data located across cloud projects in their - organization in a variety of storage systems, including Cloud - Storage and BigQuery. - """ - - @staticmethod - def _get_default_mtls_endpoint(api_endpoint): - """Converts api endpoint to mTLS endpoint. - - Convert "*.sandbox.googleapis.com" and "*.googleapis.com" to - "*.mtls.sandbox.googleapis.com" and "*.mtls.googleapis.com" respectively. - Args: - api_endpoint (Optional[str]): the api endpoint to convert. - Returns: - str: converted mTLS api endpoint. - """ - if not api_endpoint: - return api_endpoint - - mtls_endpoint_re = re.compile( - r"(?P[^.]+)(?P\.mtls)?(?P\.sandbox)?(?P\.googleapis\.com)?" - ) - - m = mtls_endpoint_re.match(api_endpoint) - name, mtls, sandbox, googledomain = m.groups() - if mtls or not googledomain: - return api_endpoint - - if sandbox: - return api_endpoint.replace( - "sandbox.googleapis.com", "mtls.sandbox.googleapis.com" - ) - - return api_endpoint.replace(".googleapis.com", ".mtls.googleapis.com") - - # Note: DEFAULT_ENDPOINT is deprecated. Use _DEFAULT_ENDPOINT_TEMPLATE instead. - DEFAULT_ENDPOINT = "dataplex.googleapis.com" - DEFAULT_MTLS_ENDPOINT = _get_default_mtls_endpoint.__func__( # type: ignore - DEFAULT_ENDPOINT - ) - - _DEFAULT_ENDPOINT_TEMPLATE = "dataplex.{UNIVERSE_DOMAIN}" - _DEFAULT_UNIVERSE = "googleapis.com" - - @classmethod - def from_service_account_info(cls, info: dict, *args, **kwargs): - """Creates an instance of this client using the provided credentials - info. - - Args: - info (dict): The service account private key info. - args: Additional arguments to pass to the constructor. - kwargs: Additional arguments to pass to the constructor. - - Returns: - CatalogServiceClient: The constructed client. - """ - credentials = service_account.Credentials.from_service_account_info(info) - kwargs["credentials"] = credentials - return cls(*args, **kwargs) - - @classmethod - def from_service_account_file(cls, filename: str, *args, **kwargs): - """Creates an instance of this client using the provided credentials - file. - - Args: - filename (str): The path to the service account private key json - file. - args: Additional arguments to pass to the constructor. - kwargs: Additional arguments to pass to the constructor. - - Returns: - CatalogServiceClient: The constructed client. 
- """ - credentials = service_account.Credentials.from_service_account_file( - filename) - kwargs["credentials"] = credentials - return cls(*args, **kwargs) - - from_service_account_json = from_service_account_file - - @property - def transport(self) -> CatalogServiceTransport: - """Returns the transport used by the client instance. - - Returns: - CatalogServiceTransport: The transport used by the client - instance. - """ - return self._transport - - @staticmethod - def aspect_type_path(project: str,location: str,aspect_type: str,) -> str: - """Returns a fully-qualified aspect_type string.""" - return "projects/{project}/locations/{location}/aspectTypes/{aspect_type}".format(project=project, location=location, aspect_type=aspect_type, ) - - @staticmethod - def parse_aspect_type_path(path: str) -> Dict[str,str]: - """Parses a aspect_type path into its component segments.""" - m = re.match(r"^projects/(?P.+?)/locations/(?P.+?)/aspectTypes/(?P.+?)$", path) - return m.groupdict() if m else {} - - @staticmethod - def entry_path(project: str,location: str,entry_group: str,entry: str,) -> str: - """Returns a fully-qualified entry string.""" - return "projects/{project}/locations/{location}/entryGroups/{entry_group}/entries/{entry}".format(project=project, location=location, entry_group=entry_group, entry=entry, ) - - @staticmethod - def parse_entry_path(path: str) -> Dict[str,str]: - """Parses a entry path into its component segments.""" - m = re.match(r"^projects/(?P.+?)/locations/(?P.+?)/entryGroups/(?P.+?)/entries/(?P.+?)$", path) - return m.groupdict() if m else {} - - @staticmethod - def entry_group_path(project: str,location: str,entry_group: str,) -> str: - """Returns a fully-qualified entry_group string.""" - return "projects/{project}/locations/{location}/entryGroups/{entry_group}".format(project=project, location=location, entry_group=entry_group, ) - - @staticmethod - def parse_entry_group_path(path: str) -> Dict[str,str]: - """Parses a entry_group path into its component segments.""" - m = re.match(r"^projects/(?P.+?)/locations/(?P.+?)/entryGroups/(?P.+?)$", path) - return m.groupdict() if m else {} - - @staticmethod - def entry_type_path(project: str,location: str,entry_type: str,) -> str: - """Returns a fully-qualified entry_type string.""" - return "projects/{project}/locations/{location}/entryTypes/{entry_type}".format(project=project, location=location, entry_type=entry_type, ) - - @staticmethod - def parse_entry_type_path(path: str) -> Dict[str,str]: - """Parses a entry_type path into its component segments.""" - m = re.match(r"^projects/(?P.+?)/locations/(?P.+?)/entryTypes/(?P.+?)$", path) - return m.groupdict() if m else {} - - @staticmethod - def metadata_job_path(project: str,location: str,metadataJob: str,) -> str: - """Returns a fully-qualified metadata_job string.""" - return "projects/{project}/locations/{location}/metadataJobs/{metadataJob}".format(project=project, location=location, metadataJob=metadataJob, ) - - @staticmethod - def parse_metadata_job_path(path: str) -> Dict[str,str]: - """Parses a metadata_job path into its component segments.""" - m = re.match(r"^projects/(?P.+?)/locations/(?P.+?)/metadataJobs/(?P.+?)$", path) - return m.groupdict() if m else {} - - @staticmethod - def common_billing_account_path(billing_account: str, ) -> str: - """Returns a fully-qualified billing_account string.""" - return "billingAccounts/{billing_account}".format(billing_account=billing_account, ) - - @staticmethod - def parse_common_billing_account_path(path: str) -> 
Dict[str,str]: - """Parse a billing_account path into its component segments.""" - m = re.match(r"^billingAccounts/(?P.+?)$", path) - return m.groupdict() if m else {} - - @staticmethod - def common_folder_path(folder: str, ) -> str: - """Returns a fully-qualified folder string.""" - return "folders/{folder}".format(folder=folder, ) - - @staticmethod - def parse_common_folder_path(path: str) -> Dict[str,str]: - """Parse a folder path into its component segments.""" - m = re.match(r"^folders/(?P.+?)$", path) - return m.groupdict() if m else {} - - @staticmethod - def common_organization_path(organization: str, ) -> str: - """Returns a fully-qualified organization string.""" - return "organizations/{organization}".format(organization=organization, ) - - @staticmethod - def parse_common_organization_path(path: str) -> Dict[str,str]: - """Parse a organization path into its component segments.""" - m = re.match(r"^organizations/(?P.+?)$", path) - return m.groupdict() if m else {} - - @staticmethod - def common_project_path(project: str, ) -> str: - """Returns a fully-qualified project string.""" - return "projects/{project}".format(project=project, ) - - @staticmethod - def parse_common_project_path(path: str) -> Dict[str,str]: - """Parse a project path into its component segments.""" - m = re.match(r"^projects/(?P.+?)$", path) - return m.groupdict() if m else {} - - @staticmethod - def common_location_path(project: str, location: str, ) -> str: - """Returns a fully-qualified location string.""" - return "projects/{project}/locations/{location}".format(project=project, location=location, ) - - @staticmethod - def parse_common_location_path(path: str) -> Dict[str,str]: - """Parse a location path into its component segments.""" - m = re.match(r"^projects/(?P.+?)/locations/(?P.+?)$", path) - return m.groupdict() if m else {} - - @classmethod - def get_mtls_endpoint_and_cert_source(cls, client_options: Optional[client_options_lib.ClientOptions] = None): - """Deprecated. Return the API endpoint and client cert source for mutual TLS. - - The client cert source is determined in the following order: - (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the - client cert source is None. - (2) if `client_options.client_cert_source` is provided, use the provided one; if the - default client cert source exists, use the default one; otherwise the client cert - source is None. - - The API endpoint is determined in the following order: - (1) if `client_options.api_endpoint` if provided, use the provided one. - (2) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is "always", use the - default mTLS endpoint; if the environment variable is "never", use the default API - endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise - use the default API endpoint. - - More details can be found at https://google.aip.dev/auth/4114. - - Args: - client_options (google.api_core.client_options.ClientOptions): Custom options for the - client. Only the `api_endpoint` and `client_cert_source` properties may be used - in this method. - - Returns: - Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the - client cert source to use. - - Raises: - google.auth.exceptions.MutualTLSChannelError: If any errors happen. - """ - - warnings.warn("get_mtls_endpoint_and_cert_source is deprecated. 
Use the api_endpoint property instead.", - DeprecationWarning) - if client_options is None: - client_options = client_options_lib.ClientOptions() - use_client_cert = os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false") - use_mtls_endpoint = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto") - if use_client_cert not in ("true", "false"): - raise ValueError("Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`") - if use_mtls_endpoint not in ("auto", "never", "always"): - raise MutualTLSChannelError("Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`") - - # Figure out the client cert source to use. - client_cert_source = None - if use_client_cert == "true": - if client_options.client_cert_source: - client_cert_source = client_options.client_cert_source - elif mtls.has_default_client_cert_source(): - client_cert_source = mtls.default_client_cert_source() - - # Figure out which api endpoint to use. - if client_options.api_endpoint is not None: - api_endpoint = client_options.api_endpoint - elif use_mtls_endpoint == "always" or (use_mtls_endpoint == "auto" and client_cert_source): - api_endpoint = cls.DEFAULT_MTLS_ENDPOINT - else: - api_endpoint = cls.DEFAULT_ENDPOINT - - return api_endpoint, client_cert_source - - @staticmethod - def _read_environment_variables(): - """Returns the environment variables used by the client. - - Returns: - Tuple[bool, str, str]: returns the GOOGLE_API_USE_CLIENT_CERTIFICATE, - GOOGLE_API_USE_MTLS_ENDPOINT, and GOOGLE_CLOUD_UNIVERSE_DOMAIN environment variables. - - Raises: - ValueError: If GOOGLE_API_USE_CLIENT_CERTIFICATE is not - any of ["true", "false"]. - google.auth.exceptions.MutualTLSChannelError: If GOOGLE_API_USE_MTLS_ENDPOINT - is not any of ["auto", "never", "always"]. - """ - use_client_cert = os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false").lower() - use_mtls_endpoint = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto").lower() - universe_domain_env = os.getenv("GOOGLE_CLOUD_UNIVERSE_DOMAIN") - if use_client_cert not in ("true", "false"): - raise ValueError("Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`") - if use_mtls_endpoint not in ("auto", "never", "always"): - raise MutualTLSChannelError("Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`") - return use_client_cert == "true", use_mtls_endpoint, universe_domain_env - - @staticmethod - def _get_client_cert_source(provided_cert_source, use_cert_flag): - """Return the client cert source to be used by the client. - - Args: - provided_cert_source (bytes): The client certificate source provided. - use_cert_flag (bool): A flag indicating whether to use the client certificate. - - Returns: - bytes or None: The client cert source to be used by the client. - """ - client_cert_source = None - if use_cert_flag: - if provided_cert_source: - client_cert_source = provided_cert_source - elif mtls.has_default_client_cert_source(): - client_cert_source = mtls.default_client_cert_source() - return client_cert_source - - @staticmethod - def _get_api_endpoint(api_override, client_cert_source, universe_domain, use_mtls_endpoint): - """Return the API endpoint used by the client. - - Args: - api_override (str): The API endpoint override. If specified, this is always - the return value of this function and the other arguments are not used. - client_cert_source (bytes): The client certificate source used by the client. 
- universe_domain (str): The universe domain used by the client. - use_mtls_endpoint (str): How to use the mTLS endpoint, which depends also on the other parameters. - Possible values are "always", "auto", or "never". - - Returns: - str: The API endpoint to be used by the client. - """ - if api_override is not None: - api_endpoint = api_override - elif use_mtls_endpoint == "always" or (use_mtls_endpoint == "auto" and client_cert_source): - _default_universe = CatalogServiceClient._DEFAULT_UNIVERSE - if universe_domain != _default_universe: - raise MutualTLSChannelError(f"mTLS is not supported in any universe other than {_default_universe}.") - api_endpoint = CatalogServiceClient.DEFAULT_MTLS_ENDPOINT - else: - api_endpoint = CatalogServiceClient._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=universe_domain) - return api_endpoint - - @staticmethod - def _get_universe_domain(client_universe_domain: Optional[str], universe_domain_env: Optional[str]) -> str: - """Return the universe domain used by the client. - - Args: - client_universe_domain (Optional[str]): The universe domain configured via the client options. - universe_domain_env (Optional[str]): The universe domain configured via the "GOOGLE_CLOUD_UNIVERSE_DOMAIN" environment variable. - - Returns: - str: The universe domain to be used by the client. - - Raises: - ValueError: If the universe domain is an empty string. - """ - universe_domain = CatalogServiceClient._DEFAULT_UNIVERSE - if client_universe_domain is not None: - universe_domain = client_universe_domain - elif universe_domain_env is not None: - universe_domain = universe_domain_env - if len(universe_domain.strip()) == 0: - raise ValueError("Universe Domain cannot be an empty string.") - return universe_domain - - def _validate_universe_domain(self): - """Validates client's and credentials' universe domains are consistent. - - Returns: - bool: True iff the configured universe domain is valid. - - Raises: - ValueError: If the configured universe domain is not valid. - """ - - # NOTE (b/349488459): universe validation is disabled until further notice. - return True - - @property - def api_endpoint(self): - """Return the API endpoint used by the client instance. - - Returns: - str: The API endpoint used by the client instance. - """ - return self._api_endpoint - - @property - def universe_domain(self) -> str: - """Return the universe domain used by the client instance. - - Returns: - str: The universe domain used by the client instance. - """ - return self._universe_domain - - def __init__(self, *, - credentials: Optional[ga_credentials.Credentials] = None, - transport: Optional[Union[str, CatalogServiceTransport, Callable[..., CatalogServiceTransport]]] = None, - client_options: Optional[Union[client_options_lib.ClientOptions, dict]] = None, - client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, - ) -> None: - """Instantiates the catalog service client. - - Args: - credentials (Optional[google.auth.credentials.Credentials]): The - authorization credentials to attach to requests. These - credentials identify the application to the service; if none - are specified, the client will attempt to ascertain the - credentials from the environment. - transport (Optional[Union[str,CatalogServiceTransport,Callable[..., CatalogServiceTransport]]]): - The transport to use, or a Callable that constructs and returns a new transport. - If a Callable is given, it will be called with the same set of initialization - arguments as used in the CatalogServiceTransport constructor. 
- If set to None, a transport is chosen automatically.
- client_options (Optional[Union[google.api_core.client_options.ClientOptions, dict]]):
- Custom options for the client.
-
- 1. The ``api_endpoint`` property can be used to override the
- default endpoint provided by the client when ``transport`` is
- not explicitly provided. Only if this property is not set and
- ``transport`` was not explicitly provided, the endpoint is
- determined by the GOOGLE_API_USE_MTLS_ENDPOINT environment
- variable, which has one of the following values:
- "always" (always use the default mTLS endpoint), "never" (always
- use the default regular endpoint) and "auto" (auto-switch to the
- default mTLS endpoint if client certificate is present; this is
- the default value).
-
- 2. If the GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable
- is "true", then the ``client_cert_source`` property can be used
- to provide a client certificate for mTLS transport. If
- not provided, the default SSL client certificate will be used if
- present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not
- set, no client certificate will be used.
-
- 3. The ``universe_domain`` property can be used to override the
- default "googleapis.com" universe. Note that the ``api_endpoint``
- property still takes precedence; and ``universe_domain`` is
- currently not supported for mTLS.
-
- client_info (google.api_core.gapic_v1.client_info.ClientInfo):
- The client info used to send a user-agent string along with
- API requests. If ``None``, then default info will be used.
- Generally, you only need to set this if you're developing
- your own client library.
-
- Raises:
- google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport
- creation failed for any reason.
- """
- self._client_options = client_options
- if isinstance(self._client_options, dict):
- self._client_options = client_options_lib.from_dict(self._client_options)
- if self._client_options is None:
- self._client_options = client_options_lib.ClientOptions()
- self._client_options = cast(client_options_lib.ClientOptions, self._client_options)
-
- universe_domain_opt = getattr(self._client_options, 'universe_domain', None)
-
- self._use_client_cert, self._use_mtls_endpoint, self._universe_domain_env = CatalogServiceClient._read_environment_variables()
- self._client_cert_source = CatalogServiceClient._get_client_cert_source(self._client_options.client_cert_source, self._use_client_cert)
- self._universe_domain = CatalogServiceClient._get_universe_domain(universe_domain_opt, self._universe_domain_env)
- self._api_endpoint = None # updated below, depending on `transport`
-
- # Initialize the universe domain validation.
- self._is_universe_domain_valid = False
-
- api_key_value = getattr(self._client_options, "api_key", None)
- if api_key_value and credentials:
- raise ValueError("client_options.api_key and credentials are mutually exclusive")
-
- # Save or instantiate the transport.
- # Ordinarily, we provide the transport, but allowing a custom transport
- # instance provides an extensibility point for unusual situations.
- transport_provided = isinstance(transport, CatalogServiceTransport)
- if transport_provided:
- # transport is a CatalogServiceTransport instance.
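- # A pre-built transport must already carry its own credentials and
- # scopes; a usage sketch (assuming `creds` is an existing
- # google.auth Credentials object):
- #   transport = CatalogServiceGrpcTransport(credentials=creds)
- #   client = CatalogServiceClient(transport=transport)
- # which is why passing `credentials` alongside a transport raises below.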
- if credentials or self._client_options.credentials_file or api_key_value: - raise ValueError("When providing a transport instance, " - "provide its credentials directly.") - if self._client_options.scopes: - raise ValueError( - "When providing a transport instance, provide its scopes " - "directly." - ) - self._transport = cast(CatalogServiceTransport, transport) - self._api_endpoint = self._transport.host - - self._api_endpoint = (self._api_endpoint or - CatalogServiceClient._get_api_endpoint( - self._client_options.api_endpoint, - self._client_cert_source, - self._universe_domain, - self._use_mtls_endpoint)) - - if not transport_provided: - import google.auth._default # type: ignore - - if api_key_value and hasattr(google.auth._default, "get_api_key_credentials"): - credentials = google.auth._default.get_api_key_credentials(api_key_value) - - transport_init: Union[Type[CatalogServiceTransport], Callable[..., CatalogServiceTransport]] = ( - CatalogServiceClient.get_transport_class(transport) - if isinstance(transport, str) or transport is None - else cast(Callable[..., CatalogServiceTransport], transport) - ) - # initialize with the provided callable or the passed in class - self._transport = transport_init( - credentials=credentials, - credentials_file=self._client_options.credentials_file, - host=self._api_endpoint, - scopes=self._client_options.scopes, - client_cert_source_for_mtls=self._client_cert_source, - quota_project_id=self._client_options.quota_project_id, - client_info=client_info, - always_use_jwt_access=True, - api_audience=self._client_options.api_audience, - ) - - def create_entry_type(self, - request: Optional[Union[catalog.CreateEntryTypeRequest, dict]] = None, - *, - parent: Optional[str] = None, - entry_type: Optional[catalog.EntryType] = None, - entry_type_id: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> operation.Operation: - r"""Creates an EntryType. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import dataplex_v1 - - def sample_create_entry_type(): - # Create a client - client = dataplex_v1.CatalogServiceClient() - - # Initialize request argument(s) - request = dataplex_v1.CreateEntryTypeRequest( - parent="parent_value", - entry_type_id="entry_type_id_value", - ) - - # Make the request - operation = client.create_entry_type(request=request) - - print("Waiting for operation to complete...") - - response = operation.result() - - # Handle the response - print(response) - - Args: - request (Union[google.cloud.dataplex_v1.types.CreateEntryTypeRequest, dict]): - The request object. Create EntryType Request. - parent (str): - Required. The resource name of the EntryType, of the - form: projects/{project_number}/locations/{location_id} - where ``location_id`` refers to a Google Cloud region. - - This corresponds to the ``parent`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - entry_type (google.cloud.dataplex_v1.types.EntryType): - Required. EntryType Resource. 
- This corresponds to the ``entry_type`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - entry_type_id (str): - Required. EntryType identifier. - This corresponds to the ``entry_type_id`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.api_core.operation.Operation: - An object representing a long-running operation. - - The result type for the operation will be - :class:`google.cloud.dataplex_v1.types.EntryType` Entry - Type is a template for creating Entries. - - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([parent, entry_type, entry_type_id]) - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, catalog.CreateEntryTypeRequest): - request = catalog.CreateEntryTypeRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if parent is not None: - request.parent = parent - if entry_type is not None: - request.entry_type = entry_type - if entry_type_id is not None: - request.entry_type_id = entry_type_id - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.create_entry_type] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), - ) - - # Validate the universe domain. - self._validate_universe_domain() - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Wrap the response in an operation future. - response = operation.from_gapic( - response, - self._transport.operations_client, - catalog.EntryType, - metadata_type=service.OperationMetadata, - ) - - # Done; return the response. - return response - - def update_entry_type(self, - request: Optional[Union[catalog.UpdateEntryTypeRequest, dict]] = None, - *, - entry_type: Optional[catalog.EntryType] = None, - update_mask: Optional[field_mask_pb2.FieldMask] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> operation.Operation: - r"""Updates an EntryType. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. 
- # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import dataplex_v1 - - def sample_update_entry_type(): - # Create a client - client = dataplex_v1.CatalogServiceClient() - - # Initialize request argument(s) - request = dataplex_v1.UpdateEntryTypeRequest( - ) - - # Make the request - operation = client.update_entry_type(request=request) - - print("Waiting for operation to complete...") - - response = operation.result() - - # Handle the response - print(response) - - Args: - request (Union[google.cloud.dataplex_v1.types.UpdateEntryTypeRequest, dict]): - The request object. Update EntryType Request. - entry_type (google.cloud.dataplex_v1.types.EntryType): - Required. EntryType Resource. - This corresponds to the ``entry_type`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - update_mask (google.protobuf.field_mask_pb2.FieldMask): - Required. Mask of fields to update. - This corresponds to the ``update_mask`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.api_core.operation.Operation: - An object representing a long-running operation. - - The result type for the operation will be - :class:`google.cloud.dataplex_v1.types.EntryType` Entry - Type is a template for creating Entries. - - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([entry_type, update_mask]) - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, catalog.UpdateEntryTypeRequest): - request = catalog.UpdateEntryTypeRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if entry_type is not None: - request.entry_type = entry_type - if update_mask is not None: - request.update_mask = update_mask - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.update_entry_type] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("entry_type.name", request.entry_type.name), - )), - ) - - # Validate the universe domain. - self._validate_universe_domain() - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Wrap the response in an operation future. - response = operation.from_gapic( - response, - self._transport.operations_client, - catalog.EntryType, - metadata_type=service.OperationMetadata, - ) - - # Done; return the response. 
- return response
-
- def delete_entry_type(self,
- request: Optional[Union[catalog.DeleteEntryTypeRequest, dict]] = None,
- *,
- name: Optional[str] = None,
- retry: OptionalRetry = gapic_v1.method.DEFAULT,
- timeout: Union[float, object] = gapic_v1.method.DEFAULT,
- metadata: Sequence[Tuple[str, str]] = (),
- ) -> operation.Operation:
- r"""Deletes an EntryType.
-
- .. code-block:: python
-
- # This snippet has been automatically generated and should be regarded as a
- # code template only.
- # It will require modifications to work:
- # - It may require correct/in-range values for request initialization.
- # - It may require specifying regional endpoints when creating the service
- # client as shown in:
- # https://googleapis.dev/python/google-api-core/latest/client_options.html
- from google.cloud import dataplex_v1
-
- def sample_delete_entry_type():
- # Create a client
- client = dataplex_v1.CatalogServiceClient()
-
- # Initialize request argument(s)
- request = dataplex_v1.DeleteEntryTypeRequest(
- name="name_value",
- )
-
- # Make the request
- operation = client.delete_entry_type(request=request)
-
- print("Waiting for operation to complete...")
-
- response = operation.result()
-
- # Handle the response
- print(response)
-
- Args:
- request (Union[google.cloud.dataplex_v1.types.DeleteEntryTypeRequest, dict]):
- The request object. Delete EntryType Request.
- name (str):
- Required. The resource name of the EntryType:
- ``projects/{project_number}/locations/{location_id}/entryTypes/{entry_type_id}``.
-
- This corresponds to the ``name`` field
- on the ``request`` instance; if ``request`` is provided, this
- should not be set.
- retry (google.api_core.retry.Retry): Designation of what errors, if any,
- should be retried.
- timeout (float): The timeout for this request.
- metadata (Sequence[Tuple[str, str]]): Strings which should be
- sent along with the request as metadata.
-
- Returns:
- google.api_core.operation.Operation:
- An object representing a long-running operation.
-
- The result type for the operation will be :class:`google.protobuf.empty_pb2.Empty` A generic empty message that you can re-use to avoid defining duplicated
- empty messages in your APIs. A typical example is to
- use it as the request or the response type of an API
- method. For instance:
-
- service Foo {
- rpc Bar(google.protobuf.Empty) returns
- (google.protobuf.Empty);
-
- }
-
- """
- # Create or coerce a protobuf request object.
- # - Quick check: If we got a request object, we should *not* have
- # gotten any keyword arguments that map to the request.
- has_flattened_params = any([name])
- if request is not None and has_flattened_params:
- raise ValueError('If the `request` argument is set, then none of '
- 'the individual field arguments should be set.')
-
- # - Use the request object if provided (there's no risk of modifying the input as
- # there are no flattened fields), or create one.
- if not isinstance(request, catalog.DeleteEntryTypeRequest):
- request = catalog.DeleteEntryTypeRequest(request)
- # If we have keyword arguments corresponding to fields on the
- # request, apply these.
- if name is not None:
- request.name = name
-
- # Wrap the RPC method; this adds retry and timeout information,
- # and friendly error handling.
- rpc = self._transport._wrapped_methods[self._transport.delete_entry_type]
-
- # Certain fields should be provided within the metadata header;
- # add these here.
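- # The routing header travels as x-goog-request-params, carrying e.g.
- # (with a hypothetical resource)
- #   name=projects/my-proj/locations/us-central1/entryTypes/my-type
- # so the backend can route the request to the right region.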
- metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Validate the universe domain. - self._validate_universe_domain() - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Wrap the response in an operation future. - response = operation.from_gapic( - response, - self._transport.operations_client, - empty_pb2.Empty, - metadata_type=service.OperationMetadata, - ) - - # Done; return the response. - return response - - def list_entry_types(self, - request: Optional[Union[catalog.ListEntryTypesRequest, dict]] = None, - *, - parent: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> pagers.ListEntryTypesPager: - r"""Lists EntryType resources in a project and location. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import dataplex_v1 - - def sample_list_entry_types(): - # Create a client - client = dataplex_v1.CatalogServiceClient() - - # Initialize request argument(s) - request = dataplex_v1.ListEntryTypesRequest( - parent="parent_value", - ) - - # Make the request - page_result = client.list_entry_types(request=request) - - # Handle the response - for response in page_result: - print(response) - - Args: - request (Union[google.cloud.dataplex_v1.types.ListEntryTypesRequest, dict]): - The request object. List EntryTypes request - parent (str): - Required. The resource name of the EntryType location, - of the form: - ``projects/{project_number}/locations/{location_id}`` - where ``location_id`` refers to a Google Cloud region. - - This corresponds to the ``parent`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.cloud.dataplex_v1.services.catalog_service.pagers.ListEntryTypesPager: - List EntryTypes response. - - Iterating over this object will yield - results and resolve additional pages - automatically. - - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([parent]) - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, catalog.ListEntryTypesRequest): - request = catalog.ListEntryTypesRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. 
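- # Only `parent` is flattened for this RPC. A usage sketch with a
- # hypothetical project and region:
- #   for et in client.list_entry_types(parent="projects/my-proj/locations/us-central1"):
- #       print(et.name)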
- if parent is not None: - request.parent = parent - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.list_entry_types] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), - ) - - # Validate the universe domain. - self._validate_universe_domain() - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # This method is paged; wrap the response in a pager, which provides - # an `__iter__` convenience method. - response = pagers.ListEntryTypesPager( - method=rpc, - request=request, - response=response, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - def get_entry_type(self, - request: Optional[Union[catalog.GetEntryTypeRequest, dict]] = None, - *, - name: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> catalog.EntryType: - r"""Gets an EntryType. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import dataplex_v1 - - def sample_get_entry_type(): - # Create a client - client = dataplex_v1.CatalogServiceClient() - - # Initialize request argument(s) - request = dataplex_v1.GetEntryTypeRequest( - name="name_value", - ) - - # Make the request - response = client.get_entry_type(request=request) - - # Handle the response - print(response) - - Args: - request (Union[google.cloud.dataplex_v1.types.GetEntryTypeRequest, dict]): - The request object. Get EntryType request. - name (str): - Required. The resource name of the EntryType: - ``projects/{project_number}/locations/{location_id}/entryTypes/{entry_type_id}``. - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.cloud.dataplex_v1.types.EntryType: - Entry Type is a template for creating - Entries. - - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([name]) - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, catalog.GetEntryTypeRequest): - request = catalog.GetEntryTypeRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. 
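- # Usage sketch (hypothetical resource name):
- #   et = client.get_entry_type(name="projects/my-proj/locations/us-central1/entryTypes/my-type")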
- if name is not None: - request.name = name - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.get_entry_type] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Validate the universe domain. - self._validate_universe_domain() - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - def create_aspect_type(self, - request: Optional[Union[catalog.CreateAspectTypeRequest, dict]] = None, - *, - parent: Optional[str] = None, - aspect_type: Optional[catalog.AspectType] = None, - aspect_type_id: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> operation.Operation: - r"""Creates an AspectType. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import dataplex_v1 - - def sample_create_aspect_type(): - # Create a client - client = dataplex_v1.CatalogServiceClient() - - # Initialize request argument(s) - aspect_type = dataplex_v1.AspectType() - aspect_type.metadata_template.name = "name_value" - aspect_type.metadata_template.type_ = "type__value" - - request = dataplex_v1.CreateAspectTypeRequest( - parent="parent_value", - aspect_type_id="aspect_type_id_value", - aspect_type=aspect_type, - ) - - # Make the request - operation = client.create_aspect_type(request=request) - - print("Waiting for operation to complete...") - - response = operation.result() - - # Handle the response - print(response) - - Args: - request (Union[google.cloud.dataplex_v1.types.CreateAspectTypeRequest, dict]): - The request object. Create AspectType Request. - parent (str): - Required. The resource name of the AspectType, of the - form: projects/{project_number}/locations/{location_id} - where ``location_id`` refers to a Google Cloud region. - - This corresponds to the ``parent`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - aspect_type (google.cloud.dataplex_v1.types.AspectType): - Required. AspectType Resource. - This corresponds to the ``aspect_type`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - aspect_type_id (str): - Required. AspectType identifier. - This corresponds to the ``aspect_type_id`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.api_core.operation.Operation: - An object representing a long-running operation. 
- - The result type for the operation will be :class:`google.cloud.dataplex_v1.types.AspectType` AspectType is a template for creating Aspects, and represents the - JSON-schema for a given Entry, for example, BigQuery - Table Schema. - - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([parent, aspect_type, aspect_type_id]) - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, catalog.CreateAspectTypeRequest): - request = catalog.CreateAspectTypeRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if parent is not None: - request.parent = parent - if aspect_type is not None: - request.aspect_type = aspect_type - if aspect_type_id is not None: - request.aspect_type_id = aspect_type_id - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.create_aspect_type] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), - ) - - # Validate the universe domain. - self._validate_universe_domain() - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Wrap the response in an operation future. - response = operation.from_gapic( - response, - self._transport.operations_client, - catalog.AspectType, - metadata_type=service.OperationMetadata, - ) - - # Done; return the response. - return response - - def update_aspect_type(self, - request: Optional[Union[catalog.UpdateAspectTypeRequest, dict]] = None, - *, - aspect_type: Optional[catalog.AspectType] = None, - update_mask: Optional[field_mask_pb2.FieldMask] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> operation.Operation: - r"""Updates an AspectType. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. 
- # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import dataplex_v1 - - def sample_update_aspect_type(): - # Create a client - client = dataplex_v1.CatalogServiceClient() - - # Initialize request argument(s) - aspect_type = dataplex_v1.AspectType() - aspect_type.metadata_template.name = "name_value" - aspect_type.metadata_template.type_ = "type__value" - - request = dataplex_v1.UpdateAspectTypeRequest( - aspect_type=aspect_type, - ) - - # Make the request - operation = client.update_aspect_type(request=request) - - print("Waiting for operation to complete...") - - response = operation.result() - - # Handle the response - print(response) - - Args: - request (Union[google.cloud.dataplex_v1.types.UpdateAspectTypeRequest, dict]): - The request object. Update AspectType Request - aspect_type (google.cloud.dataplex_v1.types.AspectType): - Required. AspectType Resource - This corresponds to the ``aspect_type`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - update_mask (google.protobuf.field_mask_pb2.FieldMask): - Required. Mask of fields to update. - This corresponds to the ``update_mask`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.api_core.operation.Operation: - An object representing a long-running operation. - - The result type for the operation will be :class:`google.cloud.dataplex_v1.types.AspectType` AspectType is a template for creating Aspects, and represents the - JSON-schema for a given Entry, for example, BigQuery - Table Schema. - - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([aspect_type, update_mask]) - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, catalog.UpdateAspectTypeRequest): - request = catalog.UpdateAspectTypeRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if aspect_type is not None: - request.aspect_type = aspect_type - if update_mask is not None: - request.update_mask = update_mask - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.update_aspect_type] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("aspect_type.name", request.aspect_type.name), - )), - ) - - # Validate the universe domain. - self._validate_universe_domain() - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Wrap the response in an operation future. 
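- # operation.from_gapic wraps the raw longrunning Operation so that
- # .result() polls through the transport's operations client and yields a
- # catalog.AspectType, e.g. (hypothetical `at`/`mask` values):
- #   updated = client.update_aspect_type(aspect_type=at, update_mask=mask).result()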
-        response = operation.from_gapic( - response, - self._transport.operations_client, - catalog.AspectType, - metadata_type=service.OperationMetadata, - ) - - # Done; return the response. - return response - - def delete_aspect_type(self, - request: Optional[Union[catalog.DeleteAspectTypeRequest, dict]] = None, - *, - name: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> operation.Operation: - r"""Deletes an AspectType. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import dataplex_v1 - - def sample_delete_aspect_type(): - # Create a client - client = dataplex_v1.CatalogServiceClient() - - # Initialize request argument(s) - request = dataplex_v1.DeleteAspectTypeRequest( - name="name_value", - ) - - # Make the request - operation = client.delete_aspect_type(request=request) - - print("Waiting for operation to complete...") - - response = operation.result() - - # Handle the response - print(response) - - Args: - request (Union[google.cloud.dataplex_v1.types.DeleteAspectTypeRequest, dict]): - The request object. Delete AspectType Request. - name (str): - Required. The resource name of the AspectType: - ``projects/{project_number}/locations/{location_id}/aspectTypes/{aspect_type_id}``. - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.api_core.operation.Operation: - An object representing a long-running operation. - - The result type for the operation will be :class:`google.protobuf.empty_pb2.Empty` A generic empty message that you can re-use to avoid defining duplicated - empty messages in your APIs. A typical example is to - use it as the request or the response type of an API - method. For instance: - - service Foo { - rpc Bar(google.protobuf.Empty) returns - (google.protobuf.Empty); - - } - - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([name]) - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, catalog.DeleteAspectTypeRequest): - request = catalog.DeleteAspectTypeRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling.
- rpc = self._transport._wrapped_methods[self._transport.delete_aspect_type] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Validate the universe domain. - self._validate_universe_domain() - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Wrap the response in an operation future. - response = operation.from_gapic( - response, - self._transport.operations_client, - empty_pb2.Empty, - metadata_type=service.OperationMetadata, - ) - - # Done; return the response. - return response - - def list_aspect_types(self, - request: Optional[Union[catalog.ListAspectTypesRequest, dict]] = None, - *, - parent: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> pagers.ListAspectTypesPager: - r"""Lists AspectType resources in a project and location. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import dataplex_v1 - - def sample_list_aspect_types(): - # Create a client - client = dataplex_v1.CatalogServiceClient() - - # Initialize request argument(s) - request = dataplex_v1.ListAspectTypesRequest( - parent="parent_value", - ) - - # Make the request - page_result = client.list_aspect_types(request=request) - - # Handle the response - for response in page_result: - print(response) - - Args: - request (Union[google.cloud.dataplex_v1.types.ListAspectTypesRequest, dict]): - The request object. List AspectTypes request. - parent (str): - Required. The resource name of the AspectType location, - of the form: - ``projects/{project_number}/locations/{location_id}`` - where ``location_id`` refers to a Google Cloud region. - - This corresponds to the ``parent`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.cloud.dataplex_v1.services.catalog_service.pagers.ListAspectTypesPager: - List AspectTypes response. - - Iterating over this object will yield - results and resolve additional pages - automatically. - - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([parent]) - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. 
- if not isinstance(request, catalog.ListAspectTypesRequest): - request = catalog.ListAspectTypesRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if parent is not None: - request.parent = parent - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.list_aspect_types] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), - ) - - # Validate the universe domain. - self._validate_universe_domain() - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # This method is paged; wrap the response in a pager, which provides - # an `__iter__` convenience method. - response = pagers.ListAspectTypesPager( - method=rpc, - request=request, - response=response, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - def get_aspect_type(self, - request: Optional[Union[catalog.GetAspectTypeRequest, dict]] = None, - *, - name: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> catalog.AspectType: - r"""Gets an AspectType. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import dataplex_v1 - - def sample_get_aspect_type(): - # Create a client - client = dataplex_v1.CatalogServiceClient() - - # Initialize request argument(s) - request = dataplex_v1.GetAspectTypeRequest( - name="name_value", - ) - - # Make the request - response = client.get_aspect_type(request=request) - - # Handle the response - print(response) - - Args: - request (Union[google.cloud.dataplex_v1.types.GetAspectTypeRequest, dict]): - The request object. Get AspectType request. - name (str): - Required. The resource name of the AspectType: - ``projects/{project_number}/locations/{location_id}/aspectTypes/{aspect_type_id}``. - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.cloud.dataplex_v1.types.AspectType: - AspectType is a template for creating - Aspects, and represents the JSON-schema - for a given Entry, for example, BigQuery - Table Schema. - - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. 
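The ``has_flattened_params`` guard seen throughout this client enforces an either/or calling convention: pass a request object, or pass flattened fields, never both. A short sketch with placeholder resource names:

.. code-block:: python

    # Flattened style: pass the field directly.
    aspect_type = client.get_aspect_type(
        name="projects/my-project/locations/us-central1/aspectTypes/my-type",
    )

    # Request-object style: build the request proto (or a dict) yourself.
    request = dataplex_v1.GetAspectTypeRequest(
        name="projects/my-project/locations/us-central1/aspectTypes/my-type",
    )
    aspect_type = client.get_aspect_type(request=request)

    # Mixing both styles raises ValueError before any RPC is attempted.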
- has_flattened_params = any([name]) - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, catalog.GetAspectTypeRequest): - request = catalog.GetAspectTypeRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.get_aspect_type] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Validate the universe domain. - self._validate_universe_domain() - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - def create_entry_group(self, - request: Optional[Union[catalog.CreateEntryGroupRequest, dict]] = None, - *, - parent: Optional[str] = None, - entry_group: Optional[catalog.EntryGroup] = None, - entry_group_id: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> operation.Operation: - r"""Creates an EntryGroup. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import dataplex_v1 - - def sample_create_entry_group(): - # Create a client - client = dataplex_v1.CatalogServiceClient() - - # Initialize request argument(s) - request = dataplex_v1.CreateEntryGroupRequest( - parent="parent_value", - entry_group_id="entry_group_id_value", - ) - - # Make the request - operation = client.create_entry_group(request=request) - - print("Waiting for operation to complete...") - - response = operation.result() - - # Handle the response - print(response) - - Args: - request (Union[google.cloud.dataplex_v1.types.CreateEntryGroupRequest, dict]): - The request object. Create EntryGroup Request. - parent (str): - Required. The resource name of the entryGroup, of the - form: projects/{project_number}/locations/{location_id} - where ``location_id`` refers to a GCP region. - - This corresponds to the ``parent`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - entry_group (google.cloud.dataplex_v1.types.EntryGroup): - Required. EntryGroup Resource. - This corresponds to the ``entry_group`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - entry_group_id (str): - Required. EntryGroup identifier. - This corresponds to the ``entry_group_id`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. 
- timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.api_core.operation.Operation: - An object representing a long-running operation. - - The result type for the operation will be - :class:`google.cloud.dataplex_v1.types.EntryGroup` An - Entry Group represents a logical grouping of one or more - Entries. - - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([parent, entry_group, entry_group_id]) - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, catalog.CreateEntryGroupRequest): - request = catalog.CreateEntryGroupRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if parent is not None: - request.parent = parent - if entry_group is not None: - request.entry_group = entry_group - if entry_group_id is not None: - request.entry_group_id = entry_group_id - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.create_entry_group] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), - ) - - # Validate the universe domain. - self._validate_universe_domain() - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Wrap the response in an operation future. - response = operation.from_gapic( - response, - self._transport.operations_client, - catalog.EntryGroup, - metadata_type=service.OperationMetadata, - ) - - # Done; return the response. - return response - - def update_entry_group(self, - request: Optional[Union[catalog.UpdateEntryGroupRequest, dict]] = None, - *, - entry_group: Optional[catalog.EntryGroup] = None, - update_mask: Optional[field_mask_pb2.FieldMask] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> operation.Operation: - r"""Updates an EntryGroup. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. 
- # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import dataplex_v1 - - def sample_update_entry_group(): - # Create a client - client = dataplex_v1.CatalogServiceClient() - - # Initialize request argument(s) - request = dataplex_v1.UpdateEntryGroupRequest( - ) - - # Make the request - operation = client.update_entry_group(request=request) - - print("Waiting for operation to complete...") - - response = operation.result() - - # Handle the response - print(response) - - Args: - request (Union[google.cloud.dataplex_v1.types.UpdateEntryGroupRequest, dict]): - The request object. Update EntryGroup Request. - entry_group (google.cloud.dataplex_v1.types.EntryGroup): - Required. EntryGroup Resource. - This corresponds to the ``entry_group`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - update_mask (google.protobuf.field_mask_pb2.FieldMask): - Required. Mask of fields to update. - This corresponds to the ``update_mask`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.api_core.operation.Operation: - An object representing a long-running operation. - - The result type for the operation will be - :class:`google.cloud.dataplex_v1.types.EntryGroup` An - Entry Group represents a logical grouping of one or more - Entries. - - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([entry_group, update_mask]) - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, catalog.UpdateEntryGroupRequest): - request = catalog.UpdateEntryGroupRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if entry_group is not None: - request.entry_group = entry_group - if update_mask is not None: - request.update_mask = update_mask - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.update_entry_group] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("entry_group.name", request.entry_group.name), - )), - ) - - # Validate the universe domain. - self._validate_universe_domain() - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Wrap the response in an operation future. - response = operation.from_gapic( - response, - self._transport.operations_client, - catalog.EntryGroup, - metadata_type=service.OperationMetadata, - ) - - # Done; return the response. 
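Because ``update_mask`` is required on this method, callers spell out exactly which EntryGroup fields to overwrite. A hedged sketch of a partial update (the resource name and ``description`` value are placeholders):

.. code-block:: python

    from google.protobuf import field_mask_pb2

    entry_group = dataplex_v1.EntryGroup(
        name="projects/123/locations/us-central1/entryGroups/my-group",
        description="refreshed description",
    )

    # Only fields listed in the mask are written; others keep their values.
    operation = client.update_entry_group(
        entry_group=entry_group,
        update_mask=field_mask_pb2.FieldMask(paths=["description"]),
    )
    entry_group = operation.result()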
- return response - - def delete_entry_group(self, - request: Optional[Union[catalog.DeleteEntryGroupRequest, dict]] = None, - *, - name: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> operation.Operation: - r"""Deletes an EntryGroup. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import dataplex_v1 - - def sample_delete_entry_group(): - # Create a client - client = dataplex_v1.CatalogServiceClient() - - # Initialize request argument(s) - request = dataplex_v1.DeleteEntryGroupRequest( - name="name_value", - ) - - # Make the request - operation = client.delete_entry_group(request=request) - - print("Waiting for operation to complete...") - - response = operation.result() - - # Handle the response - print(response) - - Args: - request (Union[google.cloud.dataplex_v1.types.DeleteEntryGroupRequest, dict]): - The request object. Delete EntryGroup Request. - name (str): - Required. The resource name of the EntryGroup: - ``projects/{project_number}/locations/{location_id}/entryGroups/{entry_group_id}``. - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.api_core.operation.Operation: - An object representing a long-running operation. - - The result type for the operation will be :class:`google.protobuf.empty_pb2.Empty` A generic empty message that you can re-use to avoid defining duplicated - empty messages in your APIs. A typical example is to - use it as the request or the response type of an API - method. For instance: - - service Foo { - rpc Bar(google.protobuf.Empty) returns - (google.protobuf.Empty); - - } - - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([name]) - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, catalog.DeleteEntryGroupRequest): - request = catalog.DeleteEntryGroupRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.delete_entry_group] - - # Certain fields should be provided within the metadata header; - # add these here. 
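The routing-header helper used below folds the named request fields into the single ``x-goog-request-params`` metadata entry that the backend routes on. Roughly what it produces (resource name is a placeholder):

.. code-block:: python

    from google.api_core import gapic_v1

    header = gapic_v1.routing_header.to_grpc_metadata(
        (("name", "projects/123/locations/us-central1/entryGroups/my-group"),)
    )
    # -> a single ("x-goog-request-params", "name=<url-encoded resource name>")
    #    tuple, appended to any user-supplied metadata before the call.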
- metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Validate the universe domain. - self._validate_universe_domain() - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Wrap the response in an operation future. - response = operation.from_gapic( - response, - self._transport.operations_client, - empty_pb2.Empty, - metadata_type=service.OperationMetadata, - ) - - # Done; return the response. - return response - - def list_entry_groups(self, - request: Optional[Union[catalog.ListEntryGroupsRequest, dict]] = None, - *, - parent: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> pagers.ListEntryGroupsPager: - r"""Lists EntryGroup resources in a project and location. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import dataplex_v1 - - def sample_list_entry_groups(): - # Create a client - client = dataplex_v1.CatalogServiceClient() - - # Initialize request argument(s) - request = dataplex_v1.ListEntryGroupsRequest( - parent="parent_value", - ) - - # Make the request - page_result = client.list_entry_groups(request=request) - - # Handle the response - for response in page_result: - print(response) - - Args: - request (Union[google.cloud.dataplex_v1.types.ListEntryGroupsRequest, dict]): - The request object. List entryGroups request. - parent (str): - Required. The resource name of the entryGroup location, - of the form: - ``projects/{project_number}/locations/{location_id}`` - where ``location_id`` refers to a Google Cloud region. - - This corresponds to the ``parent`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.cloud.dataplex_v1.services.catalog_service.pagers.ListEntryGroupsPager: - List entry groups response. - - Iterating over this object will yield - results and resolve additional pages - automatically. - - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([parent]) - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, catalog.ListEntryGroupsRequest): - request = catalog.ListEntryGroupsRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. 
- if parent is not None: - request.parent = parent - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.list_entry_groups] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), - ) - - # Validate the universe domain. - self._validate_universe_domain() - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # This method is paged; wrap the response in a pager, which provides - # an `__iter__` convenience method. - response = pagers.ListEntryGroupsPager( - method=rpc, - request=request, - response=response, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - def get_entry_group(self, - request: Optional[Union[catalog.GetEntryGroupRequest, dict]] = None, - *, - name: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> catalog.EntryGroup: - r"""Gets an EntryGroup. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import dataplex_v1 - - def sample_get_entry_group(): - # Create a client - client = dataplex_v1.CatalogServiceClient() - - # Initialize request argument(s) - request = dataplex_v1.GetEntryGroupRequest( - name="name_value", - ) - - # Make the request - response = client.get_entry_group(request=request) - - # Handle the response - print(response) - - Args: - request (Union[google.cloud.dataplex_v1.types.GetEntryGroupRequest, dict]): - The request object. Get EntryGroup request. - name (str): - Required. The resource name of the EntryGroup: - ``projects/{project_number}/locations/{location_id}/entryGroups/{entry_group_id}``. - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.cloud.dataplex_v1.types.EntryGroup: - An Entry Group represents a logical - grouping of one or more Entries. - - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([name]) - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. 
- if not isinstance(request, catalog.GetEntryGroupRequest): - request = catalog.GetEntryGroupRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.get_entry_group] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Validate the universe domain. - self._validate_universe_domain() - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - def create_entry(self, - request: Optional[Union[catalog.CreateEntryRequest, dict]] = None, - *, - parent: Optional[str] = None, - entry: Optional[catalog.Entry] = None, - entry_id: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> catalog.Entry: - r"""Creates an Entry. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import dataplex_v1 - - def sample_create_entry(): - # Create a client - client = dataplex_v1.CatalogServiceClient() - - # Initialize request argument(s) - entry = dataplex_v1.Entry() - entry.entry_type = "entry_type_value" - - request = dataplex_v1.CreateEntryRequest( - parent="parent_value", - entry_id="entry_id_value", - entry=entry, - ) - - # Make the request - response = client.create_entry(request=request) - - # Handle the response - print(response) - - Args: - request (Union[google.cloud.dataplex_v1.types.CreateEntryRequest, dict]): - The request object. Create Entry request. - parent (str): - Required. The resource name of the parent Entry Group: - ``projects/{project}/locations/{location}/entryGroups/{entry_group}``. - - This corresponds to the ``parent`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - entry (google.cloud.dataplex_v1.types.Entry): - Required. Entry resource. - This corresponds to the ``entry`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - entry_id (str): - Required. Entry identifier. It has to be unique within - an Entry Group. - - Entries corresponding to Google Cloud resources use an - Entry ID format based on `full resource - names `__. - The format is a full resource name of the resource - without the prefix double slashes in the API service - name part of the full resource name. This allows - retrieval of entries using their associated resource - name. - - For example, if the full resource name of a resource is - ``//library.googleapis.com/shelves/shelf1/books/book2``, - then the suggested entry_id is - ``library.googleapis.com/shelves/shelf1/books/book2``. 
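In other words, the suggested Entry ID is the full resource name with its leading double slash removed; as a one-line sketch:

.. code-block:: python

    full_resource_name = "//library.googleapis.com/shelves/shelf1/books/book2"
    entry_id = full_resource_name.lstrip("/")
    # entry_id == "library.googleapis.com/shelves/shelf1/books/book2"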
- - It is also suggested to follow the same convention for - entries corresponding to resources from providers or - systems other than Google Cloud. - - The maximum size of the field is 4000 characters. - - This corresponds to the ``entry_id`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.cloud.dataplex_v1.types.Entry: - An entry is a representation of a - data resource that can be described by - various metadata. - - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([parent, entry, entry_id]) - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, catalog.CreateEntryRequest): - request = catalog.CreateEntryRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if parent is not None: - request.parent = parent - if entry is not None: - request.entry = entry - if entry_id is not None: - request.entry_id = entry_id - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.create_entry] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), - ) - - # Validate the universe domain. - self._validate_universe_domain() - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - def update_entry(self, - request: Optional[Union[catalog.UpdateEntryRequest, dict]] = None, - *, - entry: Optional[catalog.Entry] = None, - update_mask: Optional[field_mask_pb2.FieldMask] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> catalog.Entry: - r"""Updates an Entry. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. 
-        # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import dataplex_v1 - - def sample_update_entry(): - # Create a client - client = dataplex_v1.CatalogServiceClient() - - # Initialize request argument(s) - entry = dataplex_v1.Entry() - entry.entry_type = "entry_type_value" - - request = dataplex_v1.UpdateEntryRequest( - entry=entry, - ) - - # Make the request - response = client.update_entry(request=request) - - # Handle the response - print(response) - - Args: - request (Union[google.cloud.dataplex_v1.types.UpdateEntryRequest, dict]): - The request object. Update Entry request. - entry (google.cloud.dataplex_v1.types.Entry): - Required. Entry resource. - This corresponds to the ``entry`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - update_mask (google.protobuf.field_mask_pb2.FieldMask): - Optional. Mask of fields to update. To update Aspects, - the update_mask must contain the value "aspects". - - If the update_mask is empty, the service will update all - modifiable fields present in the request. - - This corresponds to the ``update_mask`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.cloud.dataplex_v1.types.Entry: - An entry is a representation of a - data resource that can be described by - various metadata. - - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([entry, update_mask]) - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, catalog.UpdateEntryRequest): - request = catalog.UpdateEntryRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if entry is not None: - request.entry = entry - if update_mask is not None: - request.update_mask = update_mask - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.update_entry] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("entry.name", request.entry.name), - )), - ) - - # Validate the universe domain. - self._validate_universe_domain() - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - def delete_entry(self, - request: Optional[Union[catalog.DeleteEntryRequest, dict]] = None, - *, - name: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> catalog.Entry: - r"""Deletes an Entry. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import dataplex_v1 - - def sample_delete_entry(): - # Create a client - client = dataplex_v1.CatalogServiceClient() - - # Initialize request argument(s) - request = dataplex_v1.DeleteEntryRequest( - name="name_value", - ) - - # Make the request - response = client.delete_entry(request=request) - - # Handle the response - print(response) - - Args: - request (Union[google.cloud.dataplex_v1.types.DeleteEntryRequest, dict]): - The request object. Delete Entry request. - name (str): - Required. The resource name of the Entry: - ``projects/{project}/locations/{location}/entryGroups/{entry_group}/entries/{entry}``. - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.cloud.dataplex_v1.types.Entry: - An entry is a representation of a - data resource that can be described by - various metadata. - - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([name]) - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, catalog.DeleteEntryRequest): - request = catalog.DeleteEntryRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.delete_entry] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Validate the universe domain. - self._validate_universe_domain() - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - def list_entries(self, - request: Optional[Union[catalog.ListEntriesRequest, dict]] = None, - *, - parent: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> pagers.ListEntriesPager: - r"""Lists Entries within an EntryGroup. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only.
- # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import dataplex_v1 - - def sample_list_entries(): - # Create a client - client = dataplex_v1.CatalogServiceClient() - - # Initialize request argument(s) - request = dataplex_v1.ListEntriesRequest( - parent="parent_value", - ) - - # Make the request - page_result = client.list_entries(request=request) - - # Handle the response - for response in page_result: - print(response) - - Args: - request (Union[google.cloud.dataplex_v1.types.ListEntriesRequest, dict]): - The request object. List Entries request. - parent (str): - Required. The resource name of the parent Entry Group: - ``projects/{project}/locations/{location}/entryGroups/{entry_group}``. - - This corresponds to the ``parent`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.cloud.dataplex_v1.services.catalog_service.pagers.ListEntriesPager: - List Entries response. - - Iterating over this object will yield - results and resolve additional pages - automatically. - - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([parent]) - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, catalog.ListEntriesRequest): - request = catalog.ListEntriesRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if parent is not None: - request.parent = parent - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.list_entries] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), - ) - - # Validate the universe domain. - self._validate_universe_domain() - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # This method is paged; wrap the response in a pager, which provides - # an `__iter__` convenience method. - response = pagers.ListEntriesPager( - method=rpc, - request=request, - response=response, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - def get_entry(self, - request: Optional[Union[catalog.GetEntryRequest, dict]] = None, - *, - name: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> catalog.Entry: - r"""Gets an Entry. 
- - **Caution**: The BigQuery metadata that is stored in Dataplex - Catalog is changing. For more information, see `Changes to - BigQuery metadata stored in Dataplex - Catalog `__. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import dataplex_v1 - - def sample_get_entry(): - # Create a client - client = dataplex_v1.CatalogServiceClient() - - # Initialize request argument(s) - request = dataplex_v1.GetEntryRequest( - name="name_value", - ) - - # Make the request - response = client.get_entry(request=request) - - # Handle the response - print(response) - - Args: - request (Union[google.cloud.dataplex_v1.types.GetEntryRequest, dict]): - The request object. Get Entry request. - name (str): - Required. The resource name of the Entry: - ``projects/{project}/locations/{location}/entryGroups/{entry_group}/entries/{entry}``. - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.cloud.dataplex_v1.types.Entry: - An entry is a representation of a - data resource that can be described by - various metadata. - - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([name]) - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, catalog.GetEntryRequest): - request = catalog.GetEntryRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.get_entry] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Validate the universe domain. - self._validate_universe_domain() - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - def lookup_entry(self, - request: Optional[Union[catalog.LookupEntryRequest, dict]] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> catalog.Entry: - r"""Looks up a single Entry by name using the permission on the - source system. 
- - **Caution**: The BigQuery metadata that is stored in Dataplex - Catalog is changing. For more information, see `Changes to - BigQuery metadata stored in Dataplex - Catalog `__. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import dataplex_v1 - - def sample_lookup_entry(): - # Create a client - client = dataplex_v1.CatalogServiceClient() - - # Initialize request argument(s) - request = dataplex_v1.LookupEntryRequest( - name="name_value", - entry="entry_value", - ) - - # Make the request - response = client.lookup_entry(request=request) - - # Handle the response - print(response) - - Args: - request (Union[google.cloud.dataplex_v1.types.LookupEntryRequest, dict]): - The request object. Lookup Entry request using - permissions in the source system. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.cloud.dataplex_v1.types.Entry: - An entry is a representation of a - data resource that can be described by - various metadata. - - """ - # Create or coerce a protobuf request object. - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, catalog.LookupEntryRequest): - request = catalog.LookupEntryRequest(request) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.lookup_entry] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Validate the universe domain. - self._validate_universe_domain() - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - def search_entries(self, - request: Optional[Union[catalog.SearchEntriesRequest, dict]] = None, - *, - name: Optional[str] = None, - query: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> pagers.SearchEntriesPager: - r"""Searches for Entries matching the given query and - scope. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. 
- # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import dataplex_v1 - - def sample_search_entries(): - # Create a client - client = dataplex_v1.CatalogServiceClient() - - # Initialize request argument(s) - request = dataplex_v1.SearchEntriesRequest( - name="name_value", - query="query_value", - ) - - # Make the request - page_result = client.search_entries(request=request) - - # Handle the response - for response in page_result: - print(response) - - Args: - request (Union[google.cloud.dataplex_v1.types.SearchEntriesRequest, dict]): - The request object. - name (str): - Required. The project to which the request should be - attributed in the following form: - ``projects/{project}/locations/{location}``. - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - query (str): - Required. The query against which - entries in scope should be matched. - - This corresponds to the ``query`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.cloud.dataplex_v1.services.catalog_service.pagers.SearchEntriesPager: - Iterating over this object will yield - results and resolve additional pages - automatically. - - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([name, query]) - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, catalog.SearchEntriesRequest): - request = catalog.SearchEntriesRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - if query is not None: - request.query = query - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.search_entries] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Validate the universe domain. - self._validate_universe_domain() - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # This method is paged; wrap the response in a pager, which provides - # an `__iter__` convenience method. - response = pagers.SearchEntriesPager( - method=rpc, - request=request, - response=response, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. 
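The pager wrap above is what lets callers ignore page tokens entirely. A sketch of typical consumption (project, location, and query are placeholders):

.. code-block:: python

    pager = client.search_entries(
        name="projects/my-project/locations/us-central1",
        query="displayname:sales",
    )

    # Item-level iteration; the pager fetches further pages lazily.
    # (pager.pages yields whole SearchEntriesResponse messages instead.)
    for result in pager:
        print(result)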
- return response - - def create_metadata_job(self, - request: Optional[Union[catalog.CreateMetadataJobRequest, dict]] = None, - *, - parent: Optional[str] = None, - metadata_job: Optional[catalog.MetadataJob] = None, - metadata_job_id: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> operation.Operation: - r"""Creates a metadata job. For example, use a metadata - job to import Dataplex Catalog entries and aspects from - a third-party system into Dataplex. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import dataplex_v1 - - def sample_create_metadata_job(): - # Create a client - client = dataplex_v1.CatalogServiceClient() - - # Initialize request argument(s) - metadata_job = dataplex_v1.MetadataJob() - metadata_job.import_spec.scope.entry_groups = ['entry_groups_value1', 'entry_groups_value2'] - metadata_job.import_spec.scope.entry_types = ['entry_types_value1', 'entry_types_value2'] - metadata_job.import_spec.entry_sync_mode = "INCREMENTAL" - metadata_job.import_spec.aspect_sync_mode = "INCREMENTAL" - metadata_job.type_ = "IMPORT" - - request = dataplex_v1.CreateMetadataJobRequest( - parent="parent_value", - metadata_job=metadata_job, - ) - - # Make the request - operation = client.create_metadata_job(request=request) - - print("Waiting for operation to complete...") - - response = operation.result() - - # Handle the response - print(response) - - Args: - request (Union[google.cloud.dataplex_v1.types.CreateMetadataJobRequest, dict]): - The request object. Create metadata job request. - parent (str): - Required. The resource name of the parent location, in - the format - ``projects/{project_id_or_number}/locations/{location_id}`` - - This corresponds to the ``parent`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - metadata_job (google.cloud.dataplex_v1.types.MetadataJob): - Required. The metadata job resource. - This corresponds to the ``metadata_job`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - metadata_job_id (str): - Optional. The metadata job ID. If not provided, a unique - ID is generated with the prefix ``metadata-job-``. - - This corresponds to the ``metadata_job_id`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.api_core.operation.Operation: - An object representing a long-running operation. - - The result type for the operation will be - :class:`google.cloud.dataplex_v1.types.MetadataJob` A - metadata job resource. - - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. 
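-        # (Supplying both a populated ``request`` and any of the flattened
-        # parent/metadata_job/metadata_job_id arguments raises ValueError
-        # below.)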
- has_flattened_params = any([parent, metadata_job, metadata_job_id]) - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, catalog.CreateMetadataJobRequest): - request = catalog.CreateMetadataJobRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if parent is not None: - request.parent = parent - if metadata_job is not None: - request.metadata_job = metadata_job - if metadata_job_id is not None: - request.metadata_job_id = metadata_job_id - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.create_metadata_job] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), - ) - - # Validate the universe domain. - self._validate_universe_domain() - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Wrap the response in an operation future. - response = operation.from_gapic( - response, - self._transport.operations_client, - catalog.MetadataJob, - metadata_type=service.OperationMetadata, - ) - - # Done; return the response. - return response - - def get_metadata_job(self, - request: Optional[Union[catalog.GetMetadataJobRequest, dict]] = None, - *, - name: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> catalog.MetadataJob: - r"""Gets a metadata job. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import dataplex_v1 - - def sample_get_metadata_job(): - # Create a client - client = dataplex_v1.CatalogServiceClient() - - # Initialize request argument(s) - request = dataplex_v1.GetMetadataJobRequest( - name="name_value", - ) - - # Make the request - response = client.get_metadata_job(request=request) - - # Handle the response - print(response) - - Args: - request (Union[google.cloud.dataplex_v1.types.GetMetadataJobRequest, dict]): - The request object. Get metadata job request. - name (str): - Required. The resource name of the metadata job, in the - format - ``projects/{project_id_or_number}/locations/{location_id}/metadataJobs/{metadata_job_id}``. - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.cloud.dataplex_v1.types.MetadataJob: - A metadata job resource. 
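-
-        For example (hypothetical job name; substitute the ID returned by a
-        previously created metadata job), the returned resource can be
-        polled for its current state:
-
-        .. code-block:: python
-
-            job = client.get_metadata_job(
-                name="projects/my-project/locations/us-central1/metadataJobs/my-job",
-            )
-            print(job.status.state)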
- """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([name]) - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, catalog.GetMetadataJobRequest): - request = catalog.GetMetadataJobRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.get_metadata_job] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Validate the universe domain. - self._validate_universe_domain() - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - def list_metadata_jobs(self, - request: Optional[Union[catalog.ListMetadataJobsRequest, dict]] = None, - *, - parent: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> pagers.ListMetadataJobsPager: - r"""Lists metadata jobs. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import dataplex_v1 - - def sample_list_metadata_jobs(): - # Create a client - client = dataplex_v1.CatalogServiceClient() - - # Initialize request argument(s) - request = dataplex_v1.ListMetadataJobsRequest( - parent="parent_value", - ) - - # Make the request - page_result = client.list_metadata_jobs(request=request) - - # Handle the response - for response in page_result: - print(response) - - Args: - request (Union[google.cloud.dataplex_v1.types.ListMetadataJobsRequest, dict]): - The request object. List metadata jobs request. - parent (str): - Required. The resource name of the parent location, in - the format - ``projects/{project_id_or_number}/locations/{location_id}`` - - This corresponds to the ``parent`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.cloud.dataplex_v1.services.catalog_service.pagers.ListMetadataJobsPager: - List metadata jobs response. - - Iterating over this object will yield - results and resolve additional pages - automatically. - - """ - # Create or coerce a protobuf request object. 
- # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([parent]) - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, catalog.ListMetadataJobsRequest): - request = catalog.ListMetadataJobsRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if parent is not None: - request.parent = parent - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.list_metadata_jobs] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), - ) - - # Validate the universe domain. - self._validate_universe_domain() - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # This method is paged; wrap the response in a pager, which provides - # an `__iter__` convenience method. - response = pagers.ListMetadataJobsPager( - method=rpc, - request=request, - response=response, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - def cancel_metadata_job(self, - request: Optional[Union[catalog.CancelMetadataJobRequest, dict]] = None, - *, - name: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> None: - r"""Cancels a metadata job. - - If you cancel a metadata import job that is in progress, - the changes in the job might be partially applied. We - recommend that you reset the state of the entry groups - in your project by running another metadata job that - reverts the changes from the canceled job. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import dataplex_v1 - - def sample_cancel_metadata_job(): - # Create a client - client = dataplex_v1.CatalogServiceClient() - - # Initialize request argument(s) - request = dataplex_v1.CancelMetadataJobRequest( - name="name_value", - ) - - # Make the request - client.cancel_metadata_job(request=request) - - Args: - request (Union[google.cloud.dataplex_v1.types.CancelMetadataJobRequest, dict]): - The request object. Cancel metadata job request. - name (str): - Required. The resource name of the job, in the format - ``projects/{project_id_or_number}/locations/{location_id}/metadataJobs/{metadata_job_id}`` - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. 
- timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([name]) - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, catalog.CancelMetadataJobRequest): - request = catalog.CancelMetadataJobRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.cancel_metadata_job] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Validate the universe domain. - self._validate_universe_domain() - - # Send the request. - rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - def __enter__(self) -> "CatalogServiceClient": - return self - - def __exit__(self, type, value, traceback): - """Releases underlying transport's resources. - - .. warning:: - ONLY use as a context manager if the transport is NOT shared - with other clients! Exiting the with block will CLOSE the transport - and may cause errors in other clients! - """ - self.transport.close() - - def list_operations( - self, - request: Optional[operations_pb2.ListOperationsRequest] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> operations_pb2.ListOperationsResponse: - r"""Lists operations that match the specified filter in the request. - - Args: - request (:class:`~.operations_pb2.ListOperationsRequest`): - The request object. Request message for - `ListOperations` method. - retry (google.api_core.retry.Retry): Designation of what errors, - if any, should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - Returns: - ~.operations_pb2.ListOperationsResponse: - Response message for ``ListOperations`` method. - """ - # Create or coerce a protobuf request object. - # The request isn't a proto-plus wrapped type, - # so it must be constructed via keyword expansion. - if isinstance(request, dict): - request = operations_pb2.ListOperationsRequest(**request) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.list_operations] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata( - (("name", request.name),)), - ) - - # Validate the universe domain. - self._validate_universe_domain() - - # Send the request. 
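-        # (The wrapped method applies the transport's default retry and
-        # timeout for this RPC unless explicit values were passed in above.)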
- response = rpc( - request, retry=retry, timeout=timeout, metadata=metadata,) - - # Done; return the response. - return response - - def get_operation( - self, - request: Optional[operations_pb2.GetOperationRequest] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> operations_pb2.Operation: - r"""Gets the latest state of a long-running operation. - - Args: - request (:class:`~.operations_pb2.GetOperationRequest`): - The request object. Request message for - `GetOperation` method. - retry (google.api_core.retry.Retry): Designation of what errors, - if any, should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - Returns: - ~.operations_pb2.Operation: - An ``Operation`` object. - """ - # Create or coerce a protobuf request object. - # The request isn't a proto-plus wrapped type, - # so it must be constructed via keyword expansion. - if isinstance(request, dict): - request = operations_pb2.GetOperationRequest(**request) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.get_operation] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata( - (("name", request.name),)), - ) - - # Validate the universe domain. - self._validate_universe_domain() - - # Send the request. - response = rpc( - request, retry=retry, timeout=timeout, metadata=metadata,) - - # Done; return the response. - return response - - def delete_operation( - self, - request: Optional[operations_pb2.DeleteOperationRequest] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> None: - r"""Deletes a long-running operation. - - This method indicates that the client is no longer interested - in the operation result. It does not cancel the operation. - If the server doesn't support this method, it returns - `google.rpc.Code.UNIMPLEMENTED`. - - Args: - request (:class:`~.operations_pb2.DeleteOperationRequest`): - The request object. Request message for - `DeleteOperation` method. - retry (google.api_core.retry.Retry): Designation of what errors, - if any, should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - Returns: - None - """ - # Create or coerce a protobuf request object. - # The request isn't a proto-plus wrapped type, - # so it must be constructed via keyword expansion. - if isinstance(request, dict): - request = operations_pb2.DeleteOperationRequest(**request) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.delete_operation] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata( - (("name", request.name),)), - ) - - # Validate the universe domain. - self._validate_universe_domain() - - # Send the request. 
-        rpc(request, retry=retry, timeout=timeout, metadata=metadata,)
-
-    def cancel_operation(
-        self,
-        request: Optional[operations_pb2.CancelOperationRequest] = None,
-        *,
-        retry: OptionalRetry = gapic_v1.method.DEFAULT,
-        timeout: Union[float, object] = gapic_v1.method.DEFAULT,
-        metadata: Sequence[Tuple[str, str]] = (),
-    ) -> None:
-        r"""Starts asynchronous cancellation on a long-running operation.
-
-        The server makes a best effort to cancel the operation, but success
-        is not guaranteed. If the server doesn't support this method, it returns
-        `google.rpc.Code.UNIMPLEMENTED`.
-
-        Args:
-            request (:class:`~.operations_pb2.CancelOperationRequest`):
-                The request object. Request message for
-                `CancelOperation` method.
-            retry (google.api_core.retry.Retry): Designation of what errors,
-                if any, should be retried.
-            timeout (float): The timeout for this request.
-            metadata (Sequence[Tuple[str, str]]): Strings which should be
-                sent along with the request as metadata.
-        Returns:
-            None
-        """
-        # Create or coerce a protobuf request object.
-        # The request isn't a proto-plus wrapped type,
-        # so it must be constructed via keyword expansion.
-        if isinstance(request, dict):
-            request = operations_pb2.CancelOperationRequest(**request)
-
-        # Wrap the RPC method; this adds retry and timeout information,
-        # and friendly error handling.
-        rpc = self._transport._wrapped_methods[self._transport.cancel_operation]
-
-        # Certain fields should be provided within the metadata header;
-        # add these here.
-        metadata = tuple(metadata) + (
-            gapic_v1.routing_header.to_grpc_metadata(
-                (("name", request.name),)),
-        )
-
-        # Validate the universe domain.
-        self._validate_universe_domain()
-
-        # Send the request.
-        rpc(request, retry=retry, timeout=timeout, metadata=metadata,)
-
-    def get_location(
-        self,
-        request: Optional[locations_pb2.GetLocationRequest] = None,
-        *,
-        retry: OptionalRetry = gapic_v1.method.DEFAULT,
-        timeout: Union[float, object] = gapic_v1.method.DEFAULT,
-        metadata: Sequence[Tuple[str, str]] = (),
-    ) -> locations_pb2.Location:
-        r"""Gets information about a location.
-
-        Args:
-            request (:class:`~.locations_pb2.GetLocationRequest`):
-                The request object. Request message for
-                `GetLocation` method.
-            retry (google.api_core.retry.Retry): Designation of what errors,
-                if any, should be retried.
-            timeout (float): The timeout for this request.
-            metadata (Sequence[Tuple[str, str]]): Strings which should be
-                sent along with the request as metadata.
-        Returns:
-            ~.locations_pb2.Location:
-                Location object.
-        """
-        # Create or coerce a protobuf request object.
-        # The request isn't a proto-plus wrapped type,
-        # so it must be constructed via keyword expansion.
-        if isinstance(request, dict):
-            request = locations_pb2.GetLocationRequest(**request)
-
-        # Wrap the RPC method; this adds retry and timeout information,
-        # and friendly error handling.
-        rpc = self._transport._wrapped_methods[self._transport.get_location]
-
-        # Certain fields should be provided within the metadata header;
-        # add these here.
-        metadata = tuple(metadata) + (
-            gapic_v1.routing_header.to_grpc_metadata(
-                (("name", request.name),)),
-        )
-
-        # Validate the universe domain.
-        self._validate_universe_domain()
-
-        # Send the request.
-        response = rpc(
-            request, retry=retry, timeout=timeout, metadata=metadata,)
-
-        # Done; return the response.
-        return response
-
-    def list_locations(
-        self,
-        request: Optional[locations_pb2.ListLocationsRequest] = None,
-        *,
-        retry: OptionalRetry = gapic_v1.method.DEFAULT,
-        timeout: Union[float, object] = gapic_v1.method.DEFAULT,
-        metadata: Sequence[Tuple[str, str]] = (),
-    ) -> locations_pb2.ListLocationsResponse:
-        r"""Lists information about the supported locations for this service.
-
-        Args:
-            request (:class:`~.locations_pb2.ListLocationsRequest`):
-                The request object. Request message for
-                `ListLocations` method.
-            retry (google.api_core.retry.Retry): Designation of what errors,
-                if any, should be retried.
-            timeout (float): The timeout for this request.
-            metadata (Sequence[Tuple[str, str]]): Strings which should be
-                sent along with the request as metadata.
-        Returns:
-            ~.locations_pb2.ListLocationsResponse:
-                Response message for ``ListLocations`` method.
-        """
-        # Create or coerce a protobuf request object.
-        # The request isn't a proto-plus wrapped type,
-        # so it must be constructed via keyword expansion.
-        if isinstance(request, dict):
-            request = locations_pb2.ListLocationsRequest(**request)
-
-        # Wrap the RPC method; this adds retry and timeout information,
-        # and friendly error handling.
-        rpc = self._transport._wrapped_methods[self._transport.list_locations]
-
-        # Certain fields should be provided within the metadata header;
-        # add these here.
-        metadata = tuple(metadata) + (
-            gapic_v1.routing_header.to_grpc_metadata(
-                (("name", request.name),)),
-        )
-
-        # Validate the universe domain.
-        self._validate_universe_domain()
-
-        # Send the request.
-        response = rpc(
-            request, retry=retry, timeout=timeout, metadata=metadata,)
-
-        # Done; return the response.
-        return response
-
-
-DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(gapic_version=package_version.__version__)
-
-
-__all__ = (
-    "CatalogServiceClient",
-)
diff --git a/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/catalog_service/pagers.py b/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/catalog_service/pagers.py
deleted file mode 100644
index 224b831f513a..000000000000
--- a/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/catalog_service/pagers.py
+++ /dev/null
@@ -1,837 +0,0 @@
-# -*- coding: utf-8 -*-
-# Copyright 2024 Google LLC
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-# -from google.api_core import gapic_v1 -from google.api_core import retry as retries -from google.api_core import retry_async as retries_async -from typing import Any, AsyncIterator, Awaitable, Callable, Sequence, Tuple, Optional, Iterator, Union -try: - OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault, None] - OptionalAsyncRetry = Union[retries_async.AsyncRetry, gapic_v1.method._MethodDefault, None] -except AttributeError: # pragma: NO COVER - OptionalRetry = Union[retries.Retry, object, None] # type: ignore - OptionalAsyncRetry = Union[retries_async.AsyncRetry, object, None] # type: ignore - -from google.cloud.dataplex_v1.types import catalog - - -class ListEntryTypesPager: - """A pager for iterating through ``list_entry_types`` requests. - - This class thinly wraps an initial - :class:`google.cloud.dataplex_v1.types.ListEntryTypesResponse` object, and - provides an ``__iter__`` method to iterate through its - ``entry_types`` field. - - If there are more pages, the ``__iter__`` method will make additional - ``ListEntryTypes`` requests and continue to iterate - through the ``entry_types`` field on the - corresponding responses. - - All the usual :class:`google.cloud.dataplex_v1.types.ListEntryTypesResponse` - attributes are available on the pager. If multiple requests are made, only - the most recent response is retained, and thus used for attribute lookup. - """ - def __init__(self, - method: Callable[..., catalog.ListEntryTypesResponse], - request: catalog.ListEntryTypesRequest, - response: catalog.ListEntryTypesResponse, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = ()): - """Instantiate the pager. - - Args: - method (Callable): The method that was originally called, and - which instantiated this pager. - request (google.cloud.dataplex_v1.types.ListEntryTypesRequest): - The initial request object. - response (google.cloud.dataplex_v1.types.ListEntryTypesResponse): - The initial response object. - retry (google.api_core.retry.Retry): Designation of what errors, - if any, should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - """ - self._method = method - self._request = catalog.ListEntryTypesRequest(request) - self._response = response - self._retry = retry - self._timeout = timeout - self._metadata = metadata - - def __getattr__(self, name: str) -> Any: - return getattr(self._response, name) - - @property - def pages(self) -> Iterator[catalog.ListEntryTypesResponse]: - yield self._response - while self._response.next_page_token: - self._request.page_token = self._response.next_page_token - self._response = self._method(self._request, retry=self._retry, timeout=self._timeout, metadata=self._metadata) - yield self._response - - def __iter__(self) -> Iterator[catalog.EntryType]: - for page in self.pages: - yield from page.entry_types - - def __repr__(self) -> str: - return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) - - -class ListEntryTypesAsyncPager: - """A pager for iterating through ``list_entry_types`` requests. - - This class thinly wraps an initial - :class:`google.cloud.dataplex_v1.types.ListEntryTypesResponse` object, and - provides an ``__aiter__`` method to iterate through its - ``entry_types`` field. 
- - If there are more pages, the ``__aiter__`` method will make additional - ``ListEntryTypes`` requests and continue to iterate - through the ``entry_types`` field on the - corresponding responses. - - All the usual :class:`google.cloud.dataplex_v1.types.ListEntryTypesResponse` - attributes are available on the pager. If multiple requests are made, only - the most recent response is retained, and thus used for attribute lookup. - """ - def __init__(self, - method: Callable[..., Awaitable[catalog.ListEntryTypesResponse]], - request: catalog.ListEntryTypesRequest, - response: catalog.ListEntryTypesResponse, - *, - retry: OptionalAsyncRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = ()): - """Instantiates the pager. - - Args: - method (Callable): The method that was originally called, and - which instantiated this pager. - request (google.cloud.dataplex_v1.types.ListEntryTypesRequest): - The initial request object. - response (google.cloud.dataplex_v1.types.ListEntryTypesResponse): - The initial response object. - retry (google.api_core.retry.AsyncRetry): Designation of what errors, - if any, should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - """ - self._method = method - self._request = catalog.ListEntryTypesRequest(request) - self._response = response - self._retry = retry - self._timeout = timeout - self._metadata = metadata - - def __getattr__(self, name: str) -> Any: - return getattr(self._response, name) - - @property - async def pages(self) -> AsyncIterator[catalog.ListEntryTypesResponse]: - yield self._response - while self._response.next_page_token: - self._request.page_token = self._response.next_page_token - self._response = await self._method(self._request, retry=self._retry, timeout=self._timeout, metadata=self._metadata) - yield self._response - def __aiter__(self) -> AsyncIterator[catalog.EntryType]: - async def async_generator(): - async for page in self.pages: - for response in page.entry_types: - yield response - - return async_generator() - - def __repr__(self) -> str: - return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) - - -class ListAspectTypesPager: - """A pager for iterating through ``list_aspect_types`` requests. - - This class thinly wraps an initial - :class:`google.cloud.dataplex_v1.types.ListAspectTypesResponse` object, and - provides an ``__iter__`` method to iterate through its - ``aspect_types`` field. - - If there are more pages, the ``__iter__`` method will make additional - ``ListAspectTypes`` requests and continue to iterate - through the ``aspect_types`` field on the - corresponding responses. - - All the usual :class:`google.cloud.dataplex_v1.types.ListAspectTypesResponse` - attributes are available on the pager. If multiple requests are made, only - the most recent response is retained, and thus used for attribute lookup. - """ - def __init__(self, - method: Callable[..., catalog.ListAspectTypesResponse], - request: catalog.ListAspectTypesRequest, - response: catalog.ListAspectTypesResponse, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = ()): - """Instantiate the pager. - - Args: - method (Callable): The method that was originally called, and - which instantiated this pager. 
- request (google.cloud.dataplex_v1.types.ListAspectTypesRequest): - The initial request object. - response (google.cloud.dataplex_v1.types.ListAspectTypesResponse): - The initial response object. - retry (google.api_core.retry.Retry): Designation of what errors, - if any, should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - """ - self._method = method - self._request = catalog.ListAspectTypesRequest(request) - self._response = response - self._retry = retry - self._timeout = timeout - self._metadata = metadata - - def __getattr__(self, name: str) -> Any: - return getattr(self._response, name) - - @property - def pages(self) -> Iterator[catalog.ListAspectTypesResponse]: - yield self._response - while self._response.next_page_token: - self._request.page_token = self._response.next_page_token - self._response = self._method(self._request, retry=self._retry, timeout=self._timeout, metadata=self._metadata) - yield self._response - - def __iter__(self) -> Iterator[catalog.AspectType]: - for page in self.pages: - yield from page.aspect_types - - def __repr__(self) -> str: - return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) - - -class ListAspectTypesAsyncPager: - """A pager for iterating through ``list_aspect_types`` requests. - - This class thinly wraps an initial - :class:`google.cloud.dataplex_v1.types.ListAspectTypesResponse` object, and - provides an ``__aiter__`` method to iterate through its - ``aspect_types`` field. - - If there are more pages, the ``__aiter__`` method will make additional - ``ListAspectTypes`` requests and continue to iterate - through the ``aspect_types`` field on the - corresponding responses. - - All the usual :class:`google.cloud.dataplex_v1.types.ListAspectTypesResponse` - attributes are available on the pager. If multiple requests are made, only - the most recent response is retained, and thus used for attribute lookup. - """ - def __init__(self, - method: Callable[..., Awaitable[catalog.ListAspectTypesResponse]], - request: catalog.ListAspectTypesRequest, - response: catalog.ListAspectTypesResponse, - *, - retry: OptionalAsyncRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = ()): - """Instantiates the pager. - - Args: - method (Callable): The method that was originally called, and - which instantiated this pager. - request (google.cloud.dataplex_v1.types.ListAspectTypesRequest): - The initial request object. - response (google.cloud.dataplex_v1.types.ListAspectTypesResponse): - The initial response object. - retry (google.api_core.retry.AsyncRetry): Designation of what errors, - if any, should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. 
- """ - self._method = method - self._request = catalog.ListAspectTypesRequest(request) - self._response = response - self._retry = retry - self._timeout = timeout - self._metadata = metadata - - def __getattr__(self, name: str) -> Any: - return getattr(self._response, name) - - @property - async def pages(self) -> AsyncIterator[catalog.ListAspectTypesResponse]: - yield self._response - while self._response.next_page_token: - self._request.page_token = self._response.next_page_token - self._response = await self._method(self._request, retry=self._retry, timeout=self._timeout, metadata=self._metadata) - yield self._response - def __aiter__(self) -> AsyncIterator[catalog.AspectType]: - async def async_generator(): - async for page in self.pages: - for response in page.aspect_types: - yield response - - return async_generator() - - def __repr__(self) -> str: - return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) - - -class ListEntryGroupsPager: - """A pager for iterating through ``list_entry_groups`` requests. - - This class thinly wraps an initial - :class:`google.cloud.dataplex_v1.types.ListEntryGroupsResponse` object, and - provides an ``__iter__`` method to iterate through its - ``entry_groups`` field. - - If there are more pages, the ``__iter__`` method will make additional - ``ListEntryGroups`` requests and continue to iterate - through the ``entry_groups`` field on the - corresponding responses. - - All the usual :class:`google.cloud.dataplex_v1.types.ListEntryGroupsResponse` - attributes are available on the pager. If multiple requests are made, only - the most recent response is retained, and thus used for attribute lookup. - """ - def __init__(self, - method: Callable[..., catalog.ListEntryGroupsResponse], - request: catalog.ListEntryGroupsRequest, - response: catalog.ListEntryGroupsResponse, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = ()): - """Instantiate the pager. - - Args: - method (Callable): The method that was originally called, and - which instantiated this pager. - request (google.cloud.dataplex_v1.types.ListEntryGroupsRequest): - The initial request object. - response (google.cloud.dataplex_v1.types.ListEntryGroupsResponse): - The initial response object. - retry (google.api_core.retry.Retry): Designation of what errors, - if any, should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - """ - self._method = method - self._request = catalog.ListEntryGroupsRequest(request) - self._response = response - self._retry = retry - self._timeout = timeout - self._metadata = metadata - - def __getattr__(self, name: str) -> Any: - return getattr(self._response, name) - - @property - def pages(self) -> Iterator[catalog.ListEntryGroupsResponse]: - yield self._response - while self._response.next_page_token: - self._request.page_token = self._response.next_page_token - self._response = self._method(self._request, retry=self._retry, timeout=self._timeout, metadata=self._metadata) - yield self._response - - def __iter__(self) -> Iterator[catalog.EntryGroup]: - for page in self.pages: - yield from page.entry_groups - - def __repr__(self) -> str: - return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) - - -class ListEntryGroupsAsyncPager: - """A pager for iterating through ``list_entry_groups`` requests. 
- - This class thinly wraps an initial - :class:`google.cloud.dataplex_v1.types.ListEntryGroupsResponse` object, and - provides an ``__aiter__`` method to iterate through its - ``entry_groups`` field. - - If there are more pages, the ``__aiter__`` method will make additional - ``ListEntryGroups`` requests and continue to iterate - through the ``entry_groups`` field on the - corresponding responses. - - All the usual :class:`google.cloud.dataplex_v1.types.ListEntryGroupsResponse` - attributes are available on the pager. If multiple requests are made, only - the most recent response is retained, and thus used for attribute lookup. - """ - def __init__(self, - method: Callable[..., Awaitable[catalog.ListEntryGroupsResponse]], - request: catalog.ListEntryGroupsRequest, - response: catalog.ListEntryGroupsResponse, - *, - retry: OptionalAsyncRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = ()): - """Instantiates the pager. - - Args: - method (Callable): The method that was originally called, and - which instantiated this pager. - request (google.cloud.dataplex_v1.types.ListEntryGroupsRequest): - The initial request object. - response (google.cloud.dataplex_v1.types.ListEntryGroupsResponse): - The initial response object. - retry (google.api_core.retry.AsyncRetry): Designation of what errors, - if any, should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - """ - self._method = method - self._request = catalog.ListEntryGroupsRequest(request) - self._response = response - self._retry = retry - self._timeout = timeout - self._metadata = metadata - - def __getattr__(self, name: str) -> Any: - return getattr(self._response, name) - - @property - async def pages(self) -> AsyncIterator[catalog.ListEntryGroupsResponse]: - yield self._response - while self._response.next_page_token: - self._request.page_token = self._response.next_page_token - self._response = await self._method(self._request, retry=self._retry, timeout=self._timeout, metadata=self._metadata) - yield self._response - def __aiter__(self) -> AsyncIterator[catalog.EntryGroup]: - async def async_generator(): - async for page in self.pages: - for response in page.entry_groups: - yield response - - return async_generator() - - def __repr__(self) -> str: - return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) - - -class ListEntriesPager: - """A pager for iterating through ``list_entries`` requests. - - This class thinly wraps an initial - :class:`google.cloud.dataplex_v1.types.ListEntriesResponse` object, and - provides an ``__iter__`` method to iterate through its - ``entries`` field. - - If there are more pages, the ``__iter__`` method will make additional - ``ListEntries`` requests and continue to iterate - through the ``entries`` field on the - corresponding responses. - - All the usual :class:`google.cloud.dataplex_v1.types.ListEntriesResponse` - attributes are available on the pager. If multiple requests are made, only - the most recent response is retained, and thus used for attribute lookup. 
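-
-    A minimal page-level sketch (assuming ``client`` and ``request`` are
-    built as in the ``list_entries`` samples):
-
-    .. code-block:: python
-
-        pager = client.list_entries(request=request)
-        for page in pager.pages:
-            # Each page is a full ListEntriesResponse.
-            for entry in page.entries:
-                print(entry.name)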
- """ - def __init__(self, - method: Callable[..., catalog.ListEntriesResponse], - request: catalog.ListEntriesRequest, - response: catalog.ListEntriesResponse, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = ()): - """Instantiate the pager. - - Args: - method (Callable): The method that was originally called, and - which instantiated this pager. - request (google.cloud.dataplex_v1.types.ListEntriesRequest): - The initial request object. - response (google.cloud.dataplex_v1.types.ListEntriesResponse): - The initial response object. - retry (google.api_core.retry.Retry): Designation of what errors, - if any, should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - """ - self._method = method - self._request = catalog.ListEntriesRequest(request) - self._response = response - self._retry = retry - self._timeout = timeout - self._metadata = metadata - - def __getattr__(self, name: str) -> Any: - return getattr(self._response, name) - - @property - def pages(self) -> Iterator[catalog.ListEntriesResponse]: - yield self._response - while self._response.next_page_token: - self._request.page_token = self._response.next_page_token - self._response = self._method(self._request, retry=self._retry, timeout=self._timeout, metadata=self._metadata) - yield self._response - - def __iter__(self) -> Iterator[catalog.Entry]: - for page in self.pages: - yield from page.entries - - def __repr__(self) -> str: - return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) - - -class ListEntriesAsyncPager: - """A pager for iterating through ``list_entries`` requests. - - This class thinly wraps an initial - :class:`google.cloud.dataplex_v1.types.ListEntriesResponse` object, and - provides an ``__aiter__`` method to iterate through its - ``entries`` field. - - If there are more pages, the ``__aiter__`` method will make additional - ``ListEntries`` requests and continue to iterate - through the ``entries`` field on the - corresponding responses. - - All the usual :class:`google.cloud.dataplex_v1.types.ListEntriesResponse` - attributes are available on the pager. If multiple requests are made, only - the most recent response is retained, and thus used for attribute lookup. - """ - def __init__(self, - method: Callable[..., Awaitable[catalog.ListEntriesResponse]], - request: catalog.ListEntriesRequest, - response: catalog.ListEntriesResponse, - *, - retry: OptionalAsyncRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = ()): - """Instantiates the pager. - - Args: - method (Callable): The method that was originally called, and - which instantiated this pager. - request (google.cloud.dataplex_v1.types.ListEntriesRequest): - The initial request object. - response (google.cloud.dataplex_v1.types.ListEntriesResponse): - The initial response object. - retry (google.api_core.retry.AsyncRetry): Designation of what errors, - if any, should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. 
- """ - self._method = method - self._request = catalog.ListEntriesRequest(request) - self._response = response - self._retry = retry - self._timeout = timeout - self._metadata = metadata - - def __getattr__(self, name: str) -> Any: - return getattr(self._response, name) - - @property - async def pages(self) -> AsyncIterator[catalog.ListEntriesResponse]: - yield self._response - while self._response.next_page_token: - self._request.page_token = self._response.next_page_token - self._response = await self._method(self._request, retry=self._retry, timeout=self._timeout, metadata=self._metadata) - yield self._response - def __aiter__(self) -> AsyncIterator[catalog.Entry]: - async def async_generator(): - async for page in self.pages: - for response in page.entries: - yield response - - return async_generator() - - def __repr__(self) -> str: - return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) - - -class SearchEntriesPager: - """A pager for iterating through ``search_entries`` requests. - - This class thinly wraps an initial - :class:`google.cloud.dataplex_v1.types.SearchEntriesResponse` object, and - provides an ``__iter__`` method to iterate through its - ``results`` field. - - If there are more pages, the ``__iter__`` method will make additional - ``SearchEntries`` requests and continue to iterate - through the ``results`` field on the - corresponding responses. - - All the usual :class:`google.cloud.dataplex_v1.types.SearchEntriesResponse` - attributes are available on the pager. If multiple requests are made, only - the most recent response is retained, and thus used for attribute lookup. - """ - def __init__(self, - method: Callable[..., catalog.SearchEntriesResponse], - request: catalog.SearchEntriesRequest, - response: catalog.SearchEntriesResponse, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = ()): - """Instantiate the pager. - - Args: - method (Callable): The method that was originally called, and - which instantiated this pager. - request (google.cloud.dataplex_v1.types.SearchEntriesRequest): - The initial request object. - response (google.cloud.dataplex_v1.types.SearchEntriesResponse): - The initial response object. - retry (google.api_core.retry.Retry): Designation of what errors, - if any, should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - """ - self._method = method - self._request = catalog.SearchEntriesRequest(request) - self._response = response - self._retry = retry - self._timeout = timeout - self._metadata = metadata - - def __getattr__(self, name: str) -> Any: - return getattr(self._response, name) - - @property - def pages(self) -> Iterator[catalog.SearchEntriesResponse]: - yield self._response - while self._response.next_page_token: - self._request.page_token = self._response.next_page_token - self._response = self._method(self._request, retry=self._retry, timeout=self._timeout, metadata=self._metadata) - yield self._response - - def __iter__(self) -> Iterator[catalog.SearchEntriesResult]: - for page in self.pages: - yield from page.results - - def __repr__(self) -> str: - return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) - - -class SearchEntriesAsyncPager: - """A pager for iterating through ``search_entries`` requests. 
- - This class thinly wraps an initial - :class:`google.cloud.dataplex_v1.types.SearchEntriesResponse` object, and - provides an ``__aiter__`` method to iterate through its - ``results`` field. - - If there are more pages, the ``__aiter__`` method will make additional - ``SearchEntries`` requests and continue to iterate - through the ``results`` field on the - corresponding responses. - - All the usual :class:`google.cloud.dataplex_v1.types.SearchEntriesResponse` - attributes are available on the pager. If multiple requests are made, only - the most recent response is retained, and thus used for attribute lookup. - """ - def __init__(self, - method: Callable[..., Awaitable[catalog.SearchEntriesResponse]], - request: catalog.SearchEntriesRequest, - response: catalog.SearchEntriesResponse, - *, - retry: OptionalAsyncRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = ()): - """Instantiates the pager. - - Args: - method (Callable): The method that was originally called, and - which instantiated this pager. - request (google.cloud.dataplex_v1.types.SearchEntriesRequest): - The initial request object. - response (google.cloud.dataplex_v1.types.SearchEntriesResponse): - The initial response object. - retry (google.api_core.retry.AsyncRetry): Designation of what errors, - if any, should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - """ - self._method = method - self._request = catalog.SearchEntriesRequest(request) - self._response = response - self._retry = retry - self._timeout = timeout - self._metadata = metadata - - def __getattr__(self, name: str) -> Any: - return getattr(self._response, name) - - @property - async def pages(self) -> AsyncIterator[catalog.SearchEntriesResponse]: - yield self._response - while self._response.next_page_token: - self._request.page_token = self._response.next_page_token - self._response = await self._method(self._request, retry=self._retry, timeout=self._timeout, metadata=self._metadata) - yield self._response - def __aiter__(self) -> AsyncIterator[catalog.SearchEntriesResult]: - async def async_generator(): - async for page in self.pages: - for response in page.results: - yield response - - return async_generator() - - def __repr__(self) -> str: - return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) - - -class ListMetadataJobsPager: - """A pager for iterating through ``list_metadata_jobs`` requests. - - This class thinly wraps an initial - :class:`google.cloud.dataplex_v1.types.ListMetadataJobsResponse` object, and - provides an ``__iter__`` method to iterate through its - ``metadata_jobs`` field. - - If there are more pages, the ``__iter__`` method will make additional - ``ListMetadataJobs`` requests and continue to iterate - through the ``metadata_jobs`` field on the - corresponding responses. - - All the usual :class:`google.cloud.dataplex_v1.types.ListMetadataJobsResponse` - attributes are available on the pager. If multiple requests are made, only - the most recent response is retained, and thus used for attribute lookup. 
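-
-    For example (assuming ``client`` and ``request`` from the
-    ``list_metadata_jobs`` sample), iterating the pager directly yields
-    ``MetadataJob`` items and fetches further pages transparently:
-
-    .. code-block:: python
-
-        for job in client.list_metadata_jobs(request=request):
-            print(job.name)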
- """ - def __init__(self, - method: Callable[..., catalog.ListMetadataJobsResponse], - request: catalog.ListMetadataJobsRequest, - response: catalog.ListMetadataJobsResponse, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = ()): - """Instantiate the pager. - - Args: - method (Callable): The method that was originally called, and - which instantiated this pager. - request (google.cloud.dataplex_v1.types.ListMetadataJobsRequest): - The initial request object. - response (google.cloud.dataplex_v1.types.ListMetadataJobsResponse): - The initial response object. - retry (google.api_core.retry.Retry): Designation of what errors, - if any, should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - """ - self._method = method - self._request = catalog.ListMetadataJobsRequest(request) - self._response = response - self._retry = retry - self._timeout = timeout - self._metadata = metadata - - def __getattr__(self, name: str) -> Any: - return getattr(self._response, name) - - @property - def pages(self) -> Iterator[catalog.ListMetadataJobsResponse]: - yield self._response - while self._response.next_page_token: - self._request.page_token = self._response.next_page_token - self._response = self._method(self._request, retry=self._retry, timeout=self._timeout, metadata=self._metadata) - yield self._response - - def __iter__(self) -> Iterator[catalog.MetadataJob]: - for page in self.pages: - yield from page.metadata_jobs - - def __repr__(self) -> str: - return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) - - -class ListMetadataJobsAsyncPager: - """A pager for iterating through ``list_metadata_jobs`` requests. - - This class thinly wraps an initial - :class:`google.cloud.dataplex_v1.types.ListMetadataJobsResponse` object, and - provides an ``__aiter__`` method to iterate through its - ``metadata_jobs`` field. - - If there are more pages, the ``__aiter__`` method will make additional - ``ListMetadataJobs`` requests and continue to iterate - through the ``metadata_jobs`` field on the - corresponding responses. - - All the usual :class:`google.cloud.dataplex_v1.types.ListMetadataJobsResponse` - attributes are available on the pager. If multiple requests are made, only - the most recent response is retained, and thus used for attribute lookup. - """ - def __init__(self, - method: Callable[..., Awaitable[catalog.ListMetadataJobsResponse]], - request: catalog.ListMetadataJobsRequest, - response: catalog.ListMetadataJobsResponse, - *, - retry: OptionalAsyncRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = ()): - """Instantiates the pager. - - Args: - method (Callable): The method that was originally called, and - which instantiated this pager. - request (google.cloud.dataplex_v1.types.ListMetadataJobsRequest): - The initial request object. - response (google.cloud.dataplex_v1.types.ListMetadataJobsResponse): - The initial response object. - retry (google.api_core.retry.AsyncRetry): Designation of what errors, - if any, should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. 
- """ - self._method = method - self._request = catalog.ListMetadataJobsRequest(request) - self._response = response - self._retry = retry - self._timeout = timeout - self._metadata = metadata - - def __getattr__(self, name: str) -> Any: - return getattr(self._response, name) - - @property - async def pages(self) -> AsyncIterator[catalog.ListMetadataJobsResponse]: - yield self._response - while self._response.next_page_token: - self._request.page_token = self._response.next_page_token - self._response = await self._method(self._request, retry=self._retry, timeout=self._timeout, metadata=self._metadata) - yield self._response - def __aiter__(self) -> AsyncIterator[catalog.MetadataJob]: - async def async_generator(): - async for page in self.pages: - for response in page.metadata_jobs: - yield response - - return async_generator() - - def __repr__(self) -> str: - return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) diff --git a/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/catalog_service/transports/README.rst b/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/catalog_service/transports/README.rst deleted file mode 100644 index c14dcbeef235..000000000000 --- a/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/catalog_service/transports/README.rst +++ /dev/null @@ -1,9 +0,0 @@ - -transport inheritance structure -_______________________________ - -`CatalogServiceTransport` is the ABC for all transports. -- public child `CatalogServiceGrpcTransport` for sync gRPC transport (defined in `grpc.py`). -- public child `CatalogServiceGrpcAsyncIOTransport` for async gRPC transport (defined in `grpc_asyncio.py`). -- private child `_BaseCatalogServiceRestTransport` for base REST transport with inner classes `_BaseMETHOD` (defined in `rest_base.py`). -- public child `CatalogServiceRestTransport` for sync REST transport with inner classes `METHOD` derived from the parent's corresponding `_BaseMETHOD` classes (defined in `rest.py`). diff --git a/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/catalog_service/transports/__init__.py b/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/catalog_service/transports/__init__.py deleted file mode 100644 index 7e6b046ff29f..000000000000 --- a/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/catalog_service/transports/__init__.py +++ /dev/null @@ -1,33 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -from collections import OrderedDict -from typing import Dict, Type - -from .base import CatalogServiceTransport -from .grpc import CatalogServiceGrpcTransport -from .grpc_asyncio import CatalogServiceGrpcAsyncIOTransport - - -# Compile a registry of transports. 
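-# The keys below are the values accepted by the client's ``transport``
-# argument; resolution is a plain dictionary lookup, e.g. (illustrative):
-#
-#     transport_cls = _transport_registry['grpc']  # CatalogServiceGrpcTransport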
-_transport_registry = OrderedDict() # type: Dict[str, Type[CatalogServiceTransport]] -_transport_registry['grpc'] = CatalogServiceGrpcTransport -_transport_registry['grpc_asyncio'] = CatalogServiceGrpcAsyncIOTransport - -__all__ = ( - 'CatalogServiceTransport', - 'CatalogServiceGrpcTransport', - 'CatalogServiceGrpcAsyncIOTransport', -) diff --git a/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/catalog_service/transports/base.py b/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/catalog_service/transports/base.py deleted file mode 100644 index dc0255a4f11a..000000000000 --- a/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/catalog_service/transports/base.py +++ /dev/null @@ -1,707 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -import abc -from typing import Awaitable, Callable, Dict, Optional, Sequence, Union - -from google.cloud.dataplex_v1 import gapic_version as package_version - -import google.auth # type: ignore -import google.api_core -from google.api_core import exceptions as core_exceptions -from google.api_core import gapic_v1 -from google.api_core import retry as retries -from google.api_core import operations_v1 -from google.auth import credentials as ga_credentials # type: ignore -from google.oauth2 import service_account # type: ignore - -from google.cloud.dataplex_v1.types import catalog -from google.cloud.location import locations_pb2 # type: ignore -from google.iam.v1 import iam_policy_pb2 # type: ignore -from google.iam.v1 import policy_pb2 # type: ignore -from google.longrunning import operations_pb2 # type: ignore -from google.protobuf import empty_pb2 # type: ignore - -DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(gapic_version=package_version.__version__) - - -class CatalogServiceTransport(abc.ABC): - """Abstract transport class for CatalogService.""" - - AUTH_SCOPES = ( - 'https://www.googleapis.com/auth/cloud-platform', - ) - - DEFAULT_HOST: str = 'dataplex.googleapis.com' - def __init__( - self, *, - host: str = DEFAULT_HOST, - credentials: Optional[ga_credentials.Credentials] = None, - credentials_file: Optional[str] = None, - scopes: Optional[Sequence[str]] = None, - quota_project_id: Optional[str] = None, - client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, - always_use_jwt_access: Optional[bool] = False, - api_audience: Optional[str] = None, - **kwargs, - ) -> None: - """Instantiate the transport. - - Args: - host (Optional[str]): - The hostname to connect to (default: 'dataplex.googleapis.com'). - credentials (Optional[google.auth.credentials.Credentials]): The - authorization credentials to attach to requests. These - credentials identify the application to the service; if none - are specified, the client will attempt to ascertain the - credentials from the environment. 
- credentials_file (Optional[str]): A file with credentials that can - be loaded with :func:`google.auth.load_credentials_from_file`. - This argument is mutually exclusive with credentials. - scopes (Optional[Sequence[str]]): A list of scopes. - quota_project_id (Optional[str]): An optional project to use for billing - and quota. - client_info (google.api_core.gapic_v1.client_info.ClientInfo): - The client info used to send a user-agent string along with - API requests. If ``None``, then default info will be used. - Generally, you only need to set this if you're developing - your own client library. - always_use_jwt_access (Optional[bool]): Whether self signed JWT should - be used for service account credentials. - """ - - scopes_kwargs = {"scopes": scopes, "default_scopes": self.AUTH_SCOPES} - - # Save the scopes. - self._scopes = scopes - if not hasattr(self, "_ignore_credentials"): - self._ignore_credentials: bool = False - - # If no credentials are provided, then determine the appropriate - # defaults. - if credentials and credentials_file: - raise core_exceptions.DuplicateCredentialArgs("'credentials_file' and 'credentials' are mutually exclusive") - - if credentials_file is not None: - credentials, _ = google.auth.load_credentials_from_file( - credentials_file, - **scopes_kwargs, - quota_project_id=quota_project_id - ) - elif credentials is None and not self._ignore_credentials: - credentials, _ = google.auth.default(**scopes_kwargs, quota_project_id=quota_project_id) - # Don't apply audience if the credentials file passed from user. - if hasattr(credentials, "with_gdch_audience"): - credentials = credentials.with_gdch_audience(api_audience if api_audience else host) - - # If the credentials are service account credentials, then always try to use self signed JWT. - if always_use_jwt_access and isinstance(credentials, service_account.Credentials) and hasattr(service_account.Credentials, "with_always_use_jwt_access"): - credentials = credentials.with_always_use_jwt_access(True) - - # Save the credentials. - self._credentials = credentials - - # Save the hostname. Default to port 443 (HTTPS) if none is specified. - if ':' not in host: - host += ':443' - self._host = host - - @property - def host(self): - return self._host - - def _prep_wrapped_messages(self, client_info): - # Precompute the wrapped methods. 
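-        # ``gapic_v1.method.wrap_method`` binds each RPC to its default
-        # retry policy and timeout; callers can still override both per
-        # invocation via explicit ``retry=`` / ``timeout=`` arguments.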
- self._wrapped_methods = { - self.create_entry_type: gapic_v1.method.wrap_method( - self.create_entry_type, - default_timeout=60.0, - client_info=client_info, - ), - self.update_entry_type: gapic_v1.method.wrap_method( - self.update_entry_type, - default_timeout=60.0, - client_info=client_info, - ), - self.delete_entry_type: gapic_v1.method.wrap_method( - self.delete_entry_type, - default_timeout=60.0, - client_info=client_info, - ), - self.list_entry_types: gapic_v1.method.wrap_method( - self.list_entry_types, - default_retry=retries.Retry( - initial=1.0, - maximum=10.0, - multiplier=1.3, - predicate=retries.if_exception_type( - core_exceptions.ResourceExhausted, - core_exceptions.ServiceUnavailable, - ), - deadline=60.0, - ), - default_timeout=60.0, - client_info=client_info, - ), - self.get_entry_type: gapic_v1.method.wrap_method( - self.get_entry_type, - default_retry=retries.Retry( - initial=1.0, - maximum=10.0, - multiplier=1.3, - predicate=retries.if_exception_type( - core_exceptions.ResourceExhausted, - core_exceptions.ServiceUnavailable, - ), - deadline=60.0, - ), - default_timeout=60.0, - client_info=client_info, - ), - self.create_aspect_type: gapic_v1.method.wrap_method( - self.create_aspect_type, - default_timeout=60.0, - client_info=client_info, - ), - self.update_aspect_type: gapic_v1.method.wrap_method( - self.update_aspect_type, - default_timeout=60.0, - client_info=client_info, - ), - self.delete_aspect_type: gapic_v1.method.wrap_method( - self.delete_aspect_type, - default_timeout=60.0, - client_info=client_info, - ), - self.list_aspect_types: gapic_v1.method.wrap_method( - self.list_aspect_types, - default_retry=retries.Retry( - initial=1.0, - maximum=10.0, - multiplier=1.3, - predicate=retries.if_exception_type( - core_exceptions.ResourceExhausted, - core_exceptions.ServiceUnavailable, - ), - deadline=60.0, - ), - default_timeout=60.0, - client_info=client_info, - ), - self.get_aspect_type: gapic_v1.method.wrap_method( - self.get_aspect_type, - default_retry=retries.Retry( - initial=1.0, - maximum=10.0, - multiplier=1.3, - predicate=retries.if_exception_type( - core_exceptions.ResourceExhausted, - core_exceptions.ServiceUnavailable, - ), - deadline=60.0, - ), - default_timeout=60.0, - client_info=client_info, - ), - self.create_entry_group: gapic_v1.method.wrap_method( - self.create_entry_group, - default_timeout=60.0, - client_info=client_info, - ), - self.update_entry_group: gapic_v1.method.wrap_method( - self.update_entry_group, - default_timeout=60.0, - client_info=client_info, - ), - self.delete_entry_group: gapic_v1.method.wrap_method( - self.delete_entry_group, - default_timeout=60.0, - client_info=client_info, - ), - self.list_entry_groups: gapic_v1.method.wrap_method( - self.list_entry_groups, - default_retry=retries.Retry( - initial=1.0, - maximum=10.0, - multiplier=1.3, - predicate=retries.if_exception_type( - core_exceptions.ResourceExhausted, - core_exceptions.ServiceUnavailable, - ), - deadline=60.0, - ), - default_timeout=60.0, - client_info=client_info, - ), - self.get_entry_group: gapic_v1.method.wrap_method( - self.get_entry_group, - default_retry=retries.Retry( - initial=1.0, - maximum=10.0, - multiplier=1.3, - predicate=retries.if_exception_type( - core_exceptions.ResourceExhausted, - core_exceptions.ServiceUnavailable, - ), - deadline=60.0, - ), - default_timeout=60.0, - client_info=client_info, - ), - self.create_entry: gapic_v1.method.wrap_method( - self.create_entry, - default_timeout=60.0, - client_info=client_info, - ), - self.update_entry: 
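-            # NOTE: unlike create_entry/delete_entry, which only set a
-            # timeout, update_entry below is also wrapped with a retry on
-            # ResourceExhausted and ServiceUnavailable.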
gapic_v1.method.wrap_method( - self.update_entry, - default_retry=retries.Retry( - initial=1.0, - maximum=10.0, - multiplier=1.3, - predicate=retries.if_exception_type( - core_exceptions.ResourceExhausted, - core_exceptions.ServiceUnavailable, - ), - deadline=60.0, - ), - default_timeout=60.0, - client_info=client_info, - ), - self.delete_entry: gapic_v1.method.wrap_method( - self.delete_entry, - default_timeout=60.0, - client_info=client_info, - ), - self.list_entries: gapic_v1.method.wrap_method( - self.list_entries, - default_retry=retries.Retry( - initial=1.0, - maximum=10.0, - multiplier=1.3, - predicate=retries.if_exception_type( - core_exceptions.ResourceExhausted, - core_exceptions.ServiceUnavailable, - ), - deadline=20.0, - ), - default_timeout=20.0, - client_info=client_info, - ), - self.get_entry: gapic_v1.method.wrap_method( - self.get_entry, - default_retry=retries.Retry( - initial=1.0, - maximum=10.0, - multiplier=1.3, - predicate=retries.if_exception_type( - core_exceptions.ResourceExhausted, - core_exceptions.ServiceUnavailable, - ), - deadline=20.0, - ), - default_timeout=20.0, - client_info=client_info, - ), - self.lookup_entry: gapic_v1.method.wrap_method( - self.lookup_entry, - default_retry=retries.Retry( - initial=1.0, - maximum=10.0, - multiplier=1.3, - predicate=retries.if_exception_type( - core_exceptions.ResourceExhausted, - core_exceptions.ServiceUnavailable, - ), - deadline=20.0, - ), - default_timeout=20.0, - client_info=client_info, - ), - self.search_entries: gapic_v1.method.wrap_method( - self.search_entries, - default_retry=retries.Retry( - initial=1.0, - maximum=10.0, - multiplier=1.3, - predicate=retries.if_exception_type( - core_exceptions.ResourceExhausted, - core_exceptions.ServiceUnavailable, - ), - deadline=60.0, - ), - default_timeout=60.0, - client_info=client_info, - ), - self.create_metadata_job: gapic_v1.method.wrap_method( - self.create_metadata_job, - default_timeout=None, - client_info=client_info, - ), - self.get_metadata_job: gapic_v1.method.wrap_method( - self.get_metadata_job, - default_timeout=None, - client_info=client_info, - ), - self.list_metadata_jobs: gapic_v1.method.wrap_method( - self.list_metadata_jobs, - default_timeout=None, - client_info=client_info, - ), - self.cancel_metadata_job: gapic_v1.method.wrap_method( - self.cancel_metadata_job, - default_timeout=None, - client_info=client_info, - ), - self.get_location: gapic_v1.method.wrap_method( - self.get_location, - default_timeout=None, - client_info=client_info, - ), - self.list_locations: gapic_v1.method.wrap_method( - self.list_locations, - default_timeout=None, - client_info=client_info, - ), - self.cancel_operation: gapic_v1.method.wrap_method( - self.cancel_operation, - default_timeout=None, - client_info=client_info, - ), - self.delete_operation: gapic_v1.method.wrap_method( - self.delete_operation, - default_timeout=None, - client_info=client_info, - ), - self.get_operation: gapic_v1.method.wrap_method( - self.get_operation, - default_timeout=None, - client_info=client_info, - ), - self.list_operations: gapic_v1.method.wrap_method( - self.list_operations, - default_timeout=None, - client_info=client_info, - ), - } - - def close(self): - """Closes resources associated with the transport. - - .. warning:: - Only call this method if the transport is NOT shared - with other clients - this may cause errors in other clients! 
- """ - raise NotImplementedError() - - @property - def operations_client(self): - """Return the client designed to process long-running operations.""" - raise NotImplementedError() - - @property - def create_entry_type(self) -> Callable[ - [catalog.CreateEntryTypeRequest], - Union[ - operations_pb2.Operation, - Awaitable[operations_pb2.Operation] - ]]: - raise NotImplementedError() - - @property - def update_entry_type(self) -> Callable[ - [catalog.UpdateEntryTypeRequest], - Union[ - operations_pb2.Operation, - Awaitable[operations_pb2.Operation] - ]]: - raise NotImplementedError() - - @property - def delete_entry_type(self) -> Callable[ - [catalog.DeleteEntryTypeRequest], - Union[ - operations_pb2.Operation, - Awaitable[operations_pb2.Operation] - ]]: - raise NotImplementedError() - - @property - def list_entry_types(self) -> Callable[ - [catalog.ListEntryTypesRequest], - Union[ - catalog.ListEntryTypesResponse, - Awaitable[catalog.ListEntryTypesResponse] - ]]: - raise NotImplementedError() - - @property - def get_entry_type(self) -> Callable[ - [catalog.GetEntryTypeRequest], - Union[ - catalog.EntryType, - Awaitable[catalog.EntryType] - ]]: - raise NotImplementedError() - - @property - def create_aspect_type(self) -> Callable[ - [catalog.CreateAspectTypeRequest], - Union[ - operations_pb2.Operation, - Awaitable[operations_pb2.Operation] - ]]: - raise NotImplementedError() - - @property - def update_aspect_type(self) -> Callable[ - [catalog.UpdateAspectTypeRequest], - Union[ - operations_pb2.Operation, - Awaitable[operations_pb2.Operation] - ]]: - raise NotImplementedError() - - @property - def delete_aspect_type(self) -> Callable[ - [catalog.DeleteAspectTypeRequest], - Union[ - operations_pb2.Operation, - Awaitable[operations_pb2.Operation] - ]]: - raise NotImplementedError() - - @property - def list_aspect_types(self) -> Callable[ - [catalog.ListAspectTypesRequest], - Union[ - catalog.ListAspectTypesResponse, - Awaitable[catalog.ListAspectTypesResponse] - ]]: - raise NotImplementedError() - - @property - def get_aspect_type(self) -> Callable[ - [catalog.GetAspectTypeRequest], - Union[ - catalog.AspectType, - Awaitable[catalog.AspectType] - ]]: - raise NotImplementedError() - - @property - def create_entry_group(self) -> Callable[ - [catalog.CreateEntryGroupRequest], - Union[ - operations_pb2.Operation, - Awaitable[operations_pb2.Operation] - ]]: - raise NotImplementedError() - - @property - def update_entry_group(self) -> Callable[ - [catalog.UpdateEntryGroupRequest], - Union[ - operations_pb2.Operation, - Awaitable[operations_pb2.Operation] - ]]: - raise NotImplementedError() - - @property - def delete_entry_group(self) -> Callable[ - [catalog.DeleteEntryGroupRequest], - Union[ - operations_pb2.Operation, - Awaitable[operations_pb2.Operation] - ]]: - raise NotImplementedError() - - @property - def list_entry_groups(self) -> Callable[ - [catalog.ListEntryGroupsRequest], - Union[ - catalog.ListEntryGroupsResponse, - Awaitable[catalog.ListEntryGroupsResponse] - ]]: - raise NotImplementedError() - - @property - def get_entry_group(self) -> Callable[ - [catalog.GetEntryGroupRequest], - Union[ - catalog.EntryGroup, - Awaitable[catalog.EntryGroup] - ]]: - raise NotImplementedError() - - @property - def create_entry(self) -> Callable[ - [catalog.CreateEntryRequest], - Union[ - catalog.Entry, - Awaitable[catalog.Entry] - ]]: - raise NotImplementedError() - - @property - def update_entry(self) -> Callable[ - [catalog.UpdateEntryRequest], - Union[ - catalog.Entry, - Awaitable[catalog.Entry] - 
]]: - raise NotImplementedError() - - @property - def delete_entry(self) -> Callable[ - [catalog.DeleteEntryRequest], - Union[ - catalog.Entry, - Awaitable[catalog.Entry] - ]]: - raise NotImplementedError() - - @property - def list_entries(self) -> Callable[ - [catalog.ListEntriesRequest], - Union[ - catalog.ListEntriesResponse, - Awaitable[catalog.ListEntriesResponse] - ]]: - raise NotImplementedError() - - @property - def get_entry(self) -> Callable[ - [catalog.GetEntryRequest], - Union[ - catalog.Entry, - Awaitable[catalog.Entry] - ]]: - raise NotImplementedError() - - @property - def lookup_entry(self) -> Callable[ - [catalog.LookupEntryRequest], - Union[ - catalog.Entry, - Awaitable[catalog.Entry] - ]]: - raise NotImplementedError() - - @property - def search_entries(self) -> Callable[ - [catalog.SearchEntriesRequest], - Union[ - catalog.SearchEntriesResponse, - Awaitable[catalog.SearchEntriesResponse] - ]]: - raise NotImplementedError() - - @property - def create_metadata_job(self) -> Callable[ - [catalog.CreateMetadataJobRequest], - Union[ - operations_pb2.Operation, - Awaitable[operations_pb2.Operation] - ]]: - raise NotImplementedError() - - @property - def get_metadata_job(self) -> Callable[ - [catalog.GetMetadataJobRequest], - Union[ - catalog.MetadataJob, - Awaitable[catalog.MetadataJob] - ]]: - raise NotImplementedError() - - @property - def list_metadata_jobs(self) -> Callable[ - [catalog.ListMetadataJobsRequest], - Union[ - catalog.ListMetadataJobsResponse, - Awaitable[catalog.ListMetadataJobsResponse] - ]]: - raise NotImplementedError() - - @property - def cancel_metadata_job(self) -> Callable[ - [catalog.CancelMetadataJobRequest], - Union[ - empty_pb2.Empty, - Awaitable[empty_pb2.Empty] - ]]: - raise NotImplementedError() - - @property - def list_operations( - self, - ) -> Callable[ - [operations_pb2.ListOperationsRequest], - Union[operations_pb2.ListOperationsResponse, Awaitable[operations_pb2.ListOperationsResponse]], - ]: - raise NotImplementedError() - - @property - def get_operation( - self, - ) -> Callable[ - [operations_pb2.GetOperationRequest], - Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], - ]: - raise NotImplementedError() - - @property - def cancel_operation( - self, - ) -> Callable[ - [operations_pb2.CancelOperationRequest], - None, - ]: - raise NotImplementedError() - - @property - def delete_operation( - self, - ) -> Callable[ - [operations_pb2.DeleteOperationRequest], - None, - ]: - raise NotImplementedError() - - @property - def get_location(self, - ) -> Callable[ - [locations_pb2.GetLocationRequest], - Union[locations_pb2.Location, Awaitable[locations_pb2.Location]], - ]: - raise NotImplementedError() - - @property - def list_locations(self, - ) -> Callable[ - [locations_pb2.ListLocationsRequest], - Union[locations_pb2.ListLocationsResponse, Awaitable[locations_pb2.ListLocationsResponse]], - ]: - raise NotImplementedError() - - @property - def kind(self) -> str: - raise NotImplementedError() - - -__all__ = ( - 'CatalogServiceTransport', -) diff --git a/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/catalog_service/transports/grpc.py b/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/catalog_service/transports/grpc.py deleted file mode 100644 index 72fef5ad3155..000000000000 --- a/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/catalog_service/transports/grpc.py +++ /dev/null @@ -1,1076 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# 
Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -import warnings -from typing import Callable, Dict, Optional, Sequence, Tuple, Union - -from google.api_core import grpc_helpers -from google.api_core import operations_v1 -from google.api_core import gapic_v1 -import google.auth # type: ignore -from google.auth import credentials as ga_credentials # type: ignore -from google.auth.transport.grpc import SslCredentials # type: ignore - -import grpc # type: ignore - -from google.cloud.dataplex_v1.types import catalog -from google.cloud.location import locations_pb2 # type: ignore -from google.iam.v1 import iam_policy_pb2 # type: ignore -from google.iam.v1 import policy_pb2 # type: ignore -from google.longrunning import operations_pb2 # type: ignore -from google.protobuf import empty_pb2 # type: ignore -from .base import CatalogServiceTransport, DEFAULT_CLIENT_INFO - - -class CatalogServiceGrpcTransport(CatalogServiceTransport): - """gRPC backend transport for CatalogService. - - The primary resources offered by this service are - EntryGroups, EntryTypes, AspectTypes, and Entries. They - collectively let data administrators organize, manage, secure, - and catalog data located across cloud projects in their - organization in a variety of storage systems, including Cloud - Storage and BigQuery. - - This class defines the same methods as the primary client, so the - primary client can load the underlying transport implementation - and call it. - - It sends protocol buffers over the wire using gRPC (which is built on - top of HTTP/2); the ``grpcio`` package must be installed. - """ - _stubs: Dict[str, Callable] - - def __init__(self, *, - host: str = 'dataplex.googleapis.com', - credentials: Optional[ga_credentials.Credentials] = None, - credentials_file: Optional[str] = None, - scopes: Optional[Sequence[str]] = None, - channel: Optional[Union[grpc.Channel, Callable[..., grpc.Channel]]] = None, - api_mtls_endpoint: Optional[str] = None, - client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None, - ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None, - client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, - quota_project_id: Optional[str] = None, - client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, - always_use_jwt_access: Optional[bool] = False, - api_audience: Optional[str] = None, - ) -> None: - """Instantiate the transport. - - Args: - host (Optional[str]): - The hostname to connect to (default: 'dataplex.googleapis.com'). - credentials (Optional[google.auth.credentials.Credentials]): The - authorization credentials to attach to requests. These - credentials identify the application to the service; if none - are specified, the client will attempt to ascertain the - credentials from the environment. - This argument is ignored if a ``channel`` instance is provided. - credentials_file (Optional[str]): A file with credentials that can - be loaded with :func:`google.auth.load_credentials_from_file`. 
- This argument is ignored if a ``channel`` instance is provided. - scopes (Optional[Sequence[str]]): A list of scopes. This argument is - ignored if a ``channel`` instance is provided. - channel (Optional[Union[grpc.Channel, Callable[..., grpc.Channel]]]): - A ``Channel`` instance through which to make calls, or a Callable - that constructs and returns one. If set to None, ``self.create_channel`` - is used to create the channel. If a Callable is given, it will be called - with the same arguments as used in ``self.create_channel``. - api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint. - If provided, it overrides the ``host`` argument and tries to create - a mutual TLS channel with client SSL credentials from - ``client_cert_source`` or application default SSL credentials. - client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]): - Deprecated. A callback to provide client SSL certificate bytes and - private key bytes, both in PEM format. It is ignored if - ``api_mtls_endpoint`` is None. - ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials - for the grpc channel. It is ignored if a ``channel`` instance is provided. - client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]): - A callback to provide client certificate bytes and private key bytes, - both in PEM format. It is used to configure a mutual TLS channel. It is - ignored if a ``channel`` instance or ``ssl_channel_credentials`` is provided. - quota_project_id (Optional[str]): An optional project to use for billing - and quota. - client_info (google.api_core.gapic_v1.client_info.ClientInfo): - The client info used to send a user-agent string along with - API requests. If ``None``, then default info will be used. - Generally, you only need to set this if you're developing - your own client library. - always_use_jwt_access (Optional[bool]): Whether self signed JWT should - be used for service account credentials. - - Raises: - google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport - creation failed for any reason. - google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` - and ``credentials_file`` are passed. - """ - self._grpc_channel = None - self._ssl_channel_credentials = ssl_channel_credentials - self._stubs: Dict[str, Callable] = {} - self._operations_client: Optional[operations_v1.OperationsClient] = None - - if api_mtls_endpoint: - warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning) - if client_cert_source: - warnings.warn("client_cert_source is deprecated", DeprecationWarning) - - if isinstance(channel, grpc.Channel): - # Ignore credentials if a channel was passed. - credentials = None - self._ignore_credentials = True - # If a channel was explicitly provided, set it. - self._grpc_channel = channel - self._ssl_channel_credentials = None - - else: - if api_mtls_endpoint: - host = api_mtls_endpoint - - # Create SSL credentials with client_cert_source or application - # default SSL credentials.
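-                # The deprecated ``client_cert_source`` callback wins when
-                # supplied; otherwise fall back to application-default SSL
-                # credentials for the mTLS endpoint.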
- if client_cert_source: - cert, key = client_cert_source() - self._ssl_channel_credentials = grpc.ssl_channel_credentials( - certificate_chain=cert, private_key=key - ) - else: - self._ssl_channel_credentials = SslCredentials().ssl_credentials - - else: - if client_cert_source_for_mtls and not ssl_channel_credentials: - cert, key = client_cert_source_for_mtls() - self._ssl_channel_credentials = grpc.ssl_channel_credentials( - certificate_chain=cert, private_key=key - ) - - # The base transport sets the host, credentials and scopes - super().__init__( - host=host, - credentials=credentials, - credentials_file=credentials_file, - scopes=scopes, - quota_project_id=quota_project_id, - client_info=client_info, - always_use_jwt_access=always_use_jwt_access, - api_audience=api_audience, - ) - - if not self._grpc_channel: - # initialize with the provided callable or the default channel - channel_init = channel or type(self).create_channel - self._grpc_channel = channel_init( - self._host, - # use the credentials which are saved - credentials=self._credentials, - # Set ``credentials_file`` to ``None`` here as - # the credentials that we saved earlier should be used. - credentials_file=None, - scopes=self._scopes, - ssl_credentials=self._ssl_channel_credentials, - quota_project_id=quota_project_id, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ], - ) - - # Wrap messages. This must be done after self._grpc_channel exists - self._prep_wrapped_messages(client_info) - - @classmethod - def create_channel(cls, - host: str = 'dataplex.googleapis.com', - credentials: Optional[ga_credentials.Credentials] = None, - credentials_file: Optional[str] = None, - scopes: Optional[Sequence[str]] = None, - quota_project_id: Optional[str] = None, - **kwargs) -> grpc.Channel: - """Create and return a gRPC channel object. - Args: - host (Optional[str]): The host for the channel to use. - credentials (Optional[~.Credentials]): The - authorization credentials to attach to requests. These - credentials identify this application to the service. If - none are specified, the client will attempt to ascertain - the credentials from the environment. - credentials_file (Optional[str]): A file with credentials that can - be loaded with :func:`google.auth.load_credentials_from_file`. - This argument is mutually exclusive with credentials. - scopes (Optional[Sequence[str]]): An optional list of scopes needed for this - service. These are only used when credentials are not specified and - are passed to :func:`google.auth.default`. - quota_project_id (Optional[str]): An optional project to use for billing - and quota. - kwargs (Optional[dict]): Keyword arguments, which are passed to the - channel creation. - Returns: - grpc.Channel: A gRPC channel object. - - Raises: - google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` - and ``credentials_file`` are passed. - """ - - return grpc_helpers.create_channel( - host, - credentials=credentials, - credentials_file=credentials_file, - quota_project_id=quota_project_id, - default_scopes=cls.AUTH_SCOPES, - scopes=scopes, - default_host=cls.DEFAULT_HOST, - **kwargs - ) - - @property - def grpc_channel(self) -> grpc.Channel: - """Return the channel designed to connect to this service. - """ - return self._grpc_channel - - @property - def operations_client(self) -> operations_v1.OperationsClient: - """Create the client designed to process long-running operations.
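-
-        A hedged polling sketch (illustrative only; assumes ``transport`` is
-        an already-initialized instance and ``request`` a valid create
-        request):
-
-            lro = transport.create_entry_type(request)  # operations_pb2.Operation
-            current = transport.operations_client.get_operation(lro.name)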
- - This property caches on the instance; repeated calls return the same - client. - """ - # Quick check: Only create a new client if we do not already have one. - if self._operations_client is None: - self._operations_client = operations_v1.OperationsClient( - self.grpc_channel - ) - - # Return the client from cache. - return self._operations_client - - @property - def create_entry_type(self) -> Callable[ - [catalog.CreateEntryTypeRequest], - operations_pb2.Operation]: - r"""Return a callable for the create entry type method over gRPC. - - Creates an EntryType. - - Returns: - Callable[[~.CreateEntryTypeRequest], - ~.Operation]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'create_entry_type' not in self._stubs: - self._stubs['create_entry_type'] = self.grpc_channel.unary_unary( - '/google.cloud.dataplex.v1.CatalogService/CreateEntryType', - request_serializer=catalog.CreateEntryTypeRequest.serialize, - response_deserializer=operations_pb2.Operation.FromString, - ) - return self._stubs['create_entry_type'] - - @property - def update_entry_type(self) -> Callable[ - [catalog.UpdateEntryTypeRequest], - operations_pb2.Operation]: - r"""Return a callable for the update entry type method over gRPC. - - Updates an EntryType. - - Returns: - Callable[[~.UpdateEntryTypeRequest], - ~.Operation]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'update_entry_type' not in self._stubs: - self._stubs['update_entry_type'] = self.grpc_channel.unary_unary( - '/google.cloud.dataplex.v1.CatalogService/UpdateEntryType', - request_serializer=catalog.UpdateEntryTypeRequest.serialize, - response_deserializer=operations_pb2.Operation.FromString, - ) - return self._stubs['update_entry_type'] - - @property - def delete_entry_type(self) -> Callable[ - [catalog.DeleteEntryTypeRequest], - operations_pb2.Operation]: - r"""Return a callable for the delete entry type method over gRPC. - - Deletes an EntryType. - - Returns: - Callable[[~.DeleteEntryTypeRequest], - ~.Operation]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'delete_entry_type' not in self._stubs: - self._stubs['delete_entry_type'] = self.grpc_channel.unary_unary( - '/google.cloud.dataplex.v1.CatalogService/DeleteEntryType', - request_serializer=catalog.DeleteEntryTypeRequest.serialize, - response_deserializer=operations_pb2.Operation.FromString, - ) - return self._stubs['delete_entry_type'] - - @property - def list_entry_types(self) -> Callable[ - [catalog.ListEntryTypesRequest], - catalog.ListEntryTypesResponse]: - r"""Return a callable for the list entry types method over gRPC. - - Lists EntryType resources in a project and location. - - Returns: - Callable[[~.ListEntryTypesRequest], - ~.ListEntryTypesResponse]: - A function that, when called, will call the underlying RPC - on the server. 
- """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'list_entry_types' not in self._stubs: - self._stubs['list_entry_types'] = self.grpc_channel.unary_unary( - '/google.cloud.dataplex.v1.CatalogService/ListEntryTypes', - request_serializer=catalog.ListEntryTypesRequest.serialize, - response_deserializer=catalog.ListEntryTypesResponse.deserialize, - ) - return self._stubs['list_entry_types'] - - @property - def get_entry_type(self) -> Callable[ - [catalog.GetEntryTypeRequest], - catalog.EntryType]: - r"""Return a callable for the get entry type method over gRPC. - - Gets an EntryType. - - Returns: - Callable[[~.GetEntryTypeRequest], - ~.EntryType]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'get_entry_type' not in self._stubs: - self._stubs['get_entry_type'] = self.grpc_channel.unary_unary( - '/google.cloud.dataplex.v1.CatalogService/GetEntryType', - request_serializer=catalog.GetEntryTypeRequest.serialize, - response_deserializer=catalog.EntryType.deserialize, - ) - return self._stubs['get_entry_type'] - - @property - def create_aspect_type(self) -> Callable[ - [catalog.CreateAspectTypeRequest], - operations_pb2.Operation]: - r"""Return a callable for the create aspect type method over gRPC. - - Creates an AspectType. - - Returns: - Callable[[~.CreateAspectTypeRequest], - ~.Operation]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'create_aspect_type' not in self._stubs: - self._stubs['create_aspect_type'] = self.grpc_channel.unary_unary( - '/google.cloud.dataplex.v1.CatalogService/CreateAspectType', - request_serializer=catalog.CreateAspectTypeRequest.serialize, - response_deserializer=operations_pb2.Operation.FromString, - ) - return self._stubs['create_aspect_type'] - - @property - def update_aspect_type(self) -> Callable[ - [catalog.UpdateAspectTypeRequest], - operations_pb2.Operation]: - r"""Return a callable for the update aspect type method over gRPC. - - Updates an AspectType. - - Returns: - Callable[[~.UpdateAspectTypeRequest], - ~.Operation]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'update_aspect_type' not in self._stubs: - self._stubs['update_aspect_type'] = self.grpc_channel.unary_unary( - '/google.cloud.dataplex.v1.CatalogService/UpdateAspectType', - request_serializer=catalog.UpdateAspectTypeRequest.serialize, - response_deserializer=operations_pb2.Operation.FromString, - ) - return self._stubs['update_aspect_type'] - - @property - def delete_aspect_type(self) -> Callable[ - [catalog.DeleteAspectTypeRequest], - operations_pb2.Operation]: - r"""Return a callable for the delete aspect type method over gRPC. - - Deletes an AspectType. 
- - Returns: - Callable[[~.DeleteAspectTypeRequest], - ~.Operation]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'delete_aspect_type' not in self._stubs: - self._stubs['delete_aspect_type'] = self.grpc_channel.unary_unary( - '/google.cloud.dataplex.v1.CatalogService/DeleteAspectType', - request_serializer=catalog.DeleteAspectTypeRequest.serialize, - response_deserializer=operations_pb2.Operation.FromString, - ) - return self._stubs['delete_aspect_type'] - - @property - def list_aspect_types(self) -> Callable[ - [catalog.ListAspectTypesRequest], - catalog.ListAspectTypesResponse]: - r"""Return a callable for the list aspect types method over gRPC. - - Lists AspectType resources in a project and location. - - Returns: - Callable[[~.ListAspectTypesRequest], - ~.ListAspectTypesResponse]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'list_aspect_types' not in self._stubs: - self._stubs['list_aspect_types'] = self.grpc_channel.unary_unary( - '/google.cloud.dataplex.v1.CatalogService/ListAspectTypes', - request_serializer=catalog.ListAspectTypesRequest.serialize, - response_deserializer=catalog.ListAspectTypesResponse.deserialize, - ) - return self._stubs['list_aspect_types'] - - @property - def get_aspect_type(self) -> Callable[ - [catalog.GetAspectTypeRequest], - catalog.AspectType]: - r"""Return a callable for the get aspect type method over gRPC. - - Gets an AspectType. - - Returns: - Callable[[~.GetAspectTypeRequest], - ~.AspectType]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'get_aspect_type' not in self._stubs: - self._stubs['get_aspect_type'] = self.grpc_channel.unary_unary( - '/google.cloud.dataplex.v1.CatalogService/GetAspectType', - request_serializer=catalog.GetAspectTypeRequest.serialize, - response_deserializer=catalog.AspectType.deserialize, - ) - return self._stubs['get_aspect_type'] - - @property - def create_entry_group(self) -> Callable[ - [catalog.CreateEntryGroupRequest], - operations_pb2.Operation]: - r"""Return a callable for the create entry group method over gRPC. - - Creates an EntryGroup. - - Returns: - Callable[[~.CreateEntryGroupRequest], - ~.Operation]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. 
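-        # Stubs are created lazily on first property access and cached in
-        # ``self._stubs``, so later calls reuse the same channel-bound
-        # callable.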
- if 'create_entry_group' not in self._stubs: - self._stubs['create_entry_group'] = self.grpc_channel.unary_unary( - '/google.cloud.dataplex.v1.CatalogService/CreateEntryGroup', - request_serializer=catalog.CreateEntryGroupRequest.serialize, - response_deserializer=operations_pb2.Operation.FromString, - ) - return self._stubs['create_entry_group'] - - @property - def update_entry_group(self) -> Callable[ - [catalog.UpdateEntryGroupRequest], - operations_pb2.Operation]: - r"""Return a callable for the update entry group method over gRPC. - - Updates an EntryGroup. - - Returns: - Callable[[~.UpdateEntryGroupRequest], - ~.Operation]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'update_entry_group' not in self._stubs: - self._stubs['update_entry_group'] = self.grpc_channel.unary_unary( - '/google.cloud.dataplex.v1.CatalogService/UpdateEntryGroup', - request_serializer=catalog.UpdateEntryGroupRequest.serialize, - response_deserializer=operations_pb2.Operation.FromString, - ) - return self._stubs['update_entry_group'] - - @property - def delete_entry_group(self) -> Callable[ - [catalog.DeleteEntryGroupRequest], - operations_pb2.Operation]: - r"""Return a callable for the delete entry group method over gRPC. - - Deletes an EntryGroup. - - Returns: - Callable[[~.DeleteEntryGroupRequest], - ~.Operation]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'delete_entry_group' not in self._stubs: - self._stubs['delete_entry_group'] = self.grpc_channel.unary_unary( - '/google.cloud.dataplex.v1.CatalogService/DeleteEntryGroup', - request_serializer=catalog.DeleteEntryGroupRequest.serialize, - response_deserializer=operations_pb2.Operation.FromString, - ) - return self._stubs['delete_entry_group'] - - @property - def list_entry_groups(self) -> Callable[ - [catalog.ListEntryGroupsRequest], - catalog.ListEntryGroupsResponse]: - r"""Return a callable for the list entry groups method over gRPC. - - Lists EntryGroup resources in a project and location. - - Returns: - Callable[[~.ListEntryGroupsRequest], - ~.ListEntryGroupsResponse]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'list_entry_groups' not in self._stubs: - self._stubs['list_entry_groups'] = self.grpc_channel.unary_unary( - '/google.cloud.dataplex.v1.CatalogService/ListEntryGroups', - request_serializer=catalog.ListEntryGroupsRequest.serialize, - response_deserializer=catalog.ListEntryGroupsResponse.deserialize, - ) - return self._stubs['list_entry_groups'] - - @property - def get_entry_group(self) -> Callable[ - [catalog.GetEntryGroupRequest], - catalog.EntryGroup]: - r"""Return a callable for the get entry group method over gRPC. - - Gets an EntryGroup. - - Returns: - Callable[[~.GetEntryGroupRequest], - ~.EntryGroup]: - A function that, when called, will call the underlying RPC - on the server. 
- """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'get_entry_group' not in self._stubs: - self._stubs['get_entry_group'] = self.grpc_channel.unary_unary( - '/google.cloud.dataplex.v1.CatalogService/GetEntryGroup', - request_serializer=catalog.GetEntryGroupRequest.serialize, - response_deserializer=catalog.EntryGroup.deserialize, - ) - return self._stubs['get_entry_group'] - - @property - def create_entry(self) -> Callable[ - [catalog.CreateEntryRequest], - catalog.Entry]: - r"""Return a callable for the create entry method over gRPC. - - Creates an Entry. - - Returns: - Callable[[~.CreateEntryRequest], - ~.Entry]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'create_entry' not in self._stubs: - self._stubs['create_entry'] = self.grpc_channel.unary_unary( - '/google.cloud.dataplex.v1.CatalogService/CreateEntry', - request_serializer=catalog.CreateEntryRequest.serialize, - response_deserializer=catalog.Entry.deserialize, - ) - return self._stubs['create_entry'] - - @property - def update_entry(self) -> Callable[ - [catalog.UpdateEntryRequest], - catalog.Entry]: - r"""Return a callable for the update entry method over gRPC. - - Updates an Entry. - - Returns: - Callable[[~.UpdateEntryRequest], - ~.Entry]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'update_entry' not in self._stubs: - self._stubs['update_entry'] = self.grpc_channel.unary_unary( - '/google.cloud.dataplex.v1.CatalogService/UpdateEntry', - request_serializer=catalog.UpdateEntryRequest.serialize, - response_deserializer=catalog.Entry.deserialize, - ) - return self._stubs['update_entry'] - - @property - def delete_entry(self) -> Callable[ - [catalog.DeleteEntryRequest], - catalog.Entry]: - r"""Return a callable for the delete entry method over gRPC. - - Deletes an Entry. - - Returns: - Callable[[~.DeleteEntryRequest], - ~.Entry]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'delete_entry' not in self._stubs: - self._stubs['delete_entry'] = self.grpc_channel.unary_unary( - '/google.cloud.dataplex.v1.CatalogService/DeleteEntry', - request_serializer=catalog.DeleteEntryRequest.serialize, - response_deserializer=catalog.Entry.deserialize, - ) - return self._stubs['delete_entry'] - - @property - def list_entries(self) -> Callable[ - [catalog.ListEntriesRequest], - catalog.ListEntriesResponse]: - r"""Return a callable for the list entries method over gRPC. - - Lists Entries within an EntryGroup. - - Returns: - Callable[[~.ListEntriesRequest], - ~.ListEntriesResponse]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. 
- # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'list_entries' not in self._stubs: - self._stubs['list_entries'] = self.grpc_channel.unary_unary( - '/google.cloud.dataplex.v1.CatalogService/ListEntries', - request_serializer=catalog.ListEntriesRequest.serialize, - response_deserializer=catalog.ListEntriesResponse.deserialize, - ) - return self._stubs['list_entries'] - - @property - def get_entry(self) -> Callable[ - [catalog.GetEntryRequest], - catalog.Entry]: - r"""Return a callable for the get entry method over gRPC. - - Gets an Entry. - - **Caution**: The BigQuery metadata that is stored in Dataplex - Catalog is changing. For more information, see `Changes to - BigQuery metadata stored in Dataplex - Catalog `__. - - Returns: - Callable[[~.GetEntryRequest], - ~.Entry]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'get_entry' not in self._stubs: - self._stubs['get_entry'] = self.grpc_channel.unary_unary( - '/google.cloud.dataplex.v1.CatalogService/GetEntry', - request_serializer=catalog.GetEntryRequest.serialize, - response_deserializer=catalog.Entry.deserialize, - ) - return self._stubs['get_entry'] - - @property - def lookup_entry(self) -> Callable[ - [catalog.LookupEntryRequest], - catalog.Entry]: - r"""Return a callable for the lookup entry method over gRPC. - - Looks up a single Entry by name using the permission on the - source system. - - **Caution**: The BigQuery metadata that is stored in Dataplex - Catalog is changing. For more information, see `Changes to - BigQuery metadata stored in Dataplex - Catalog `__. - - Returns: - Callable[[~.LookupEntryRequest], - ~.Entry]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'lookup_entry' not in self._stubs: - self._stubs['lookup_entry'] = self.grpc_channel.unary_unary( - '/google.cloud.dataplex.v1.CatalogService/LookupEntry', - request_serializer=catalog.LookupEntryRequest.serialize, - response_deserializer=catalog.Entry.deserialize, - ) - return self._stubs['lookup_entry'] - - @property - def search_entries(self) -> Callable[ - [catalog.SearchEntriesRequest], - catalog.SearchEntriesResponse]: - r"""Return a callable for the search entries method over gRPC. - - Searches for Entries matching the given query and - scope. - - Returns: - Callable[[~.SearchEntriesRequest], - ~.SearchEntriesResponse]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. 
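-        # SearchEntries is paginated; callers typically consume it through
-        # the SearchEntries pagers defined alongside the client rather than
-        # by invoking this stub directly.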
- if 'search_entries' not in self._stubs: - self._stubs['search_entries'] = self.grpc_channel.unary_unary( - '/google.cloud.dataplex.v1.CatalogService/SearchEntries', - request_serializer=catalog.SearchEntriesRequest.serialize, - response_deserializer=catalog.SearchEntriesResponse.deserialize, - ) - return self._stubs['search_entries'] - - @property - def create_metadata_job(self) -> Callable[ - [catalog.CreateMetadataJobRequest], - operations_pb2.Operation]: - r"""Return a callable for the create metadata job method over gRPC. - - Creates a metadata job. For example, use a metadata - job to import Dataplex Catalog entries and aspects from - a third-party system into Dataplex. - - Returns: - Callable[[~.CreateMetadataJobRequest], - ~.Operation]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'create_metadata_job' not in self._stubs: - self._stubs['create_metadata_job'] = self.grpc_channel.unary_unary( - '/google.cloud.dataplex.v1.CatalogService/CreateMetadataJob', - request_serializer=catalog.CreateMetadataJobRequest.serialize, - response_deserializer=operations_pb2.Operation.FromString, - ) - return self._stubs['create_metadata_job'] - - @property - def get_metadata_job(self) -> Callable[ - [catalog.GetMetadataJobRequest], - catalog.MetadataJob]: - r"""Return a callable for the get metadata job method over gRPC. - - Gets a metadata job. - - Returns: - Callable[[~.GetMetadataJobRequest], - ~.MetadataJob]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'get_metadata_job' not in self._stubs: - self._stubs['get_metadata_job'] = self.grpc_channel.unary_unary( - '/google.cloud.dataplex.v1.CatalogService/GetMetadataJob', - request_serializer=catalog.GetMetadataJobRequest.serialize, - response_deserializer=catalog.MetadataJob.deserialize, - ) - return self._stubs['get_metadata_job'] - - @property - def list_metadata_jobs(self) -> Callable[ - [catalog.ListMetadataJobsRequest], - catalog.ListMetadataJobsResponse]: - r"""Return a callable for the list metadata jobs method over gRPC. - - Lists metadata jobs. - - Returns: - Callable[[~.ListMetadataJobsRequest], - ~.ListMetadataJobsResponse]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'list_metadata_jobs' not in self._stubs: - self._stubs['list_metadata_jobs'] = self.grpc_channel.unary_unary( - '/google.cloud.dataplex.v1.CatalogService/ListMetadataJobs', - request_serializer=catalog.ListMetadataJobsRequest.serialize, - response_deserializer=catalog.ListMetadataJobsResponse.deserialize, - ) - return self._stubs['list_metadata_jobs'] - - @property - def cancel_metadata_job(self) -> Callable[ - [catalog.CancelMetadataJobRequest], - empty_pb2.Empty]: - r"""Return a callable for the cancel metadata job method over gRPC. - - Cancels a metadata job. - - If you cancel a metadata import job that is in progress, - the changes in the job might be partially applied. 
We - recommend that you reset the state of the entry groups - in your project by running another metadata job that - reverts the changes from the canceled job. - - Returns: - Callable[[~.CancelMetadataJobRequest], - ~.Empty]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'cancel_metadata_job' not in self._stubs: - self._stubs['cancel_metadata_job'] = self.grpc_channel.unary_unary( - '/google.cloud.dataplex.v1.CatalogService/CancelMetadataJob', - request_serializer=catalog.CancelMetadataJobRequest.serialize, - response_deserializer=empty_pb2.Empty.FromString, - ) - return self._stubs['cancel_metadata_job'] - - def close(self): - self.grpc_channel.close() - - @property - def delete_operation( - self, - ) -> Callable[[operations_pb2.DeleteOperationRequest], None]: - r"""Return a callable for the delete_operation method over gRPC. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if "delete_operation" not in self._stubs: - self._stubs["delete_operation"] = self.grpc_channel.unary_unary( - "/google.longrunning.Operations/DeleteOperation", - request_serializer=operations_pb2.DeleteOperationRequest.SerializeToString, - response_deserializer=None, - ) - return self._stubs["delete_operation"] - - @property - def cancel_operation( - self, - ) -> Callable[[operations_pb2.CancelOperationRequest], None]: - r"""Return a callable for the cancel_operation method over gRPC. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if "cancel_operation" not in self._stubs: - self._stubs["cancel_operation"] = self.grpc_channel.unary_unary( - "/google.longrunning.Operations/CancelOperation", - request_serializer=operations_pb2.CancelOperationRequest.SerializeToString, - response_deserializer=None, - ) - return self._stubs["cancel_operation"] - - @property - def get_operation( - self, - ) -> Callable[[operations_pb2.GetOperationRequest], operations_pb2.Operation]: - r"""Return a callable for the get_operation method over gRPC. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if "get_operation" not in self._stubs: - self._stubs["get_operation"] = self.grpc_channel.unary_unary( - "/google.longrunning.Operations/GetOperation", - request_serializer=operations_pb2.GetOperationRequest.SerializeToString, - response_deserializer=operations_pb2.Operation.FromString, - ) - return self._stubs["get_operation"] - - @property - def list_operations( - self, - ) -> Callable[[operations_pb2.ListOperationsRequest], operations_pb2.ListOperationsResponse]: - r"""Return a callable for the list_operations method over gRPC. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. 
- if "list_operations" not in self._stubs: - self._stubs["list_operations"] = self.grpc_channel.unary_unary( - "/google.longrunning.Operations/ListOperations", - request_serializer=operations_pb2.ListOperationsRequest.SerializeToString, - response_deserializer=operations_pb2.ListOperationsResponse.FromString, - ) - return self._stubs["list_operations"] - - @property - def list_locations( - self, - ) -> Callable[[locations_pb2.ListLocationsRequest], locations_pb2.ListLocationsResponse]: - r"""Return a callable for the list locations method over gRPC. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if "list_locations" not in self._stubs: - self._stubs["list_locations"] = self.grpc_channel.unary_unary( - "/google.cloud.location.Locations/ListLocations", - request_serializer=locations_pb2.ListLocationsRequest.SerializeToString, - response_deserializer=locations_pb2.ListLocationsResponse.FromString, - ) - return self._stubs["list_locations"] - - @property - def get_location( - self, - ) -> Callable[[locations_pb2.GetLocationRequest], locations_pb2.Location]: - r"""Return a callable for the list locations method over gRPC. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if "get_location" not in self._stubs: - self._stubs["get_location"] = self.grpc_channel.unary_unary( - "/google.cloud.location.Locations/GetLocation", - request_serializer=locations_pb2.GetLocationRequest.SerializeToString, - response_deserializer=locations_pb2.Location.FromString, - ) - return self._stubs["get_location"] - - @property - def kind(self) -> str: - return "grpc" - - -__all__ = ( - 'CatalogServiceGrpcTransport', -) diff --git a/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/catalog_service/transports/grpc_asyncio.py b/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/catalog_service/transports/grpc_asyncio.py deleted file mode 100644 index 20c5361626d6..000000000000 --- a/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/catalog_service/transports/grpc_asyncio.py +++ /dev/null @@ -1,1362 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-#
-import inspect
-import warnings
-from typing import Awaitable, Callable, Dict, Optional, Sequence, Tuple, Union
-
-from google.api_core import gapic_v1
-from google.api_core import grpc_helpers_async
-from google.api_core import exceptions as core_exceptions
-from google.api_core import retry_async as retries
-from google.api_core import operations_v1
-from google.auth import credentials as ga_credentials   # type: ignore
-from google.auth.transport.grpc import SslCredentials  # type: ignore
-
-import grpc                        # type: ignore
-from grpc.experimental import aio  # type: ignore
-
-from google.cloud.dataplex_v1.types import catalog
-from google.cloud.location import locations_pb2 # type: ignore
-from google.iam.v1 import iam_policy_pb2  # type: ignore
-from google.iam.v1 import policy_pb2  # type: ignore
-from google.longrunning import operations_pb2 # type: ignore
-from google.protobuf import empty_pb2  # type: ignore
-from .base import CatalogServiceTransport, DEFAULT_CLIENT_INFO
-from .grpc import CatalogServiceGrpcTransport
-
-
-class CatalogServiceGrpcAsyncIOTransport(CatalogServiceTransport):
-    """gRPC AsyncIO backend transport for CatalogService.
-
-    The primary resources offered by this service are
-    EntryGroups, EntryTypes, AspectTypes, and Entries. They
-    collectively let data administrators organize, manage, secure,
-    and catalog data located across cloud projects in their
-    organization in a variety of storage systems, including Cloud
-    Storage and BigQuery.
-
-    This class defines the same methods as the primary client, so the
-    primary client can load the underlying transport implementation
-    and call it.
-
-    It sends protocol buffers over the wire using gRPC (which is built on
-    top of HTTP/2); the ``grpcio`` package must be installed.
-    """
-
-    _grpc_channel: aio.Channel
-    _stubs: Dict[str, Callable] = {}
-
-    @classmethod
-    def create_channel(cls,
-                       host: str = 'dataplex.googleapis.com',
-                       credentials: Optional[ga_credentials.Credentials] = None,
-                       credentials_file: Optional[str] = None,
-                       scopes: Optional[Sequence[str]] = None,
-                       quota_project_id: Optional[str] = None,
-                       **kwargs) -> aio.Channel:
-        """Create and return a gRPC AsyncIO channel object.
-        Args:
-            host (Optional[str]): The host for the channel to use.
-            credentials (Optional[~.Credentials]): The
-                authorization credentials to attach to requests. These
-                credentials identify this application to the service. If
-                none are specified, the client will attempt to ascertain
-                the credentials from the environment.
-            credentials_file (Optional[str]): A file with credentials that can
-                be loaded with :func:`google.auth.load_credentials_from_file`.
-            scopes (Optional[Sequence[str]]): An optional list of scopes needed for this
-                service. These are only used when credentials are not specified and
-                are passed to :func:`google.auth.default`.
-            quota_project_id (Optional[str]): An optional project to use for billing
-                and quota.
-            kwargs (Optional[dict]): Keyword arguments, which are passed to the
-                channel creation.
-        Returns:
-            aio.Channel: A gRPC AsyncIO channel object.
- """ - - return grpc_helpers_async.create_channel( - host, - credentials=credentials, - credentials_file=credentials_file, - quota_project_id=quota_project_id, - default_scopes=cls.AUTH_SCOPES, - scopes=scopes, - default_host=cls.DEFAULT_HOST, - **kwargs - ) - - def __init__(self, *, - host: str = 'dataplex.googleapis.com', - credentials: Optional[ga_credentials.Credentials] = None, - credentials_file: Optional[str] = None, - scopes: Optional[Sequence[str]] = None, - channel: Optional[Union[aio.Channel, Callable[..., aio.Channel]]] = None, - api_mtls_endpoint: Optional[str] = None, - client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None, - ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None, - client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, - quota_project_id: Optional[str] = None, - client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, - always_use_jwt_access: Optional[bool] = False, - api_audience: Optional[str] = None, - ) -> None: - """Instantiate the transport. - - Args: - host (Optional[str]): - The hostname to connect to (default: 'dataplex.googleapis.com'). - credentials (Optional[google.auth.credentials.Credentials]): The - authorization credentials to attach to requests. These - credentials identify the application to the service; if none - are specified, the client will attempt to ascertain the - credentials from the environment. - This argument is ignored if a ``channel`` instance is provided. - credentials_file (Optional[str]): A file with credentials that can - be loaded with :func:`google.auth.load_credentials_from_file`. - This argument is ignored if a ``channel`` instance is provided. - scopes (Optional[Sequence[str]]): A optional list of scopes needed for this - service. These are only used when credentials are not specified and - are passed to :func:`google.auth.default`. - channel (Optional[Union[aio.Channel, Callable[..., aio.Channel]]]): - A ``Channel`` instance through which to make calls, or a Callable - that constructs and returns one. If set to None, ``self.create_channel`` - is used to create the channel. If a Callable is given, it will be called - with the same arguments as used in ``self.create_channel``. - api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint. - If provided, it overrides the ``host`` argument and tries to create - a mutual TLS channel with client SSL credentials from - ``client_cert_source`` or application default SSL credentials. - client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]): - Deprecated. A callback to provide client SSL certificate bytes and - private key bytes, both in PEM format. It is ignored if - ``api_mtls_endpoint`` is None. - ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials - for the grpc channel. It is ignored if a ``channel`` instance is provided. - client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]): - A callback to provide client certificate bytes and private key bytes, - both in PEM format. It is used to configure a mutual TLS channel. It is - ignored if a ``channel`` instance or ``ssl_channel_credentials`` is provided. - quota_project_id (Optional[str]): An optional project to use for billing - and quota. - client_info (google.api_core.gapic_v1.client_info.ClientInfo): - The client info used to send a user-agent string along with - API requests. If ``None``, then default info will be used. 
- Generally, you only need to set this if you're developing - your own client library. - always_use_jwt_access (Optional[bool]): Whether self signed JWT should - be used for service account credentials. - - Raises: - google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport - creation failed for any reason. - google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` - and ``credentials_file`` are passed. - """ - self._grpc_channel = None - self._ssl_channel_credentials = ssl_channel_credentials - self._stubs: Dict[str, Callable] = {} - self._operations_client: Optional[operations_v1.OperationsAsyncClient] = None - - if api_mtls_endpoint: - warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning) - if client_cert_source: - warnings.warn("client_cert_source is deprecated", DeprecationWarning) - - if isinstance(channel, aio.Channel): - # Ignore credentials if a channel was passed. - credentials = None - self._ignore_credentials = True - # If a channel was explicitly provided, set it. - self._grpc_channel = channel - self._ssl_channel_credentials = None - else: - if api_mtls_endpoint: - host = api_mtls_endpoint - - # Create SSL credentials with client_cert_source or application - # default SSL credentials. - if client_cert_source: - cert, key = client_cert_source() - self._ssl_channel_credentials = grpc.ssl_channel_credentials( - certificate_chain=cert, private_key=key - ) - else: - self._ssl_channel_credentials = SslCredentials().ssl_credentials - - else: - if client_cert_source_for_mtls and not ssl_channel_credentials: - cert, key = client_cert_source_for_mtls() - self._ssl_channel_credentials = grpc.ssl_channel_credentials( - certificate_chain=cert, private_key=key - ) - - # The base transport sets the host, credentials and scopes - super().__init__( - host=host, - credentials=credentials, - credentials_file=credentials_file, - scopes=scopes, - quota_project_id=quota_project_id, - client_info=client_info, - always_use_jwt_access=always_use_jwt_access, - api_audience=api_audience, - ) - - if not self._grpc_channel: - # initialize with the provided callable or the default channel - channel_init = channel or type(self).create_channel - self._grpc_channel = channel_init( - self._host, - # use the credentials which are saved - credentials=self._credentials, - # Set ``credentials_file`` to ``None`` here as - # the credentials that we saved earlier should be used. - credentials_file=None, - scopes=self._scopes, - ssl_credentials=self._ssl_channel_credentials, - quota_project_id=quota_project_id, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ], - ) - - # Wrap messages. This must be done after self._grpc_channel exists - self._wrap_with_kind = "kind" in inspect.signature(gapic_v1.method_async.wrap_method).parameters - self._prep_wrapped_messages(client_info) - - @property - def grpc_channel(self) -> aio.Channel: - """Create the channel designed to connect to this service. - - This property caches on the instance; repeated calls return - the same channel. - """ - # Return the channel from cache. - return self._grpc_channel - - @property - def operations_client(self) -> operations_v1.OperationsAsyncClient: - """Create the client designed to process long-running operations. - - This property caches on the instance; repeated calls return the same - client. - """ - # Quick check: Only create a new client if we do not already have one. 
- if self._operations_client is None: - self._operations_client = operations_v1.OperationsAsyncClient( - self.grpc_channel - ) - - # Return the client from cache. - return self._operations_client - - @property - def create_entry_type(self) -> Callable[ - [catalog.CreateEntryTypeRequest], - Awaitable[operations_pb2.Operation]]: - r"""Return a callable for the create entry type method over gRPC. - - Creates an EntryType. - - Returns: - Callable[[~.CreateEntryTypeRequest], - Awaitable[~.Operation]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'create_entry_type' not in self._stubs: - self._stubs['create_entry_type'] = self.grpc_channel.unary_unary( - '/google.cloud.dataplex.v1.CatalogService/CreateEntryType', - request_serializer=catalog.CreateEntryTypeRequest.serialize, - response_deserializer=operations_pb2.Operation.FromString, - ) - return self._stubs['create_entry_type'] - - @property - def update_entry_type(self) -> Callable[ - [catalog.UpdateEntryTypeRequest], - Awaitable[operations_pb2.Operation]]: - r"""Return a callable for the update entry type method over gRPC. - - Updates an EntryType. - - Returns: - Callable[[~.UpdateEntryTypeRequest], - Awaitable[~.Operation]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'update_entry_type' not in self._stubs: - self._stubs['update_entry_type'] = self.grpc_channel.unary_unary( - '/google.cloud.dataplex.v1.CatalogService/UpdateEntryType', - request_serializer=catalog.UpdateEntryTypeRequest.serialize, - response_deserializer=operations_pb2.Operation.FromString, - ) - return self._stubs['update_entry_type'] - - @property - def delete_entry_type(self) -> Callable[ - [catalog.DeleteEntryTypeRequest], - Awaitable[operations_pb2.Operation]]: - r"""Return a callable for the delete entry type method over gRPC. - - Deletes an EntryType. - - Returns: - Callable[[~.DeleteEntryTypeRequest], - Awaitable[~.Operation]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'delete_entry_type' not in self._stubs: - self._stubs['delete_entry_type'] = self.grpc_channel.unary_unary( - '/google.cloud.dataplex.v1.CatalogService/DeleteEntryType', - request_serializer=catalog.DeleteEntryTypeRequest.serialize, - response_deserializer=operations_pb2.Operation.FromString, - ) - return self._stubs['delete_entry_type'] - - @property - def list_entry_types(self) -> Callable[ - [catalog.ListEntryTypesRequest], - Awaitable[catalog.ListEntryTypesResponse]]: - r"""Return a callable for the list entry types method over gRPC. - - Lists EntryType resources in a project and location. - - Returns: - Callable[[~.ListEntryTypesRequest], - Awaitable[~.ListEntryTypesResponse]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. 
- # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'list_entry_types' not in self._stubs: - self._stubs['list_entry_types'] = self.grpc_channel.unary_unary( - '/google.cloud.dataplex.v1.CatalogService/ListEntryTypes', - request_serializer=catalog.ListEntryTypesRequest.serialize, - response_deserializer=catalog.ListEntryTypesResponse.deserialize, - ) - return self._stubs['list_entry_types'] - - @property - def get_entry_type(self) -> Callable[ - [catalog.GetEntryTypeRequest], - Awaitable[catalog.EntryType]]: - r"""Return a callable for the get entry type method over gRPC. - - Gets an EntryType. - - Returns: - Callable[[~.GetEntryTypeRequest], - Awaitable[~.EntryType]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'get_entry_type' not in self._stubs: - self._stubs['get_entry_type'] = self.grpc_channel.unary_unary( - '/google.cloud.dataplex.v1.CatalogService/GetEntryType', - request_serializer=catalog.GetEntryTypeRequest.serialize, - response_deserializer=catalog.EntryType.deserialize, - ) - return self._stubs['get_entry_type'] - - @property - def create_aspect_type(self) -> Callable[ - [catalog.CreateAspectTypeRequest], - Awaitable[operations_pb2.Operation]]: - r"""Return a callable for the create aspect type method over gRPC. - - Creates an AspectType. - - Returns: - Callable[[~.CreateAspectTypeRequest], - Awaitable[~.Operation]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'create_aspect_type' not in self._stubs: - self._stubs['create_aspect_type'] = self.grpc_channel.unary_unary( - '/google.cloud.dataplex.v1.CatalogService/CreateAspectType', - request_serializer=catalog.CreateAspectTypeRequest.serialize, - response_deserializer=operations_pb2.Operation.FromString, - ) - return self._stubs['create_aspect_type'] - - @property - def update_aspect_type(self) -> Callable[ - [catalog.UpdateAspectTypeRequest], - Awaitable[operations_pb2.Operation]]: - r"""Return a callable for the update aspect type method over gRPC. - - Updates an AspectType. - - Returns: - Callable[[~.UpdateAspectTypeRequest], - Awaitable[~.Operation]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'update_aspect_type' not in self._stubs: - self._stubs['update_aspect_type'] = self.grpc_channel.unary_unary( - '/google.cloud.dataplex.v1.CatalogService/UpdateAspectType', - request_serializer=catalog.UpdateAspectTypeRequest.serialize, - response_deserializer=operations_pb2.Operation.FromString, - ) - return self._stubs['update_aspect_type'] - - @property - def delete_aspect_type(self) -> Callable[ - [catalog.DeleteAspectTypeRequest], - Awaitable[operations_pb2.Operation]]: - r"""Return a callable for the delete aspect type method over gRPC. - - Deletes an AspectType. 
- - Returns: - Callable[[~.DeleteAspectTypeRequest], - Awaitable[~.Operation]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'delete_aspect_type' not in self._stubs: - self._stubs['delete_aspect_type'] = self.grpc_channel.unary_unary( - '/google.cloud.dataplex.v1.CatalogService/DeleteAspectType', - request_serializer=catalog.DeleteAspectTypeRequest.serialize, - response_deserializer=operations_pb2.Operation.FromString, - ) - return self._stubs['delete_aspect_type'] - - @property - def list_aspect_types(self) -> Callable[ - [catalog.ListAspectTypesRequest], - Awaitable[catalog.ListAspectTypesResponse]]: - r"""Return a callable for the list aspect types method over gRPC. - - Lists AspectType resources in a project and location. - - Returns: - Callable[[~.ListAspectTypesRequest], - Awaitable[~.ListAspectTypesResponse]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'list_aspect_types' not in self._stubs: - self._stubs['list_aspect_types'] = self.grpc_channel.unary_unary( - '/google.cloud.dataplex.v1.CatalogService/ListAspectTypes', - request_serializer=catalog.ListAspectTypesRequest.serialize, - response_deserializer=catalog.ListAspectTypesResponse.deserialize, - ) - return self._stubs['list_aspect_types'] - - @property - def get_aspect_type(self) -> Callable[ - [catalog.GetAspectTypeRequest], - Awaitable[catalog.AspectType]]: - r"""Return a callable for the get aspect type method over gRPC. - - Gets an AspectType. - - Returns: - Callable[[~.GetAspectTypeRequest], - Awaitable[~.AspectType]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'get_aspect_type' not in self._stubs: - self._stubs['get_aspect_type'] = self.grpc_channel.unary_unary( - '/google.cloud.dataplex.v1.CatalogService/GetAspectType', - request_serializer=catalog.GetAspectTypeRequest.serialize, - response_deserializer=catalog.AspectType.deserialize, - ) - return self._stubs['get_aspect_type'] - - @property - def create_entry_group(self) -> Callable[ - [catalog.CreateEntryGroupRequest], - Awaitable[operations_pb2.Operation]]: - r"""Return a callable for the create entry group method over gRPC. - - Creates an EntryGroup. - - Returns: - Callable[[~.CreateEntryGroupRequest], - Awaitable[~.Operation]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. 
- if 'create_entry_group' not in self._stubs: - self._stubs['create_entry_group'] = self.grpc_channel.unary_unary( - '/google.cloud.dataplex.v1.CatalogService/CreateEntryGroup', - request_serializer=catalog.CreateEntryGroupRequest.serialize, - response_deserializer=operations_pb2.Operation.FromString, - ) - return self._stubs['create_entry_group'] - - @property - def update_entry_group(self) -> Callable[ - [catalog.UpdateEntryGroupRequest], - Awaitable[operations_pb2.Operation]]: - r"""Return a callable for the update entry group method over gRPC. - - Updates an EntryGroup. - - Returns: - Callable[[~.UpdateEntryGroupRequest], - Awaitable[~.Operation]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'update_entry_group' not in self._stubs: - self._stubs['update_entry_group'] = self.grpc_channel.unary_unary( - '/google.cloud.dataplex.v1.CatalogService/UpdateEntryGroup', - request_serializer=catalog.UpdateEntryGroupRequest.serialize, - response_deserializer=operations_pb2.Operation.FromString, - ) - return self._stubs['update_entry_group'] - - @property - def delete_entry_group(self) -> Callable[ - [catalog.DeleteEntryGroupRequest], - Awaitable[operations_pb2.Operation]]: - r"""Return a callable for the delete entry group method over gRPC. - - Deletes an EntryGroup. - - Returns: - Callable[[~.DeleteEntryGroupRequest], - Awaitable[~.Operation]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'delete_entry_group' not in self._stubs: - self._stubs['delete_entry_group'] = self.grpc_channel.unary_unary( - '/google.cloud.dataplex.v1.CatalogService/DeleteEntryGroup', - request_serializer=catalog.DeleteEntryGroupRequest.serialize, - response_deserializer=operations_pb2.Operation.FromString, - ) - return self._stubs['delete_entry_group'] - - @property - def list_entry_groups(self) -> Callable[ - [catalog.ListEntryGroupsRequest], - Awaitable[catalog.ListEntryGroupsResponse]]: - r"""Return a callable for the list entry groups method over gRPC. - - Lists EntryGroup resources in a project and location. - - Returns: - Callable[[~.ListEntryGroupsRequest], - Awaitable[~.ListEntryGroupsResponse]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'list_entry_groups' not in self._stubs: - self._stubs['list_entry_groups'] = self.grpc_channel.unary_unary( - '/google.cloud.dataplex.v1.CatalogService/ListEntryGroups', - request_serializer=catalog.ListEntryGroupsRequest.serialize, - response_deserializer=catalog.ListEntryGroupsResponse.deserialize, - ) - return self._stubs['list_entry_groups'] - - @property - def get_entry_group(self) -> Callable[ - [catalog.GetEntryGroupRequest], - Awaitable[catalog.EntryGroup]]: - r"""Return a callable for the get entry group method over gRPC. - - Gets an EntryGroup. 
- - Returns: - Callable[[~.GetEntryGroupRequest], - Awaitable[~.EntryGroup]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'get_entry_group' not in self._stubs: - self._stubs['get_entry_group'] = self.grpc_channel.unary_unary( - '/google.cloud.dataplex.v1.CatalogService/GetEntryGroup', - request_serializer=catalog.GetEntryGroupRequest.serialize, - response_deserializer=catalog.EntryGroup.deserialize, - ) - return self._stubs['get_entry_group'] - - @property - def create_entry(self) -> Callable[ - [catalog.CreateEntryRequest], - Awaitable[catalog.Entry]]: - r"""Return a callable for the create entry method over gRPC. - - Creates an Entry. - - Returns: - Callable[[~.CreateEntryRequest], - Awaitable[~.Entry]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'create_entry' not in self._stubs: - self._stubs['create_entry'] = self.grpc_channel.unary_unary( - '/google.cloud.dataplex.v1.CatalogService/CreateEntry', - request_serializer=catalog.CreateEntryRequest.serialize, - response_deserializer=catalog.Entry.deserialize, - ) - return self._stubs['create_entry'] - - @property - def update_entry(self) -> Callable[ - [catalog.UpdateEntryRequest], - Awaitable[catalog.Entry]]: - r"""Return a callable for the update entry method over gRPC. - - Updates an Entry. - - Returns: - Callable[[~.UpdateEntryRequest], - Awaitable[~.Entry]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'update_entry' not in self._stubs: - self._stubs['update_entry'] = self.grpc_channel.unary_unary( - '/google.cloud.dataplex.v1.CatalogService/UpdateEntry', - request_serializer=catalog.UpdateEntryRequest.serialize, - response_deserializer=catalog.Entry.deserialize, - ) - return self._stubs['update_entry'] - - @property - def delete_entry(self) -> Callable[ - [catalog.DeleteEntryRequest], - Awaitable[catalog.Entry]]: - r"""Return a callable for the delete entry method over gRPC. - - Deletes an Entry. - - Returns: - Callable[[~.DeleteEntryRequest], - Awaitable[~.Entry]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'delete_entry' not in self._stubs: - self._stubs['delete_entry'] = self.grpc_channel.unary_unary( - '/google.cloud.dataplex.v1.CatalogService/DeleteEntry', - request_serializer=catalog.DeleteEntryRequest.serialize, - response_deserializer=catalog.Entry.deserialize, - ) - return self._stubs['delete_entry'] - - @property - def list_entries(self) -> Callable[ - [catalog.ListEntriesRequest], - Awaitable[catalog.ListEntriesResponse]]: - r"""Return a callable for the list entries method over gRPC. - - Lists Entries within an EntryGroup. 
- - Returns: - Callable[[~.ListEntriesRequest], - Awaitable[~.ListEntriesResponse]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'list_entries' not in self._stubs: - self._stubs['list_entries'] = self.grpc_channel.unary_unary( - '/google.cloud.dataplex.v1.CatalogService/ListEntries', - request_serializer=catalog.ListEntriesRequest.serialize, - response_deserializer=catalog.ListEntriesResponse.deserialize, - ) - return self._stubs['list_entries'] - - @property - def get_entry(self) -> Callable[ - [catalog.GetEntryRequest], - Awaitable[catalog.Entry]]: - r"""Return a callable for the get entry method over gRPC. - - Gets an Entry. - - **Caution**: The BigQuery metadata that is stored in Dataplex - Catalog is changing. For more information, see `Changes to - BigQuery metadata stored in Dataplex - Catalog `__. - - Returns: - Callable[[~.GetEntryRequest], - Awaitable[~.Entry]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'get_entry' not in self._stubs: - self._stubs['get_entry'] = self.grpc_channel.unary_unary( - '/google.cloud.dataplex.v1.CatalogService/GetEntry', - request_serializer=catalog.GetEntryRequest.serialize, - response_deserializer=catalog.Entry.deserialize, - ) - return self._stubs['get_entry'] - - @property - def lookup_entry(self) -> Callable[ - [catalog.LookupEntryRequest], - Awaitable[catalog.Entry]]: - r"""Return a callable for the lookup entry method over gRPC. - - Looks up a single Entry by name using the permission on the - source system. - - **Caution**: The BigQuery metadata that is stored in Dataplex - Catalog is changing. For more information, see `Changes to - BigQuery metadata stored in Dataplex - Catalog `__. - - Returns: - Callable[[~.LookupEntryRequest], - Awaitable[~.Entry]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'lookup_entry' not in self._stubs: - self._stubs['lookup_entry'] = self.grpc_channel.unary_unary( - '/google.cloud.dataplex.v1.CatalogService/LookupEntry', - request_serializer=catalog.LookupEntryRequest.serialize, - response_deserializer=catalog.Entry.deserialize, - ) - return self._stubs['lookup_entry'] - - @property - def search_entries(self) -> Callable[ - [catalog.SearchEntriesRequest], - Awaitable[catalog.SearchEntriesResponse]]: - r"""Return a callable for the search entries method over gRPC. - - Searches for Entries matching the given query and - scope. - - Returns: - Callable[[~.SearchEntriesRequest], - Awaitable[~.SearchEntriesResponse]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. 
- if 'search_entries' not in self._stubs: - self._stubs['search_entries'] = self.grpc_channel.unary_unary( - '/google.cloud.dataplex.v1.CatalogService/SearchEntries', - request_serializer=catalog.SearchEntriesRequest.serialize, - response_deserializer=catalog.SearchEntriesResponse.deserialize, - ) - return self._stubs['search_entries'] - - @property - def create_metadata_job(self) -> Callable[ - [catalog.CreateMetadataJobRequest], - Awaitable[operations_pb2.Operation]]: - r"""Return a callable for the create metadata job method over gRPC. - - Creates a metadata job. For example, use a metadata - job to import Dataplex Catalog entries and aspects from - a third-party system into Dataplex. - - Returns: - Callable[[~.CreateMetadataJobRequest], - Awaitable[~.Operation]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'create_metadata_job' not in self._stubs: - self._stubs['create_metadata_job'] = self.grpc_channel.unary_unary( - '/google.cloud.dataplex.v1.CatalogService/CreateMetadataJob', - request_serializer=catalog.CreateMetadataJobRequest.serialize, - response_deserializer=operations_pb2.Operation.FromString, - ) - return self._stubs['create_metadata_job'] - - @property - def get_metadata_job(self) -> Callable[ - [catalog.GetMetadataJobRequest], - Awaitable[catalog.MetadataJob]]: - r"""Return a callable for the get metadata job method over gRPC. - - Gets a metadata job. - - Returns: - Callable[[~.GetMetadataJobRequest], - Awaitable[~.MetadataJob]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'get_metadata_job' not in self._stubs: - self._stubs['get_metadata_job'] = self.grpc_channel.unary_unary( - '/google.cloud.dataplex.v1.CatalogService/GetMetadataJob', - request_serializer=catalog.GetMetadataJobRequest.serialize, - response_deserializer=catalog.MetadataJob.deserialize, - ) - return self._stubs['get_metadata_job'] - - @property - def list_metadata_jobs(self) -> Callable[ - [catalog.ListMetadataJobsRequest], - Awaitable[catalog.ListMetadataJobsResponse]]: - r"""Return a callable for the list metadata jobs method over gRPC. - - Lists metadata jobs. - - Returns: - Callable[[~.ListMetadataJobsRequest], - Awaitable[~.ListMetadataJobsResponse]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'list_metadata_jobs' not in self._stubs: - self._stubs['list_metadata_jobs'] = self.grpc_channel.unary_unary( - '/google.cloud.dataplex.v1.CatalogService/ListMetadataJobs', - request_serializer=catalog.ListMetadataJobsRequest.serialize, - response_deserializer=catalog.ListMetadataJobsResponse.deserialize, - ) - return self._stubs['list_metadata_jobs'] - - @property - def cancel_metadata_job(self) -> Callable[ - [catalog.CancelMetadataJobRequest], - Awaitable[empty_pb2.Empty]]: - r"""Return a callable for the cancel metadata job method over gRPC. - - Cancels a metadata job. 
- - If you cancel a metadata import job that is in progress, - the changes in the job might be partially applied. We - recommend that you reset the state of the entry groups - in your project by running another metadata job that - reverts the changes from the canceled job. - - Returns: - Callable[[~.CancelMetadataJobRequest], - Awaitable[~.Empty]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'cancel_metadata_job' not in self._stubs: - self._stubs['cancel_metadata_job'] = self.grpc_channel.unary_unary( - '/google.cloud.dataplex.v1.CatalogService/CancelMetadataJob', - request_serializer=catalog.CancelMetadataJobRequest.serialize, - response_deserializer=empty_pb2.Empty.FromString, - ) - return self._stubs['cancel_metadata_job'] - - def _prep_wrapped_messages(self, client_info): - """ Precompute the wrapped methods, overriding the base class method to use async wrappers.""" - self._wrapped_methods = { - self.create_entry_type: self._wrap_method( - self.create_entry_type, - default_timeout=60.0, - client_info=client_info, - ), - self.update_entry_type: self._wrap_method( - self.update_entry_type, - default_timeout=60.0, - client_info=client_info, - ), - self.delete_entry_type: self._wrap_method( - self.delete_entry_type, - default_timeout=60.0, - client_info=client_info, - ), - self.list_entry_types: self._wrap_method( - self.list_entry_types, - default_retry=retries.AsyncRetry( - initial=1.0, - maximum=10.0, - multiplier=1.3, - predicate=retries.if_exception_type( - core_exceptions.ResourceExhausted, - core_exceptions.ServiceUnavailable, - ), - deadline=60.0, - ), - default_timeout=60.0, - client_info=client_info, - ), - self.get_entry_type: self._wrap_method( - self.get_entry_type, - default_retry=retries.AsyncRetry( - initial=1.0, - maximum=10.0, - multiplier=1.3, - predicate=retries.if_exception_type( - core_exceptions.ResourceExhausted, - core_exceptions.ServiceUnavailable, - ), - deadline=60.0, - ), - default_timeout=60.0, - client_info=client_info, - ), - self.create_aspect_type: self._wrap_method( - self.create_aspect_type, - default_timeout=60.0, - client_info=client_info, - ), - self.update_aspect_type: self._wrap_method( - self.update_aspect_type, - default_timeout=60.0, - client_info=client_info, - ), - self.delete_aspect_type: self._wrap_method( - self.delete_aspect_type, - default_timeout=60.0, - client_info=client_info, - ), - self.list_aspect_types: self._wrap_method( - self.list_aspect_types, - default_retry=retries.AsyncRetry( - initial=1.0, - maximum=10.0, - multiplier=1.3, - predicate=retries.if_exception_type( - core_exceptions.ResourceExhausted, - core_exceptions.ServiceUnavailable, - ), - deadline=60.0, - ), - default_timeout=60.0, - client_info=client_info, - ), - self.get_aspect_type: self._wrap_method( - self.get_aspect_type, - default_retry=retries.AsyncRetry( - initial=1.0, - maximum=10.0, - multiplier=1.3, - predicate=retries.if_exception_type( - core_exceptions.ResourceExhausted, - core_exceptions.ServiceUnavailable, - ), - deadline=60.0, - ), - default_timeout=60.0, - client_info=client_info, - ), - self.create_entry_group: self._wrap_method( - self.create_entry_group, - default_timeout=60.0, - client_info=client_info, - ), - self.update_entry_group: self._wrap_method( - self.update_entry_group, - default_timeout=60.0, - 
client_info=client_info, - ), - self.delete_entry_group: self._wrap_method( - self.delete_entry_group, - default_timeout=60.0, - client_info=client_info, - ), - self.list_entry_groups: self._wrap_method( - self.list_entry_groups, - default_retry=retries.AsyncRetry( - initial=1.0, - maximum=10.0, - multiplier=1.3, - predicate=retries.if_exception_type( - core_exceptions.ResourceExhausted, - core_exceptions.ServiceUnavailable, - ), - deadline=60.0, - ), - default_timeout=60.0, - client_info=client_info, - ), - self.get_entry_group: self._wrap_method( - self.get_entry_group, - default_retry=retries.AsyncRetry( - initial=1.0, - maximum=10.0, - multiplier=1.3, - predicate=retries.if_exception_type( - core_exceptions.ResourceExhausted, - core_exceptions.ServiceUnavailable, - ), - deadline=60.0, - ), - default_timeout=60.0, - client_info=client_info, - ), - self.create_entry: self._wrap_method( - self.create_entry, - default_timeout=60.0, - client_info=client_info, - ), - self.update_entry: self._wrap_method( - self.update_entry, - default_retry=retries.AsyncRetry( - initial=1.0, - maximum=10.0, - multiplier=1.3, - predicate=retries.if_exception_type( - core_exceptions.ResourceExhausted, - core_exceptions.ServiceUnavailable, - ), - deadline=60.0, - ), - default_timeout=60.0, - client_info=client_info, - ), - self.delete_entry: self._wrap_method( - self.delete_entry, - default_timeout=60.0, - client_info=client_info, - ), - self.list_entries: self._wrap_method( - self.list_entries, - default_retry=retries.AsyncRetry( - initial=1.0, - maximum=10.0, - multiplier=1.3, - predicate=retries.if_exception_type( - core_exceptions.ResourceExhausted, - core_exceptions.ServiceUnavailable, - ), - deadline=20.0, - ), - default_timeout=20.0, - client_info=client_info, - ), - self.get_entry: self._wrap_method( - self.get_entry, - default_retry=retries.AsyncRetry( - initial=1.0, - maximum=10.0, - multiplier=1.3, - predicate=retries.if_exception_type( - core_exceptions.ResourceExhausted, - core_exceptions.ServiceUnavailable, - ), - deadline=20.0, - ), - default_timeout=20.0, - client_info=client_info, - ), - self.lookup_entry: self._wrap_method( - self.lookup_entry, - default_retry=retries.AsyncRetry( - initial=1.0, - maximum=10.0, - multiplier=1.3, - predicate=retries.if_exception_type( - core_exceptions.ResourceExhausted, - core_exceptions.ServiceUnavailable, - ), - deadline=20.0, - ), - default_timeout=20.0, - client_info=client_info, - ), - self.search_entries: self._wrap_method( - self.search_entries, - default_retry=retries.AsyncRetry( - initial=1.0, - maximum=10.0, - multiplier=1.3, - predicate=retries.if_exception_type( - core_exceptions.ResourceExhausted, - core_exceptions.ServiceUnavailable, - ), - deadline=60.0, - ), - default_timeout=60.0, - client_info=client_info, - ), - self.create_metadata_job: self._wrap_method( - self.create_metadata_job, - default_timeout=None, - client_info=client_info, - ), - self.get_metadata_job: self._wrap_method( - self.get_metadata_job, - default_timeout=None, - client_info=client_info, - ), - self.list_metadata_jobs: self._wrap_method( - self.list_metadata_jobs, - default_timeout=None, - client_info=client_info, - ), - self.cancel_metadata_job: self._wrap_method( - self.cancel_metadata_job, - default_timeout=None, - client_info=client_info, - ), - self.get_location: self._wrap_method( - self.get_location, - default_timeout=None, - client_info=client_info, - ), - self.list_locations: self._wrap_method( - self.list_locations, - default_timeout=None, - 
client_info=client_info, - ), - self.cancel_operation: self._wrap_method( - self.cancel_operation, - default_timeout=None, - client_info=client_info, - ), - self.delete_operation: self._wrap_method( - self.delete_operation, - default_timeout=None, - client_info=client_info, - ), - self.get_operation: self._wrap_method( - self.get_operation, - default_timeout=None, - client_info=client_info, - ), - self.list_operations: self._wrap_method( - self.list_operations, - default_timeout=None, - client_info=client_info, - ), - } - - def _wrap_method(self, func, *args, **kwargs): - if self._wrap_with_kind: # pragma: NO COVER - kwargs["kind"] = self.kind - return gapic_v1.method_async.wrap_method(func, *args, **kwargs) - - def close(self): - return self.grpc_channel.close() - - @property - def kind(self) -> str: - return "grpc_asyncio" - - @property - def delete_operation( - self, - ) -> Callable[[operations_pb2.DeleteOperationRequest], None]: - r"""Return a callable for the delete_operation method over gRPC. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if "delete_operation" not in self._stubs: - self._stubs["delete_operation"] = self.grpc_channel.unary_unary( - "/google.longrunning.Operations/DeleteOperation", - request_serializer=operations_pb2.DeleteOperationRequest.SerializeToString, - response_deserializer=None, - ) - return self._stubs["delete_operation"] - - @property - def cancel_operation( - self, - ) -> Callable[[operations_pb2.CancelOperationRequest], None]: - r"""Return a callable for the cancel_operation method over gRPC. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if "cancel_operation" not in self._stubs: - self._stubs["cancel_operation"] = self.grpc_channel.unary_unary( - "/google.longrunning.Operations/CancelOperation", - request_serializer=operations_pb2.CancelOperationRequest.SerializeToString, - response_deserializer=None, - ) - return self._stubs["cancel_operation"] - - @property - def get_operation( - self, - ) -> Callable[[operations_pb2.GetOperationRequest], operations_pb2.Operation]: - r"""Return a callable for the get_operation method over gRPC. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if "get_operation" not in self._stubs: - self._stubs["get_operation"] = self.grpc_channel.unary_unary( - "/google.longrunning.Operations/GetOperation", - request_serializer=operations_pb2.GetOperationRequest.SerializeToString, - response_deserializer=operations_pb2.Operation.FromString, - ) - return self._stubs["get_operation"] - - @property - def list_operations( - self, - ) -> Callable[[operations_pb2.ListOperationsRequest], operations_pb2.ListOperationsResponse]: - r"""Return a callable for the list_operations method over gRPC. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. 
- if "list_operations" not in self._stubs: - self._stubs["list_operations"] = self.grpc_channel.unary_unary( - "/google.longrunning.Operations/ListOperations", - request_serializer=operations_pb2.ListOperationsRequest.SerializeToString, - response_deserializer=operations_pb2.ListOperationsResponse.FromString, - ) - return self._stubs["list_operations"] - - @property - def list_locations( - self, - ) -> Callable[[locations_pb2.ListLocationsRequest], locations_pb2.ListLocationsResponse]: - r"""Return a callable for the list locations method over gRPC. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if "list_locations" not in self._stubs: - self._stubs["list_locations"] = self.grpc_channel.unary_unary( - "/google.cloud.location.Locations/ListLocations", - request_serializer=locations_pb2.ListLocationsRequest.SerializeToString, - response_deserializer=locations_pb2.ListLocationsResponse.FromString, - ) - return self._stubs["list_locations"] - - @property - def get_location( - self, - ) -> Callable[[locations_pb2.GetLocationRequest], locations_pb2.Location]: - r"""Return a callable for the list locations method over gRPC. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if "get_location" not in self._stubs: - self._stubs["get_location"] = self.grpc_channel.unary_unary( - "/google.cloud.location.Locations/GetLocation", - request_serializer=locations_pb2.GetLocationRequest.SerializeToString, - response_deserializer=locations_pb2.Location.FromString, - ) - return self._stubs["get_location"] - - -__all__ = ( - 'CatalogServiceGrpcAsyncIOTransport', -) diff --git a/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/content_service/__init__.py b/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/content_service/__init__.py deleted file mode 100644 index d5c571190bc7..000000000000 --- a/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/content_service/__init__.py +++ /dev/null @@ -1,22 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# -from .client import ContentServiceClient -from .async_client import ContentServiceAsyncClient - -__all__ = ( - 'ContentServiceClient', - 'ContentServiceAsyncClient', -) diff --git a/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/content_service/async_client.py b/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/content_service/async_client.py deleted file mode 100644 index 0d74d046caec..000000000000 --- a/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/content_service/async_client.py +++ /dev/null @@ -1,1435 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -from collections import OrderedDict -import re -from typing import Dict, Callable, Mapping, MutableMapping, MutableSequence, Optional, Sequence, Tuple, Type, Union - -from google.cloud.dataplex_v1 import gapic_version as package_version - -from google.api_core.client_options import ClientOptions -from google.api_core import exceptions as core_exceptions -from google.api_core import gapic_v1 -from google.api_core import retry_async as retries -from google.auth import credentials as ga_credentials # type: ignore -from google.oauth2 import service_account # type: ignore - - -try: - OptionalRetry = Union[retries.AsyncRetry, gapic_v1.method._MethodDefault, None] -except AttributeError: # pragma: NO COVER - OptionalRetry = Union[retries.AsyncRetry, object, None] # type: ignore - -from google.cloud.dataplex_v1.services.content_service import pagers -from google.cloud.dataplex_v1.types import analyze -from google.cloud.dataplex_v1.types import content -from google.cloud.dataplex_v1.types import content as gcd_content -from google.cloud.location import locations_pb2 # type: ignore -from google.iam.v1 import iam_policy_pb2 # type: ignore -from google.iam.v1 import policy_pb2 # type: ignore -from google.longrunning import operations_pb2 # type: ignore -from google.protobuf import field_mask_pb2 # type: ignore -from google.protobuf import timestamp_pb2 # type: ignore -from .transports.base import ContentServiceTransport, DEFAULT_CLIENT_INFO -from .transports.grpc_asyncio import ContentServiceGrpcAsyncIOTransport -from .client import ContentServiceClient - - -class ContentServiceAsyncClient: - """ContentService manages Notebook and SQL Scripts for Dataplex.""" - - _client: ContentServiceClient - - # Copy defaults from the synchronous client for use here. - # Note: DEFAULT_ENDPOINT is deprecated. Use _DEFAULT_ENDPOINT_TEMPLATE instead. 
-    DEFAULT_ENDPOINT = ContentServiceClient.DEFAULT_ENDPOINT
-    DEFAULT_MTLS_ENDPOINT = ContentServiceClient.DEFAULT_MTLS_ENDPOINT
-    _DEFAULT_ENDPOINT_TEMPLATE = ContentServiceClient._DEFAULT_ENDPOINT_TEMPLATE
-    _DEFAULT_UNIVERSE = ContentServiceClient._DEFAULT_UNIVERSE
-
-    content_path = staticmethod(ContentServiceClient.content_path)
-    parse_content_path = staticmethod(ContentServiceClient.parse_content_path)
-    lake_path = staticmethod(ContentServiceClient.lake_path)
-    parse_lake_path = staticmethod(ContentServiceClient.parse_lake_path)
-    common_billing_account_path = staticmethod(ContentServiceClient.common_billing_account_path)
-    parse_common_billing_account_path = staticmethod(ContentServiceClient.parse_common_billing_account_path)
-    common_folder_path = staticmethod(ContentServiceClient.common_folder_path)
-    parse_common_folder_path = staticmethod(ContentServiceClient.parse_common_folder_path)
-    common_organization_path = staticmethod(ContentServiceClient.common_organization_path)
-    parse_common_organization_path = staticmethod(ContentServiceClient.parse_common_organization_path)
-    common_project_path = staticmethod(ContentServiceClient.common_project_path)
-    parse_common_project_path = staticmethod(ContentServiceClient.parse_common_project_path)
-    common_location_path = staticmethod(ContentServiceClient.common_location_path)
-    parse_common_location_path = staticmethod(ContentServiceClient.parse_common_location_path)
-
-    @classmethod
-    def from_service_account_info(cls, info: dict, *args, **kwargs):
-        """Creates an instance of this client using the provided credentials
-            info.
-
-        Args:
-            info (dict): The service account private key info.
-            args: Additional arguments to pass to the constructor.
-            kwargs: Additional arguments to pass to the constructor.
-
-        Returns:
-            ContentServiceAsyncClient: The constructed client.
-        """
-        return ContentServiceClient.from_service_account_info.__func__(ContentServiceAsyncClient, info, *args, **kwargs)  # type: ignore
-
-    @classmethod
-    def from_service_account_file(cls, filename: str, *args, **kwargs):
-        """Creates an instance of this client using the provided credentials
-            file.
-
-        Args:
-            filename (str): The path to the service account private key json
-                file.
-            args: Additional arguments to pass to the constructor.
-            kwargs: Additional arguments to pass to the constructor.
-
-        Returns:
-            ContentServiceAsyncClient: The constructed client.
-        """
-        return ContentServiceClient.from_service_account_file.__func__(ContentServiceAsyncClient, filename, *args, **kwargs)  # type: ignore
-
-    from_service_account_json = from_service_account_file
-
-    @classmethod
-    def get_mtls_endpoint_and_cert_source(cls, client_options: Optional[ClientOptions] = None):
-        """Return the API endpoint and client cert source for mutual TLS.
-
-        The client cert source is determined in the following order:
-        (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the
-        client cert source is None.
-        (2) if `client_options.client_cert_source` is provided, use the provided one; if the
-        default client cert source exists, use the default one; otherwise the client cert
-        source is None.
-
-        The API endpoint is determined in the following order:
-        (1) if `client_options.api_endpoint` is provided, use the provided one.
-        (2) if `GOOGLE_API_USE_MTLS_ENDPOINT` environment variable is "always", use the
-        default mTLS endpoint; if the environment variable is "never", use the default API
-        endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise
-        use the default API endpoint.
-
-        More details can be found at https://google.aip.dev/auth/4114.
-
-        Args:
-            client_options (google.api_core.client_options.ClientOptions): Custom options for the
-                client. Only the `api_endpoint` and `client_cert_source` properties may be used
-                in this method.
-
-        Returns:
-            Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the
-                client cert source to use.
-
-        Raises:
-            google.auth.exceptions.MutualTLSChannelError: If any errors happen.
-        """
-        return ContentServiceClient.get_mtls_endpoint_and_cert_source(client_options)  # type: ignore
-
-    @property
-    def transport(self) -> ContentServiceTransport:
-        """Returns the transport used by the client instance.
-
-        Returns:
-            ContentServiceTransport: The transport used by the client instance.
-        """
-        return self._client.transport
-
-    @property
-    def api_endpoint(self):
-        """Return the API endpoint used by the client instance.
-
-        Returns:
-            str: The API endpoint used by the client instance.
-        """
-        return self._client._api_endpoint
-
-    @property
-    def universe_domain(self) -> str:
-        """Return the universe domain used by the client instance.
-
-        Returns:
-            str: The universe domain used
-                by the client instance.
-        """
-        return self._client._universe_domain
-
-    get_transport_class = ContentServiceClient.get_transport_class
-
-    def __init__(self, *,
-            credentials: Optional[ga_credentials.Credentials] = None,
-            transport: Optional[Union[str, ContentServiceTransport, Callable[..., ContentServiceTransport]]] = "grpc_asyncio",
-            client_options: Optional[ClientOptions] = None,
-            client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO,
-            ) -> None:
-        """Instantiates the content service async client.
-
-        Args:
-            credentials (Optional[google.auth.credentials.Credentials]): The
-                authorization credentials to attach to requests. These
-                credentials identify the application to the service; if none
-                are specified, the client will attempt to ascertain the
-                credentials from the environment.
-            transport (Optional[Union[str,ContentServiceTransport,Callable[..., ContentServiceTransport]]]):
-                The transport to use, or a Callable that constructs and returns a new transport to use.
-                If a Callable is given, it will be called with the same set of initialization
-                arguments as used in the ContentServiceTransport constructor.
-                If set to None, a transport is chosen automatically.
-            client_options (Optional[Union[google.api_core.client_options.ClientOptions, dict]]):
-                Custom options for the client.
-
-                1. The ``api_endpoint`` property can be used to override the
-                default endpoint provided by the client when ``transport`` is
-                not explicitly provided. Only if this property is not set and
-                ``transport`` was not explicitly provided, the endpoint is
-                determined by the GOOGLE_API_USE_MTLS_ENDPOINT environment
-                variable, which can have one of the following values:
-                "always" (always use the default mTLS endpoint), "never" (always
-                use the default regular endpoint) and "auto" (auto-switch to the
-                default mTLS endpoint if client certificate is present; this is
-                the default value).
-
-                2.
If the GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable - is "true", then the ``client_cert_source`` property can be used - to provide a client certificate for mTLS transport. If - not provided, the default SSL client certificate will be used if - present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not - set, no client certificate will be used. - - 3. The ``universe_domain`` property can be used to override the - default "googleapis.com" universe. Note that ``api_endpoint`` - property still takes precedence; and ``universe_domain`` is - currently not supported for mTLS. - - client_info (google.api_core.gapic_v1.client_info.ClientInfo): - The client info used to send a user-agent string along with - API requests. If ``None``, then default info will be used. - Generally, you only need to set this if you're developing - your own client library. - - Raises: - google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport - creation failed for any reason. - """ - self._client = ContentServiceClient( - credentials=credentials, - transport=transport, - client_options=client_options, - client_info=client_info, - - ) - - async def create_content(self, - request: Optional[Union[gcd_content.CreateContentRequest, dict]] = None, - *, - parent: Optional[str] = None, - content: Optional[analyze.Content] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> analyze.Content: - r"""Create a content. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import dataplex_v1 - - async def sample_create_content(): - # Create a client - client = dataplex_v1.ContentServiceAsyncClient() - - # Initialize request argument(s) - content = dataplex_v1.Content() - content.data_text = "data_text_value" - content.sql_script.engine = "SPARK" - content.path = "path_value" - - request = dataplex_v1.CreateContentRequest( - parent="parent_value", - content=content, - ) - - # Make the request - response = await client.create_content(request=request) - - # Handle the response - print(response) - - Args: - request (Optional[Union[google.cloud.dataplex_v1.types.CreateContentRequest, dict]]): - The request object. Create content request. - parent (:class:`str`): - Required. The resource name of the parent lake: - projects/{project_id}/locations/{location_id}/lakes/{lake_id} - - This corresponds to the ``parent`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - content (:class:`google.cloud.dataplex_v1.types.Content`): - Required. Content resource. - This corresponds to the ``content`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. 
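-
-        For illustration, the flattened fields above can also be passed
-        directly instead of a request object; a minimal sketch (the
-        project, location, and lake IDs below are placeholders):
-
-        .. code-block:: python
-
-            response = await client.create_content(
-                parent="projects/my-project/locations/us-central1/lakes/my-lake",
-                content=content,
-            )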
- - Returns: - google.cloud.dataplex_v1.types.Content: - Content represents a user-visible - notebook or a sql script - - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([parent, content]) - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, gcd_content.CreateContentRequest): - request = gcd_content.CreateContentRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if parent is not None: - request.parent = parent - if content is not None: - request.content = content - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._client._transport._wrapped_methods[self._client._transport.create_content] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - async def update_content(self, - request: Optional[Union[gcd_content.UpdateContentRequest, dict]] = None, - *, - content: Optional[analyze.Content] = None, - update_mask: Optional[field_mask_pb2.FieldMask] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> analyze.Content: - r"""Update a content. Only supports full resource update. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import dataplex_v1 - - async def sample_update_content(): - # Create a client - client = dataplex_v1.ContentServiceAsyncClient() - - # Initialize request argument(s) - content = dataplex_v1.Content() - content.data_text = "data_text_value" - content.sql_script.engine = "SPARK" - content.path = "path_value" - - request = dataplex_v1.UpdateContentRequest( - content=content, - ) - - # Make the request - response = await client.update_content(request=request) - - # Handle the response - print(response) - - Args: - request (Optional[Union[google.cloud.dataplex_v1.types.UpdateContentRequest, dict]]): - The request object. Update content request. - content (:class:`google.cloud.dataplex_v1.types.Content`): - Required. Update description. Only fields specified in - ``update_mask`` are updated. - - This corresponds to the ``content`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - update_mask (:class:`google.protobuf.field_mask_pb2.FieldMask`): - Required. Mask of fields to update. 
- This corresponds to the ``update_mask`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.cloud.dataplex_v1.types.Content: - Content represents a user-visible - notebook or a sql script - - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([content, update_mask]) - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, gcd_content.UpdateContentRequest): - request = gcd_content.UpdateContentRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if content is not None: - request.content = content - if update_mask is not None: - request.update_mask = update_mask - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._client._transport._wrapped_methods[self._client._transport.update_content] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("content.name", request.content.name), - )), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - async def delete_content(self, - request: Optional[Union[content.DeleteContentRequest, dict]] = None, - *, - name: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> None: - r"""Delete a content. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import dataplex_v1 - - async def sample_delete_content(): - # Create a client - client = dataplex_v1.ContentServiceAsyncClient() - - # Initialize request argument(s) - request = dataplex_v1.DeleteContentRequest( - name="name_value", - ) - - # Make the request - await client.delete_content(request=request) - - Args: - request (Optional[Union[google.cloud.dataplex_v1.types.DeleteContentRequest, dict]]): - The request object. Delete content request. - name (:class:`str`): - Required. The resource name of the content: - projects/{project_id}/locations/{location_id}/lakes/{lake_id}/content/{content_id} - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. 
- retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([name]) - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, content.DeleteContentRequest): - request = content.DeleteContentRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._client._transport._wrapped_methods[self._client._transport.delete_content] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. - await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - async def get_content(self, - request: Optional[Union[content.GetContentRequest, dict]] = None, - *, - name: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> analyze.Content: - r"""Get a content resource. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import dataplex_v1 - - async def sample_get_content(): - # Create a client - client = dataplex_v1.ContentServiceAsyncClient() - - # Initialize request argument(s) - request = dataplex_v1.GetContentRequest( - name="name_value", - ) - - # Make the request - response = await client.get_content(request=request) - - # Handle the response - print(response) - - Args: - request (Optional[Union[google.cloud.dataplex_v1.types.GetContentRequest, dict]]): - The request object. Get content request. - name (:class:`str`): - Required. The resource name of the content: - projects/{project_id}/locations/{location_id}/lakes/{lake_id}/content/{content_id} - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. 
- - Returns: - google.cloud.dataplex_v1.types.Content: - Content represents a user-visible - notebook or a sql script - - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([name]) - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, content.GetContentRequest): - request = content.GetContentRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._client._transport._wrapped_methods[self._client._transport.get_content] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - async def get_iam_policy(self, - request: Optional[Union[iam_policy_pb2.GetIamPolicyRequest, dict]] = None, - *, - resource: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> policy_pb2.Policy: - r"""Gets the access control policy for a contentitem resource. A - ``NOT_FOUND`` error is returned if the resource does not exist. - An empty policy is returned if the resource exists but does not - have a policy set on it. - - Caller must have Google IAM ``dataplex.content.getIamPolicy`` - permission on the resource. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import dataplex_v1 - from google.iam.v1 import iam_policy_pb2 # type: ignore - - async def sample_get_iam_policy(): - # Create a client - client = dataplex_v1.ContentServiceAsyncClient() - - # Initialize request argument(s) - request = iam_policy_pb2.GetIamPolicyRequest( - resource="resource_value", - ) - - # Make the request - response = await client.get_iam_policy(request=request) - - # Handle the response - print(response) - - Args: - request (Optional[Union[google.iam.v1.iam_policy_pb2.GetIamPolicyRequest, dict]]): - The request object. Request message for ``GetIamPolicy`` method. - resource (:class:`str`): - REQUIRED: The resource for which the - policy is being requested. See the - operation documentation for the - appropriate value for this field. - - This corresponds to the ``resource`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. 
-            retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any,
-                should be retried.
-            timeout (float): The timeout for this request.
-            metadata (Sequence[Tuple[str, str]]): Strings which should be
-                sent along with the request as metadata.
-
-        Returns:
-            google.iam.v1.policy_pb2.Policy:
-                An Identity and Access Management (IAM) policy, which specifies access
-                controls for Google Cloud resources.
-
-                A Policy is a collection of bindings. A binding binds
-                one or more members, or principals, to a single role.
-                Principals can be user accounts, service accounts,
-                Google groups, and domains (such as G Suite). A role
-                is a named list of permissions; each role can be an
-                IAM predefined role or a user-created custom role.
-
-                For some types of Google Cloud resources, a binding
-                can also specify a condition, which is a logical
-                expression that allows access to a resource only if
-                the expression evaluates to true. A condition can add
-                constraints based on attributes of the request, the
-                resource, or both. To learn which resources support
-                conditions in their IAM policies, see the [IAM
-                documentation](\ https://cloud.google.com/iam/help/conditions/resource-policies).
-
-                **JSON example:**
-
-                ::
-
-                    {
-                      "bindings": [
-                        {
-                          "role": "roles/resourcemanager.organizationAdmin",
-                          "members": [
-                            "user:mike@example.com",
-                            "group:admins@example.com",
-                            "domain:google.com",
-                            "serviceAccount:my-project-id@appspot.gserviceaccount.com"
-                          ]
-                        },
-                        {
-                          "role": "roles/resourcemanager.organizationViewer",
-                          "members": [
-                            "user:eve@example.com"
-                          ],
-                          "condition": {
-                            "title": "expirable access",
-                            "description": "Does not grant access after Sep 2020",
-                            "expression": "request.time < timestamp('2020-10-01T00:00:00.000Z')"
-                          }
-                        }
-                      ],
-                      "etag": "BwWWja0YfJA=",
-                      "version": 3
-                    }
-
-                **YAML example:**
-
-                ::
-
-                    bindings:
-                    - members:
-                      - user:mike@example.com
-                      - group:admins@example.com
-                      - domain:google.com
-                      - serviceAccount:my-project-id@appspot.gserviceaccount.com
-                      role: roles/resourcemanager.organizationAdmin
-                    - members:
-                      - user:eve@example.com
-                      role: roles/resourcemanager.organizationViewer
-                      condition:
-                        title: expirable access
-                        description: Does not grant access after Sep 2020
-                        expression: request.time < timestamp('2020-10-01T00:00:00.000Z')
-                    etag: BwWWja0YfJA=
-                    version: 3
-
-                For a description of IAM and its features, see the
-                [IAM documentation](\ https://cloud.google.com/iam/docs/).
-
-        """
-        # Create or coerce a protobuf request object.
-        # - Quick check: If we got a request object, we should *not* have
-        #   gotten any keyword arguments that map to the request.
-        has_flattened_params = any([resource])
-        if request is not None and has_flattened_params:
-            raise ValueError("If the `request` argument is set, then none of "
-                             "the individual field arguments should be set.")
-
-        # - The request isn't a proto-plus wrapped type,
-        #   so it must be constructed via keyword expansion.
-        if isinstance(request, dict):
-            request = iam_policy_pb2.GetIamPolicyRequest(**request)
-        elif not request:
-            request = iam_policy_pb2.GetIamPolicyRequest(resource=resource)
-
-        # Wrap the RPC method; this adds retry and timeout information,
-        # and friendly error handling.
-        rpc = self._client._transport._wrapped_methods[self._client._transport.get_iam_policy]
-
-        # Certain fields should be provided within the metadata header;
-        # add these here.
-        metadata = tuple(metadata) + (
-            gapic_v1.routing_header.to_grpc_metadata((
-                ("resource", request.resource),
-            )),
-        )
-
-        # Validate the universe domain.
- self._client._validate_universe_domain() - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - async def set_iam_policy(self, - request: Optional[Union[iam_policy_pb2.SetIamPolicyRequest, dict]] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> policy_pb2.Policy: - r"""Sets the access control policy on the specified contentitem - resource. Replaces any existing policy. - - Caller must have Google IAM ``dataplex.content.setIamPolicy`` - permission on the resource. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import dataplex_v1 - from google.iam.v1 import iam_policy_pb2 # type: ignore - - async def sample_set_iam_policy(): - # Create a client - client = dataplex_v1.ContentServiceAsyncClient() - - # Initialize request argument(s) - request = iam_policy_pb2.SetIamPolicyRequest( - resource="resource_value", - ) - - # Make the request - response = await client.set_iam_policy(request=request) - - # Handle the response - print(response) - - Args: - request (Optional[Union[google.iam.v1.iam_policy_pb2.SetIamPolicyRequest, dict]]): - The request object. Request message for ``SetIamPolicy`` method. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.iam.v1.policy_pb2.Policy: - An Identity and Access Management (IAM) policy, which specifies access - controls for Google Cloud resources. - - A Policy is a collection of bindings. A binding binds - one or more members, or principals, to a single role. - Principals can be user accounts, service accounts, - Google groups, and domains (such as G Suite). A role - is a named list of permissions; each role can be an - IAM predefined role or a user-created custom role. - - For some types of Google Cloud resources, a binding - can also specify a condition, which is a logical - expression that allows access to a resource only if - the expression evaluates to true. A condition can add - constraints based on attributes of the request, the - resource, or both. To learn which resources support - conditions in their IAM policies, see the [IAM - documentation](\ https://cloud.google.com/iam/help/conditions/resource-policies). 
-
-                **JSON example:**
-
-                ::
-
-                    {
-                      "bindings": [
-                        {
-                          "role": "roles/resourcemanager.organizationAdmin",
-                          "members": [
-                            "user:mike@example.com",
-                            "group:admins@example.com",
-                            "domain:google.com",
-                            "serviceAccount:my-project-id@appspot.gserviceaccount.com"
-                          ]
-                        },
-                        {
-                          "role": "roles/resourcemanager.organizationViewer",
-                          "members": [
-                            "user:eve@example.com"
-                          ],
-                          "condition": {
-                            "title": "expirable access",
-                            "description": "Does not grant access after Sep 2020",
-                            "expression": "request.time < timestamp('2020-10-01T00:00:00.000Z')"
-                          }
-                        }
-                      ],
-                      "etag": "BwWWja0YfJA=",
-                      "version": 3
-                    }
-
-                **YAML example:**
-
-                ::
-
-                    bindings:
-                    - members:
-                      - user:mike@example.com
-                      - group:admins@example.com
-                      - domain:google.com
-                      - serviceAccount:my-project-id@appspot.gserviceaccount.com
-                      role: roles/resourcemanager.organizationAdmin
-                    - members:
-                      - user:eve@example.com
-                      role: roles/resourcemanager.organizationViewer
-                      condition:
-                        title: expirable access
-                        description: Does not grant access after Sep 2020
-                        expression: request.time < timestamp('2020-10-01T00:00:00.000Z')
-                    etag: BwWWja0YfJA=
-                    version: 3
-
-                For a description of IAM and its features, see the
-                [IAM documentation](\ https://cloud.google.com/iam/docs/).
-
-        """
-        # Create or coerce a protobuf request object.
-        # - The request isn't a proto-plus wrapped type,
-        #   so it must be constructed via keyword expansion.
-        if isinstance(request, dict):
-            request = iam_policy_pb2.SetIamPolicyRequest(**request)
-        elif not request:
-            request = iam_policy_pb2.SetIamPolicyRequest()
-
-        # Wrap the RPC method; this adds retry and timeout information,
-        # and friendly error handling.
-        rpc = self._client._transport._wrapped_methods[self._client._transport.set_iam_policy]
-
-        # Certain fields should be provided within the metadata header;
-        # add these here.
-        metadata = tuple(metadata) + (
-            gapic_v1.routing_header.to_grpc_metadata((
-                ("resource", request.resource),
-            )),
-        )
-
-        # Validate the universe domain.
-        self._client._validate_universe_domain()
-
-        # Send the request.
-        response = await rpc(
-            request,
-            retry=retry,
-            timeout=timeout,
-            metadata=metadata,
-        )
-
-        # Done; return the response.
-        return response
-
-    async def test_iam_permissions(self,
-            request: Optional[Union[iam_policy_pb2.TestIamPermissionsRequest, dict]] = None,
-            *,
-            retry: OptionalRetry = gapic_v1.method.DEFAULT,
-            timeout: Union[float, object] = gapic_v1.method.DEFAULT,
-            metadata: Sequence[Tuple[str, str]] = (),
-            ) -> iam_policy_pb2.TestIamPermissionsResponse:
-        r"""Returns the caller's permissions on a resource. If the resource
-        does not exist, an empty set of permissions is returned (a
-        ``NOT_FOUND`` error is not returned).
-
-        A caller is not required to have Google IAM permission to make
-        this request.
-
-        Note: This operation is designed to be used for building
-        permission-aware UIs and command-line tools, not for
-        authorization checking. This operation may "fail open" without
-        warning.
-
-        .. code-block:: python
-
-            # This snippet has been automatically generated and should be regarded as a
-            # code template only.
-            # It will require modifications to work:
-            # - It may require correct/in-range values for request initialization.
- # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import dataplex_v1 - from google.iam.v1 import iam_policy_pb2 # type: ignore - - async def sample_test_iam_permissions(): - # Create a client - client = dataplex_v1.ContentServiceAsyncClient() - - # Initialize request argument(s) - request = iam_policy_pb2.TestIamPermissionsRequest( - resource="resource_value", - permissions=['permissions_value1', 'permissions_value2'], - ) - - # Make the request - response = await client.test_iam_permissions(request=request) - - # Handle the response - print(response) - - Args: - request (Optional[Union[google.iam.v1.iam_policy_pb2.TestIamPermissionsRequest, dict]]): - The request object. Request message for ``TestIamPermissions`` method. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.iam.v1.iam_policy_pb2.TestIamPermissionsResponse: - Response message for TestIamPermissions method. - """ - # Create or coerce a protobuf request object. - # - The request isn't a proto-plus wrapped type, - # so it must be constructed via keyword expansion. - if isinstance(request, dict): - request = iam_policy_pb2.TestIamPermissionsRequest(**request) - elif not request: - request = iam_policy_pb2.TestIamPermissionsRequest() - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._client._transport._wrapped_methods[self._client._transport.test_iam_permissions] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("resource", request.resource), - )), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - async def list_content(self, - request: Optional[Union[content.ListContentRequest, dict]] = None, - *, - parent: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> pagers.ListContentAsyncPager: - r"""List content. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import dataplex_v1 - - async def sample_list_content(): - # Create a client - client = dataplex_v1.ContentServiceAsyncClient() - - # Initialize request argument(s) - request = dataplex_v1.ListContentRequest( - parent="parent_value", - ) - - # Make the request - page_result = client.list_content(request=request) - - # Handle the response - async for response in page_result: - print(response) - - Args: - request (Optional[Union[google.cloud.dataplex_v1.types.ListContentRequest, dict]]): - The request object. 
List content request. Returns the - BASIC Content view. - parent (:class:`str`): - Required. The resource name of the parent lake: - projects/{project_id}/locations/{location_id}/lakes/{lake_id} - - This corresponds to the ``parent`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.cloud.dataplex_v1.services.content_service.pagers.ListContentAsyncPager: - List content response. - - Iterating over this object will yield - results and resolve additional pages - automatically. - - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([parent]) - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, content.ListContentRequest): - request = content.ListContentRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if parent is not None: - request.parent = parent - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._client._transport._wrapped_methods[self._client._transport.list_content] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # This method is paged; wrap the response in a pager, which provides - # an `__aiter__` convenience method. - response = pagers.ListContentAsyncPager( - method=rpc, - request=request, - response=response, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - async def list_operations( - self, - request: Optional[operations_pb2.ListOperationsRequest] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> operations_pb2.ListOperationsResponse: - r"""Lists operations that match the specified filter in the request. - - Args: - request (:class:`~.operations_pb2.ListOperationsRequest`): - The request object. Request message for - `ListOperations` method. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, - if any, should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - Returns: - ~.operations_pb2.ListOperationsResponse: - Response message for ``ListOperations`` method. - """ - # Create or coerce a protobuf request object. - # The request isn't a proto-plus wrapped type, - # so it must be constructed via keyword expansion. 
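-        # For illustration, a dict request would be expanded like this
-        # (a sketch; the name value below is a placeholder):
-        #
-        #     await client.list_operations({"name": "projects/my-project/locations/us-central1"})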
- if isinstance(request, dict): - request = operations_pb2.ListOperationsRequest(**request) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self.transport._wrapped_methods[self._client._transport.list_operations] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata( - (("name", request.name),)), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. - response = await rpc( - request, retry=retry, timeout=timeout, metadata=metadata,) - - # Done; return the response. - return response - - async def get_operation( - self, - request: Optional[operations_pb2.GetOperationRequest] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> operations_pb2.Operation: - r"""Gets the latest state of a long-running operation. - - Args: - request (:class:`~.operations_pb2.GetOperationRequest`): - The request object. Request message for - `GetOperation` method. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, - if any, should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - Returns: - ~.operations_pb2.Operation: - An ``Operation`` object. - """ - # Create or coerce a protobuf request object. - # The request isn't a proto-plus wrapped type, - # so it must be constructed via keyword expansion. - if isinstance(request, dict): - request = operations_pb2.GetOperationRequest(**request) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self.transport._wrapped_methods[self._client._transport.get_operation] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata( - (("name", request.name),)), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. - response = await rpc( - request, retry=retry, timeout=timeout, metadata=metadata,) - - # Done; return the response. - return response - - async def delete_operation( - self, - request: Optional[operations_pb2.DeleteOperationRequest] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> None: - r"""Deletes a long-running operation. - - This method indicates that the client is no longer interested - in the operation result. It does not cancel the operation. - If the server doesn't support this method, it returns - `google.rpc.Code.UNIMPLEMENTED`. - - Args: - request (:class:`~.operations_pb2.DeleteOperationRequest`): - The request object. Request message for - `DeleteOperation` method. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, - if any, should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - Returns: - None - """ - # Create or coerce a protobuf request object. - # The request isn't a proto-plus wrapped type, - # so it must be constructed via keyword expansion. 
- if isinstance(request, dict): - request = operations_pb2.DeleteOperationRequest(**request) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self.transport._wrapped_methods[self._client._transport.delete_operation] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata( - (("name", request.name),)), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. - await rpc(request, retry=retry, timeout=timeout, metadata=metadata,) - - async def cancel_operation( - self, - request: Optional[operations_pb2.CancelOperationRequest] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> None: - r"""Starts asynchronous cancellation on a long-running operation. - - The server makes a best effort to cancel the operation, but success - is not guaranteed. If the server doesn't support this method, it returns - `google.rpc.Code.UNIMPLEMENTED`. - - Args: - request (:class:`~.operations_pb2.CancelOperationRequest`): - The request object. Request message for - `CancelOperation` method. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, - if any, should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - Returns: - None - """ - # Create or coerce a protobuf request object. - # The request isn't a proto-plus wrapped type, - # so it must be constructed via keyword expansion. - if isinstance(request, dict): - request = operations_pb2.CancelOperationRequest(**request) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self.transport._wrapped_methods[self._client._transport.cancel_operation] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata( - (("name", request.name),)), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. - await rpc(request, retry=retry, timeout=timeout, metadata=metadata,) - - async def get_location( - self, - request: Optional[locations_pb2.GetLocationRequest] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> locations_pb2.Location: - r"""Gets information about a location. - - Args: - request (:class:`~.location_pb2.GetLocationRequest`): - The request object. Request message for - `GetLocation` method. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, - if any, should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - Returns: - ~.location_pb2.Location: - Location object. - """ - # Create or coerce a protobuf request object. - # The request isn't a proto-plus wrapped type, - # so it must be constructed via keyword expansion. - if isinstance(request, dict): - request = locations_pb2.GetLocationRequest(**request) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. 
- rpc = self.transport._wrapped_methods[self._client._transport.get_location] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata( - (("name", request.name),)), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. - response = await rpc( - request, retry=retry, timeout=timeout, metadata=metadata,) - - # Done; return the response. - return response - - async def list_locations( - self, - request: Optional[locations_pb2.ListLocationsRequest] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> locations_pb2.ListLocationsResponse: - r"""Lists information about the supported locations for this service. - - Args: - request (:class:`~.location_pb2.ListLocationsRequest`): - The request object. Request message for - `ListLocations` method. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, - if any, should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - Returns: - ~.location_pb2.ListLocationsResponse: - Response message for ``ListLocations`` method. - """ - # Create or coerce a protobuf request object. - # The request isn't a proto-plus wrapped type, - # so it must be constructed via keyword expansion. - if isinstance(request, dict): - request = locations_pb2.ListLocationsRequest(**request) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self.transport._wrapped_methods[self._client._transport.list_locations] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata( - (("name", request.name),)), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. - response = await rpc( - request, retry=retry, timeout=timeout, metadata=metadata,) - - # Done; return the response. - return response - - async def __aenter__(self) -> "ContentServiceAsyncClient": - return self - - async def __aexit__(self, exc_type, exc, tb): - await self.transport.close() - -DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(gapic_version=package_version.__version__) - - -__all__ = ( - "ContentServiceAsyncClient", -) diff --git a/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/content_service/client.py b/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/content_service/client.py deleted file mode 100644 index ff4cbd86c3cb..000000000000 --- a/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/content_service/client.py +++ /dev/null @@ -1,1767 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
-# See the License for the specific language governing permissions and -# limitations under the License. -# -from collections import OrderedDict -import os -import re -from typing import Dict, Callable, Mapping, MutableMapping, MutableSequence, Optional, Sequence, Tuple, Type, Union, cast -import warnings - -from google.cloud.dataplex_v1 import gapic_version as package_version - -from google.api_core import client_options as client_options_lib -from google.api_core import exceptions as core_exceptions -from google.api_core import gapic_v1 -from google.api_core import retry as retries -from google.auth import credentials as ga_credentials # type: ignore -from google.auth.transport import mtls # type: ignore -from google.auth.transport.grpc import SslCredentials # type: ignore -from google.auth.exceptions import MutualTLSChannelError # type: ignore -from google.oauth2 import service_account # type: ignore - -try: - OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault, None] -except AttributeError: # pragma: NO COVER - OptionalRetry = Union[retries.Retry, object, None] # type: ignore - -from google.cloud.dataplex_v1.services.content_service import pagers -from google.cloud.dataplex_v1.types import analyze -from google.cloud.dataplex_v1.types import content -from google.cloud.dataplex_v1.types import content as gcd_content -from google.cloud.location import locations_pb2 # type: ignore -from google.iam.v1 import iam_policy_pb2 # type: ignore -from google.iam.v1 import policy_pb2 # type: ignore -from google.longrunning import operations_pb2 # type: ignore -from google.protobuf import field_mask_pb2 # type: ignore -from google.protobuf import timestamp_pb2 # type: ignore -from .transports.base import ContentServiceTransport, DEFAULT_CLIENT_INFO -from .transports.grpc import ContentServiceGrpcTransport -from .transports.grpc_asyncio import ContentServiceGrpcAsyncIOTransport - - -class ContentServiceClientMeta(type): - """Metaclass for the ContentService client. - - This provides class-level methods for building and retrieving - support objects (e.g. transport) without polluting the client instance - objects. - """ - _transport_registry = OrderedDict() # type: Dict[str, Type[ContentServiceTransport]] - _transport_registry["grpc"] = ContentServiceGrpcTransport - _transport_registry["grpc_asyncio"] = ContentServiceGrpcAsyncIOTransport - - def get_transport_class(cls, - label: Optional[str] = None, - ) -> Type[ContentServiceTransport]: - """Returns an appropriate transport class. - - Args: - label: The name of the desired transport. If none is - provided, then the first transport in the registry is used. - - Returns: - The transport class to use. - """ - # If a specific transport is requested, return that one. - if label: - return cls._transport_registry[label] - - # No transport is requested; return the default (that is, the first one - # in the dictionary). - return next(iter(cls._transport_registry.values())) - - -class ContentServiceClient(metaclass=ContentServiceClientMeta): - """ContentService manages Notebook and SQL Scripts for Dataplex.""" - - @staticmethod - def _get_default_mtls_endpoint(api_endpoint): - """Converts api endpoint to mTLS endpoint. - - Convert "*.sandbox.googleapis.com" and "*.googleapis.com" to - "*.mtls.sandbox.googleapis.com" and "*.mtls.googleapis.com" respectively. - Args: - api_endpoint (Optional[str]): the api endpoint to convert. - Returns: - str: converted mTLS api endpoint. 
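-
-        For example, with the default Dataplex endpoint defined below (an
-        illustrative mapping, not generated output)::
-
-            "dataplex.googleapis.com"          ->  "dataplex.mtls.googleapis.com"
-            "dataplex.sandbox.googleapis.com"  ->  "dataplex.mtls.sandbox.googleapis.com"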
- """ - if not api_endpoint: - return api_endpoint - - mtls_endpoint_re = re.compile( - r"(?P[^.]+)(?P\.mtls)?(?P\.sandbox)?(?P\.googleapis\.com)?" - ) - - m = mtls_endpoint_re.match(api_endpoint) - name, mtls, sandbox, googledomain = m.groups() - if mtls or not googledomain: - return api_endpoint - - if sandbox: - return api_endpoint.replace( - "sandbox.googleapis.com", "mtls.sandbox.googleapis.com" - ) - - return api_endpoint.replace(".googleapis.com", ".mtls.googleapis.com") - - # Note: DEFAULT_ENDPOINT is deprecated. Use _DEFAULT_ENDPOINT_TEMPLATE instead. - DEFAULT_ENDPOINT = "dataplex.googleapis.com" - DEFAULT_MTLS_ENDPOINT = _get_default_mtls_endpoint.__func__( # type: ignore - DEFAULT_ENDPOINT - ) - - _DEFAULT_ENDPOINT_TEMPLATE = "dataplex.{UNIVERSE_DOMAIN}" - _DEFAULT_UNIVERSE = "googleapis.com" - - @classmethod - def from_service_account_info(cls, info: dict, *args, **kwargs): - """Creates an instance of this client using the provided credentials - info. - - Args: - info (dict): The service account private key info. - args: Additional arguments to pass to the constructor. - kwargs: Additional arguments to pass to the constructor. - - Returns: - ContentServiceClient: The constructed client. - """ - credentials = service_account.Credentials.from_service_account_info(info) - kwargs["credentials"] = credentials - return cls(*args, **kwargs) - - @classmethod - def from_service_account_file(cls, filename: str, *args, **kwargs): - """Creates an instance of this client using the provided credentials - file. - - Args: - filename (str): The path to the service account private key json - file. - args: Additional arguments to pass to the constructor. - kwargs: Additional arguments to pass to the constructor. - - Returns: - ContentServiceClient: The constructed client. - """ - credentials = service_account.Credentials.from_service_account_file( - filename) - kwargs["credentials"] = credentials - return cls(*args, **kwargs) - - from_service_account_json = from_service_account_file - - @property - def transport(self) -> ContentServiceTransport: - """Returns the transport used by the client instance. - - Returns: - ContentServiceTransport: The transport used by the client - instance. 
- """ - return self._transport - - @staticmethod - def content_path(project: str,location: str,lake: str,content: str,) -> str: - """Returns a fully-qualified content string.""" - return "projects/{project}/locations/{location}/lakes/{lake}/content/{content}".format(project=project, location=location, lake=lake, content=content, ) - - @staticmethod - def parse_content_path(path: str) -> Dict[str,str]: - """Parses a content path into its component segments.""" - m = re.match(r"^projects/(?P.+?)/locations/(?P.+?)/lakes/(?P.+?)/content/(?P.+?)$", path) - return m.groupdict() if m else {} - - @staticmethod - def lake_path(project: str,location: str,lake: str,) -> str: - """Returns a fully-qualified lake string.""" - return "projects/{project}/locations/{location}/lakes/{lake}".format(project=project, location=location, lake=lake, ) - - @staticmethod - def parse_lake_path(path: str) -> Dict[str,str]: - """Parses a lake path into its component segments.""" - m = re.match(r"^projects/(?P.+?)/locations/(?P.+?)/lakes/(?P.+?)$", path) - return m.groupdict() if m else {} - - @staticmethod - def common_billing_account_path(billing_account: str, ) -> str: - """Returns a fully-qualified billing_account string.""" - return "billingAccounts/{billing_account}".format(billing_account=billing_account, ) - - @staticmethod - def parse_common_billing_account_path(path: str) -> Dict[str,str]: - """Parse a billing_account path into its component segments.""" - m = re.match(r"^billingAccounts/(?P.+?)$", path) - return m.groupdict() if m else {} - - @staticmethod - def common_folder_path(folder: str, ) -> str: - """Returns a fully-qualified folder string.""" - return "folders/{folder}".format(folder=folder, ) - - @staticmethod - def parse_common_folder_path(path: str) -> Dict[str,str]: - """Parse a folder path into its component segments.""" - m = re.match(r"^folders/(?P.+?)$", path) - return m.groupdict() if m else {} - - @staticmethod - def common_organization_path(organization: str, ) -> str: - """Returns a fully-qualified organization string.""" - return "organizations/{organization}".format(organization=organization, ) - - @staticmethod - def parse_common_organization_path(path: str) -> Dict[str,str]: - """Parse a organization path into its component segments.""" - m = re.match(r"^organizations/(?P.+?)$", path) - return m.groupdict() if m else {} - - @staticmethod - def common_project_path(project: str, ) -> str: - """Returns a fully-qualified project string.""" - return "projects/{project}".format(project=project, ) - - @staticmethod - def parse_common_project_path(path: str) -> Dict[str,str]: - """Parse a project path into its component segments.""" - m = re.match(r"^projects/(?P.+?)$", path) - return m.groupdict() if m else {} - - @staticmethod - def common_location_path(project: str, location: str, ) -> str: - """Returns a fully-qualified location string.""" - return "projects/{project}/locations/{location}".format(project=project, location=location, ) - - @staticmethod - def parse_common_location_path(path: str) -> Dict[str,str]: - """Parse a location path into its component segments.""" - m = re.match(r"^projects/(?P.+?)/locations/(?P.+?)$", path) - return m.groupdict() if m else {} - - @classmethod - def get_mtls_endpoint_and_cert_source(cls, client_options: Optional[client_options_lib.ClientOptions] = None): - """Deprecated. Return the API endpoint and client cert source for mutual TLS. 
- - The client cert source is determined in the following order: - (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the - client cert source is None. - (2) if `client_options.client_cert_source` is provided, use the provided one; if the - default client cert source exists, use the default one; otherwise the client cert - source is None. - - The API endpoint is determined in the following order: - (1) if `client_options.api_endpoint` is provided, use the provided one. - (2) if `GOOGLE_API_USE_MTLS_ENDPOINT` environment variable is "always", use the - default mTLS endpoint; if the environment variable is "never", use the default API - endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise - use the default API endpoint. - - More details can be found at https://google.aip.dev/auth/4114. - - Args: - client_options (google.api_core.client_options.ClientOptions): Custom options for the - client. Only the `api_endpoint` and `client_cert_source` properties may be used - in this method. - - Returns: - Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the - client cert source to use. - - Raises: - google.auth.exceptions.MutualTLSChannelError: If any errors happen. - """ - - warnings.warn("get_mtls_endpoint_and_cert_source is deprecated. Use the api_endpoint property instead.", - DeprecationWarning) - if client_options is None: - client_options = client_options_lib.ClientOptions() - use_client_cert = os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false") - use_mtls_endpoint = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto") - if use_client_cert not in ("true", "false"): - raise ValueError("Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`") - if use_mtls_endpoint not in ("auto", "never", "always"): - raise MutualTLSChannelError("Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`") - - # Figure out the client cert source to use. - client_cert_source = None - if use_client_cert == "true": - if client_options.client_cert_source: - client_cert_source = client_options.client_cert_source - elif mtls.has_default_client_cert_source(): - client_cert_source = mtls.default_client_cert_source() - - # Figure out which api endpoint to use. - if client_options.api_endpoint is not None: - api_endpoint = client_options.api_endpoint - elif use_mtls_endpoint == "always" or (use_mtls_endpoint == "auto" and client_cert_source): - api_endpoint = cls.DEFAULT_MTLS_ENDPOINT - else: - api_endpoint = cls.DEFAULT_ENDPOINT - - return api_endpoint, client_cert_source - - @staticmethod - def _read_environment_variables(): - """Returns the environment variables used by the client. - - Returns: - Tuple[bool, str, str]: returns the GOOGLE_API_USE_CLIENT_CERTIFICATE, - GOOGLE_API_USE_MTLS_ENDPOINT, and GOOGLE_CLOUD_UNIVERSE_DOMAIN environment variables. - - Raises: - ValueError: If GOOGLE_API_USE_CLIENT_CERTIFICATE is not - any of ["true", "false"]. - google.auth.exceptions.MutualTLSChannelError: If GOOGLE_API_USE_MTLS_ENDPOINT - is not any of ["auto", "never", "always"].
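# A sketch of how the deprecated resolver above reacts to the two
# environment variables; the values set here are illustrative assumptions,
# and calling the method emits a DeprecationWarning.
import os
from google.cloud import dataplex_v1

os.environ["GOOGLE_API_USE_MTLS_ENDPOINT"] = "always"
endpoint, cert_source = (
    dataplex_v1.ContentServiceClient.get_mtls_endpoint_and_cert_source())
# endpoint == "dataplex.mtls.googleapis.com"; cert_source stays None unless
# GOOGLE_API_USE_CLIENT_CERTIFICATE is "true" and a default certificate exists.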
- """ - use_client_cert = os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false").lower() - use_mtls_endpoint = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto").lower() - universe_domain_env = os.getenv("GOOGLE_CLOUD_UNIVERSE_DOMAIN") - if use_client_cert not in ("true", "false"): - raise ValueError("Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`") - if use_mtls_endpoint not in ("auto", "never", "always"): - raise MutualTLSChannelError("Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`") - return use_client_cert == "true", use_mtls_endpoint, universe_domain_env - - @staticmethod - def _get_client_cert_source(provided_cert_source, use_cert_flag): - """Return the client cert source to be used by the client. - - Args: - provided_cert_source (bytes): The client certificate source provided. - use_cert_flag (bool): A flag indicating whether to use the client certificate. - - Returns: - bytes or None: The client cert source to be used by the client. - """ - client_cert_source = None - if use_cert_flag: - if provided_cert_source: - client_cert_source = provided_cert_source - elif mtls.has_default_client_cert_source(): - client_cert_source = mtls.default_client_cert_source() - return client_cert_source - - @staticmethod - def _get_api_endpoint(api_override, client_cert_source, universe_domain, use_mtls_endpoint): - """Return the API endpoint used by the client. - - Args: - api_override (str): The API endpoint override. If specified, this is always - the return value of this function and the other arguments are not used. - client_cert_source (bytes): The client certificate source used by the client. - universe_domain (str): The universe domain used by the client. - use_mtls_endpoint (str): How to use the mTLS endpoint, which depends also on the other parameters. - Possible values are "always", "auto", or "never". - - Returns: - str: The API endpoint to be used by the client. - """ - if api_override is not None: - api_endpoint = api_override - elif use_mtls_endpoint == "always" or (use_mtls_endpoint == "auto" and client_cert_source): - _default_universe = ContentServiceClient._DEFAULT_UNIVERSE - if universe_domain != _default_universe: - raise MutualTLSChannelError(f"mTLS is not supported in any universe other than {_default_universe}.") - api_endpoint = ContentServiceClient.DEFAULT_MTLS_ENDPOINT - else: - api_endpoint = ContentServiceClient._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=universe_domain) - return api_endpoint - - @staticmethod - def _get_universe_domain(client_universe_domain: Optional[str], universe_domain_env: Optional[str]) -> str: - """Return the universe domain used by the client. - - Args: - client_universe_domain (Optional[str]): The universe domain configured via the client options. - universe_domain_env (Optional[str]): The universe domain configured via the "GOOGLE_CLOUD_UNIVERSE_DOMAIN" environment variable. - - Returns: - str: The universe domain to be used by the client. - - Raises: - ValueError: If the universe domain is an empty string. 
- """ - universe_domain = ContentServiceClient._DEFAULT_UNIVERSE - if client_universe_domain is not None: - universe_domain = client_universe_domain - elif universe_domain_env is not None: - universe_domain = universe_domain_env - if len(universe_domain.strip()) == 0: - raise ValueError("Universe Domain cannot be an empty string.") - return universe_domain - - def _validate_universe_domain(self): - """Validates client's and credentials' universe domains are consistent. - - Returns: - bool: True iff the configured universe domain is valid. - - Raises: - ValueError: If the configured universe domain is not valid. - """ - - # NOTE (b/349488459): universe validation is disabled until further notice. - return True - - @property - def api_endpoint(self): - """Return the API endpoint used by the client instance. - - Returns: - str: The API endpoint used by the client instance. - """ - return self._api_endpoint - - @property - def universe_domain(self) -> str: - """Return the universe domain used by the client instance. - - Returns: - str: The universe domain used by the client instance. - """ - return self._universe_domain - - def __init__(self, *, - credentials: Optional[ga_credentials.Credentials] = None, - transport: Optional[Union[str, ContentServiceTransport, Callable[..., ContentServiceTransport]]] = None, - client_options: Optional[Union[client_options_lib.ClientOptions, dict]] = None, - client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, - ) -> None: - """Instantiates the content service client. - - Args: - credentials (Optional[google.auth.credentials.Credentials]): The - authorization credentials to attach to requests. These - credentials identify the application to the service; if none - are specified, the client will attempt to ascertain the - credentials from the environment. - transport (Optional[Union[str,ContentServiceTransport,Callable[..., ContentServiceTransport]]]): - The transport to use, or a Callable that constructs and returns a new transport. - If a Callable is given, it will be called with the same set of initialization - arguments as used in the ContentServiceTransport constructor. - If set to None, a transport is chosen automatically. - client_options (Optional[Union[google.api_core.client_options.ClientOptions, dict]]): - Custom options for the client. - - 1. The ``api_endpoint`` property can be used to override the - default endpoint provided by the client when ``transport`` is - not explicitly provided. Only if this property is not set and - ``transport`` was not explicitly provided, the endpoint is - determined by the GOOGLE_API_USE_MTLS_ENDPOINT environment - variable, which have one of the following values: - "always" (always use the default mTLS endpoint), "never" (always - use the default regular endpoint) and "auto" (auto-switch to the - default mTLS endpoint if client certificate is present; this is - the default value). - - 2. If the GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable - is "true", then the ``client_cert_source`` property can be used - to provide a client certificate for mTLS transport. If - not provided, the default SSL client certificate will be used if - present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not - set, no client certificate will be used. - - 3. The ``universe_domain`` property can be used to override the - default "googleapis.com" universe. Note that the ``api_endpoint`` - property still takes precedence; and ``universe_domain`` is - currently not supported for mTLS. 
- - client_info (google.api_core.gapic_v1.client_info.ClientInfo): - The client info used to send a user-agent string along with - API requests. If ``None``, then default info will be used. - Generally, you only need to set this if you're developing - your own client library. - - Raises: - google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport - creation failed for any reason. - """ - self._client_options = client_options - if isinstance(self._client_options, dict): - self._client_options = client_options_lib.from_dict(self._client_options) - if self._client_options is None: - self._client_options = client_options_lib.ClientOptions() - self._client_options = cast(client_options_lib.ClientOptions, self._client_options) - - universe_domain_opt = getattr(self._client_options, 'universe_domain', None) - - self._use_client_cert, self._use_mtls_endpoint, self._universe_domain_env = ContentServiceClient._read_environment_variables() - self._client_cert_source = ContentServiceClient._get_client_cert_source(self._client_options.client_cert_source, self._use_client_cert) - self._universe_domain = ContentServiceClient._get_universe_domain(universe_domain_opt, self._universe_domain_env) - self._api_endpoint = None # updated below, depending on `transport` - - # Initialize the universe domain validation. - self._is_universe_domain_valid = False - - api_key_value = getattr(self._client_options, "api_key", None) - if api_key_value and credentials: - raise ValueError("client_options.api_key and credentials are mutually exclusive") - - # Save or instantiate the transport. - # Ordinarily, we provide the transport, but allowing a custom transport - # instance provides an extensibility point for unusual situations. - transport_provided = isinstance(transport, ContentServiceTransport) - if transport_provided: - # transport is a ContentServiceTransport instance. - if credentials or self._client_options.credentials_file or api_key_value: - raise ValueError("When providing a transport instance, " - "provide its credentials directly.") - if self._client_options.scopes: - raise ValueError( - "When providing a transport instance, provide its scopes " - "directly." 
- ) - self._transport = cast(ContentServiceTransport, transport) - self._api_endpoint = self._transport.host - - self._api_endpoint = (self._api_endpoint or - ContentServiceClient._get_api_endpoint( - self._client_options.api_endpoint, - self._client_cert_source, - self._universe_domain, - self._use_mtls_endpoint)) - - if not transport_provided: - import google.auth._default # type: ignore - - if api_key_value and hasattr(google.auth._default, "get_api_key_credentials"): - credentials = google.auth._default.get_api_key_credentials(api_key_value) - - transport_init: Union[Type[ContentServiceTransport], Callable[..., ContentServiceTransport]] = ( - ContentServiceClient.get_transport_class(transport) - if isinstance(transport, str) or transport is None - else cast(Callable[..., ContentServiceTransport], transport) - ) - # initialize with the provided callable or the passed in class - self._transport = transport_init( - credentials=credentials, - credentials_file=self._client_options.credentials_file, - host=self._api_endpoint, - scopes=self._client_options.scopes, - client_cert_source_for_mtls=self._client_cert_source, - quota_project_id=self._client_options.quota_project_id, - client_info=client_info, - always_use_jwt_access=True, - api_audience=self._client_options.api_audience, - ) - - def create_content(self, - request: Optional[Union[gcd_content.CreateContentRequest, dict]] = None, - *, - parent: Optional[str] = None, - content: Optional[analyze.Content] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> analyze.Content: - r"""Create a content. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import dataplex_v1 - - def sample_create_content(): - # Create a client - client = dataplex_v1.ContentServiceClient() - - # Initialize request argument(s) - content = dataplex_v1.Content() - content.data_text = "data_text_value" - content.sql_script.engine = "SPARK" - content.path = "path_value" - - request = dataplex_v1.CreateContentRequest( - parent="parent_value", - content=content, - ) - - # Make the request - response = client.create_content(request=request) - - # Handle the response - print(response) - - Args: - request (Union[google.cloud.dataplex_v1.types.CreateContentRequest, dict]): - The request object. Create content request. - parent (str): - Required. The resource name of the parent lake: - projects/{project_id}/locations/{location_id}/lakes/{lake_id} - - This corresponds to the ``parent`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - content (google.cloud.dataplex_v1.types.Content): - Required. Content resource. - This corresponds to the ``content`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. 
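# Besides the request-object form in the generated sample above, the
# flattened-parameter form below is equivalent; the resource names and SQL
# text are placeholder assumptions.
from google.cloud import dataplex_v1

client = dataplex_v1.ContentServiceClient()
content = dataplex_v1.Content(
    path="scripts/example.sql",
    data_text="SELECT 1",
    sql_script=dataplex_v1.Content.SqlScript(engine="SPARK"),
)
response = client.create_content(
    parent="projects/my-project/locations/us-central1/lakes/my-lake",
    content=content,
)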
- - Returns: - google.cloud.dataplex_v1.types.Content: - Content represents a user-visible - notebook or a sql script - - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([parent, content]) - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, gcd_content.CreateContentRequest): - request = gcd_content.CreateContentRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if parent is not None: - request.parent = parent - if content is not None: - request.content = content - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.create_content] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), - ) - - # Validate the universe domain. - self._validate_universe_domain() - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - def update_content(self, - request: Optional[Union[gcd_content.UpdateContentRequest, dict]] = None, - *, - content: Optional[analyze.Content] = None, - update_mask: Optional[field_mask_pb2.FieldMask] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> analyze.Content: - r"""Update a content. Only supports full resource update. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import dataplex_v1 - - def sample_update_content(): - # Create a client - client = dataplex_v1.ContentServiceClient() - - # Initialize request argument(s) - content = dataplex_v1.Content() - content.data_text = "data_text_value" - content.sql_script.engine = "SPARK" - content.path = "path_value" - - request = dataplex_v1.UpdateContentRequest( - content=content, - ) - - # Make the request - response = client.update_content(request=request) - - # Handle the response - print(response) - - Args: - request (Union[google.cloud.dataplex_v1.types.UpdateContentRequest, dict]): - The request object. Update content request. - content (google.cloud.dataplex_v1.types.Content): - Required. Update description. Only fields specified in - ``update_mask`` are updated. - - This corresponds to the ``content`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - update_mask (google.protobuf.field_mask_pb2.FieldMask): - Required. Mask of fields to update. 
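# A minimal update sketch, assuming a ContentServiceClient as constructed in
# the earlier sketch: per the docstring above, only fields named in the
# FieldMask are written. The content name and description are placeholders.
from google.protobuf import field_mask_pb2

updated = dataplex_v1.Content(
    name="projects/my-project/locations/us-central1/lakes/my-lake/content/my-content",
    description="refreshed description",
)
response = client.update_content(
    content=updated,
    update_mask=field_mask_pb2.FieldMask(paths=["description"]),
)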
- This corresponds to the ``update_mask`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.cloud.dataplex_v1.types.Content: - Content represents a user-visible - notebook or a sql script - - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([content, update_mask]) - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, gcd_content.UpdateContentRequest): - request = gcd_content.UpdateContentRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if content is not None: - request.content = content - if update_mask is not None: - request.update_mask = update_mask - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.update_content] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("content.name", request.content.name), - )), - ) - - # Validate the universe domain. - self._validate_universe_domain() - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - def delete_content(self, - request: Optional[Union[content.DeleteContentRequest, dict]] = None, - *, - name: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> None: - r"""Delete a content. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import dataplex_v1 - - def sample_delete_content(): - # Create a client - client = dataplex_v1.ContentServiceClient() - - # Initialize request argument(s) - request = dataplex_v1.DeleteContentRequest( - name="name_value", - ) - - # Make the request - client.delete_content(request=request) - - Args: - request (Union[google.cloud.dataplex_v1.types.DeleteContentRequest, dict]): - The request object. Delete content request. - name (str): - Required. The resource name of the content: - projects/{project_id}/locations/{location_id}/lakes/{lake_id}/content/{content_id} - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. 
- retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([name]) - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, content.DeleteContentRequest): - request = content.DeleteContentRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.delete_content] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Validate the universe domain. - self._validate_universe_domain() - - # Send the request. - rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - def get_content(self, - request: Optional[Union[content.GetContentRequest, dict]] = None, - *, - name: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> analyze.Content: - r"""Get a content resource. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import dataplex_v1 - - def sample_get_content(): - # Create a client - client = dataplex_v1.ContentServiceClient() - - # Initialize request argument(s) - request = dataplex_v1.GetContentRequest( - name="name_value", - ) - - # Make the request - response = client.get_content(request=request) - - # Handle the response - print(response) - - Args: - request (Union[google.cloud.dataplex_v1.types.GetContentRequest, dict]): - The request object. Get content request. - name (str): - Required. The resource name of the content: - projects/{project_id}/locations/{location_id}/lakes/{lake_id}/content/{content_id} - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.cloud.dataplex_v1.types.Content: - Content represents a user-visible - notebook or a sql script - - """ - # Create or coerce a protobuf request object. 
- # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([name]) - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, content.GetContentRequest): - request = content.GetContentRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.get_content] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Validate the universe domain. - self._validate_universe_domain() - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - def get_iam_policy(self, - request: Optional[Union[iam_policy_pb2.GetIamPolicyRequest, dict]] = None, - *, - resource: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> policy_pb2.Policy: - r"""Gets the access control policy for a contentitem resource. A - ``NOT_FOUND`` error is returned if the resource does not exist. - An empty policy is returned if the resource exists but does not - have a policy set on it. - - Caller must have Google IAM ``dataplex.content.getIamPolicy`` - permission on the resource. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import dataplex_v1 - from google.iam.v1 import iam_policy_pb2 # type: ignore - - def sample_get_iam_policy(): - # Create a client - client = dataplex_v1.ContentServiceClient() - - # Initialize request argument(s) - request = iam_policy_pb2.GetIamPolicyRequest( - resource="resource_value", - ) - - # Make the request - response = client.get_iam_policy(request=request) - - # Handle the response - print(response) - - Args: - request (Union[google.iam.v1.iam_policy_pb2.GetIamPolicyRequest, dict]): - The request object. Request message for ``GetIamPolicy`` method. - resource (str): - REQUIRED: The resource for which the - policy is being requested. See the - operation documentation for the - appropriate value for this field. - - This corresponds to the ``resource`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. 
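# A flattened-argument sketch for the IAM getter above, assuming a client as
# constructed in the earlier sketch; the resource name is a placeholder.
policy = client.get_iam_policy(
    resource="projects/my-project/locations/us-central1/lakes/my-lake/content/my-content",
)
for binding in policy.bindings:
    print(binding.role, list(binding.members))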
- - Returns: - google.iam.v1.policy_pb2.Policy: - An Identity and Access Management (IAM) policy, which specifies access - controls for Google Cloud resources. - - A Policy is a collection of bindings. A binding binds - one or more members, or principals, to a single role. - Principals can be user accounts, service accounts, - Google groups, and domains (such as G Suite). A role - is a named list of permissions; each role can be an - IAM predefined role or a user-created custom role. - - For some types of Google Cloud resources, a binding - can also specify a condition, which is a logical - expression that allows access to a resource only if - the expression evaluates to true. A condition can add - constraints based on attributes of the request, the - resource, or both. To learn which resources support - conditions in their IAM policies, see the [IAM - documentation](\ https://cloud.google.com/iam/help/conditions/resource-policies). - - **JSON example:** - - :literal:`\` { "bindings": [ { "role": "roles/resourcemanager.organizationAdmin", "members": [ "user:mike@example.com", "group:admins@example.com", "domain:google.com", "serviceAccount:my-project-id@appspot.gserviceaccount.com" ] }, { "role": "roles/resourcemanager.organizationViewer", "members": [ "user:eve@example.com" ], "condition": { "title": "expirable access", "description": "Does not grant access after Sep 2020", "expression": "request.time < timestamp('2020-10-01T00:00:00.000Z')", } } ], "etag": "BwWWja0YfJA=", "version": 3 }`\ \` - - **YAML example:** - - :literal:`\` bindings: - members: - user:mike@example.com - group:admins@example.com - domain:google.com - serviceAccount:my-project-id@appspot.gserviceaccount.com role: roles/resourcemanager.organizationAdmin - members: - user:eve@example.com role: roles/resourcemanager.organizationViewer condition: title: expirable access description: Does not grant access after Sep 2020 expression: request.time < timestamp('2020-10-01T00:00:00.000Z') etag: BwWWja0YfJA= version: 3`\ \` - - For a description of IAM and its features, see the - [IAM - documentation](\ https://cloud.google.com/iam/docs/). - - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([resource]) - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - if isinstance(request, dict): - # - The request isn't a proto-plus wrapped type, - # so it must be constructed via keyword expansion. - request = iam_policy_pb2.GetIamPolicyRequest(**request) - elif not request: - # Null request, just make one. - request = iam_policy_pb2.GetIamPolicyRequest() - if resource is not None: - request.resource = resource - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.get_iam_policy] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("resource", request.resource), - )), - ) - - # Validate the universe domain. - self._validate_universe_domain() - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. 
- return response - - def set_iam_policy(self, - request: Optional[Union[iam_policy_pb2.SetIamPolicyRequest, dict]] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> policy_pb2.Policy: - r"""Sets the access control policy on the specified contentitem - resource. Replaces any existing policy. - - Caller must have Google IAM ``dataplex.content.setIamPolicy`` - permission on the resource. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import dataplex_v1 - from google.iam.v1 import iam_policy_pb2 # type: ignore - - def sample_set_iam_policy(): - # Create a client - client = dataplex_v1.ContentServiceClient() - - # Initialize request argument(s) - request = iam_policy_pb2.SetIamPolicyRequest( - resource="resource_value", - ) - - # Make the request - response = client.set_iam_policy(request=request) - - # Handle the response - print(response) - - Args: - request (Union[google.iam.v1.iam_policy_pb2.SetIamPolicyRequest, dict]): - The request object. Request message for ``SetIamPolicy`` method. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.iam.v1.policy_pb2.Policy: - An Identity and Access Management (IAM) policy, which specifies access - controls for Google Cloud resources. - - A Policy is a collection of bindings. A binding binds - one or more members, or principals, to a single role. - Principals can be user accounts, service accounts, - Google groups, and domains (such as G Suite). A role - is a named list of permissions; each role can be an - IAM predefined role or a user-created custom role. - - For some types of Google Cloud resources, a binding - can also specify a condition, which is a logical - expression that allows access to a resource only if - the expression evaluates to true. A condition can add - constraints based on attributes of the request, the - resource, or both. To learn which resources support - conditions in their IAM policies, see the [IAM - documentation](\ https://cloud.google.com/iam/help/conditions/resource-policies). 
- - **JSON example:** - - :literal:`\` { "bindings": [ { "role": "roles/resourcemanager.organizationAdmin", "members": [ "user:mike@example.com", "group:admins@example.com", "domain:google.com", "serviceAccount:my-project-id@appspot.gserviceaccount.com" ] }, { "role": "roles/resourcemanager.organizationViewer", "members": [ "user:eve@example.com" ], "condition": { "title": "expirable access", "description": "Does not grant access after Sep 2020", "expression": "request.time < timestamp('2020-10-01T00:00:00.000Z')", } } ], "etag": "BwWWja0YfJA=", "version": 3 }`\ \` - - **YAML example:** - - :literal:`\` bindings: - members: - user:mike@example.com - group:admins@example.com - domain:google.com - serviceAccount:my-project-id@appspot.gserviceaccount.com role: roles/resourcemanager.organizationAdmin - members: - user:eve@example.com role: roles/resourcemanager.organizationViewer condition: title: expirable access description: Does not grant access after Sep 2020 expression: request.time < timestamp('2020-10-01T00:00:00.000Z') etag: BwWWja0YfJA= version: 3`\ \` - - For a description of IAM and its features, see the - [IAM - documentation](\ https://cloud.google.com/iam/docs/). - - """ - # Create or coerce a protobuf request object. - if isinstance(request, dict): - # - The request isn't a proto-plus wrapped type, - # so it must be constructed via keyword expansion. - request = iam_policy_pb2.SetIamPolicyRequest(**request) - elif not request: - # Null request, just make one. - request = iam_policy_pb2.SetIamPolicyRequest() - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.set_iam_policy] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("resource", request.resource), - )), - ) - - # Validate the universe domain. - self._validate_universe_domain() - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - def test_iam_permissions(self, - request: Optional[Union[iam_policy_pb2.TestIamPermissionsRequest, dict]] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> iam_policy_pb2.TestIamPermissionsResponse: - r"""Returns the caller's permissions on a resource. If the resource - does not exist, an empty set of permissions is returned (a - ``NOT_FOUND`` error is not returned). - - A caller is not required to have Google IAM permission to make - this request. - - Note: This operation is designed to be used for building - permission-aware UIs and command-line tools, not for - authorization checking. This operation may "fail open" without - warning. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. 
- # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import dataplex_v1 - from google.iam.v1 import iam_policy_pb2 # type: ignore - - def sample_test_iam_permissions(): - # Create a client - client = dataplex_v1.ContentServiceClient() - - # Initialize request argument(s) - request = iam_policy_pb2.TestIamPermissionsRequest( - resource="resource_value", - permissions=['permissions_value1', 'permissions_value2'], - ) - - # Make the request - response = client.test_iam_permissions(request=request) - - # Handle the response - print(response) - - Args: - request (Union[google.iam.v1.iam_policy_pb2.TestIamPermissionsRequest, dict]): - The request object. Request message for ``TestIamPermissions`` method. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.iam.v1.iam_policy_pb2.TestIamPermissionsResponse: - Response message for TestIamPermissions method. - """ - # Create or coerce a protobuf request object. - if isinstance(request, dict): - # - The request isn't a proto-plus wrapped type, - # so it must be constructed via keyword expansion. - request = iam_policy_pb2.TestIamPermissionsRequest(**request) - elif not request: - # Null request, just make one. - request = iam_policy_pb2.TestIamPermissionsRequest() - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.test_iam_permissions] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("resource", request.resource), - )), - ) - - # Validate the universe domain. - self._validate_universe_domain() - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - def list_content(self, - request: Optional[Union[content.ListContentRequest, dict]] = None, - *, - parent: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> pagers.ListContentPager: - r"""List content. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import dataplex_v1 - - def sample_list_content(): - # Create a client - client = dataplex_v1.ContentServiceClient() - - # Initialize request argument(s) - request = dataplex_v1.ListContentRequest( - parent="parent_value", - ) - - # Make the request - page_result = client.list_content(request=request) - - # Handle the response - for response in page_result: - print(response) - - Args: - request (Union[google.cloud.dataplex_v1.types.ListContentRequest, dict]): - The request object. List content request. Returns the - BASIC Content view. 
- parent (str): - Required. The resource name of the parent lake: - projects/{project_id}/locations/{location_id}/lakes/{lake_id} - - This corresponds to the ``parent`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.cloud.dataplex_v1.services.content_service.pagers.ListContentPager: - List content response. - - Iterating over this object will yield - results and resolve additional pages - automatically. - - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([parent]) - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, content.ListContentRequest): - request = content.ListContentRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if parent is not None: - request.parent = parent - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.list_content] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), - ) - - # Validate the universe domain. - self._validate_universe_domain() - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # This method is paged; wrap the response in a pager, which provides - # an `__iter__` convenience method. - response = pagers.ListContentPager( - method=rpc, - request=request, - response=response, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - def __enter__(self) -> "ContentServiceClient": - return self - - def __exit__(self, type, value, traceback): - """Releases underlying transport's resources. - - .. warning:: - ONLY use as a context manager if the transport is NOT shared - with other clients! Exiting the with block will CLOSE the transport - and may cause errors in other clients! - """ - self.transport.close() - - def list_operations( - self, - request: Optional[operations_pb2.ListOperationsRequest] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> operations_pb2.ListOperationsResponse: - r"""Lists operations that match the specified filter in the request. - - Args: - request (:class:`~.operations_pb2.ListOperationsRequest`): - The request object. Request message for - `ListOperations` method. - retry (google.api_core.retry.Retry): Designation of what errors, - if any, should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. 
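# An iteration sketch for the list_content pager described above, assuming a
# client as constructed in the earlier sketch; the parent lake is a
# placeholder. Item-level iteration resolves additional pages transparently.
for item in client.list_content(
        parent="projects/my-project/locations/us-central1/lakes/my-lake"):
    print(item.name)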
- Returns: - ~.operations_pb2.ListOperationsResponse: - Response message for ``ListOperations`` method. - """ - # Create or coerce a protobuf request object. - # The request isn't a proto-plus wrapped type, - # so it must be constructed via keyword expansion. - if isinstance(request, dict): - request = operations_pb2.ListOperationsRequest(**request) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.list_operations] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata( - (("name", request.name),)), - ) - - # Validate the universe domain. - self._validate_universe_domain() - - # Send the request. - response = rpc( - request, retry=retry, timeout=timeout, metadata=metadata,) - - # Done; return the response. - return response - - def get_operation( - self, - request: Optional[operations_pb2.GetOperationRequest] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> operations_pb2.Operation: - r"""Gets the latest state of a long-running operation. - - Args: - request (:class:`~.operations_pb2.GetOperationRequest`): - The request object. Request message for - `GetOperation` method. - retry (google.api_core.retry.Retry): Designation of what errors, - if any, should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - Returns: - ~.operations_pb2.Operation: - An ``Operation`` object. - """ - # Create or coerce a protobuf request object. - # The request isn't a proto-plus wrapped type, - # so it must be constructed via keyword expansion. - if isinstance(request, dict): - request = operations_pb2.GetOperationRequest(**request) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.get_operation] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata( - (("name", request.name),)), - ) - - # Validate the universe domain. - self._validate_universe_domain() - - # Send the request. - response = rpc( - request, retry=retry, timeout=timeout, metadata=metadata,) - - # Done; return the response. - return response - - def delete_operation( - self, - request: Optional[operations_pb2.DeleteOperationRequest] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> None: - r"""Deletes a long-running operation. - - This method indicates that the client is no longer interested - in the operation result. It does not cancel the operation. - If the server doesn't support this method, it returns - `google.rpc.Code.UNIMPLEMENTED`. - - Args: - request (:class:`~.operations_pb2.DeleteOperationRequest`): - The request object. Request message for - `DeleteOperation` method. - retry (google.api_core.retry.Retry): Designation of what errors, - if any, should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. 
- Returns: - None - """ - # Create or coerce a protobuf request object. - # The request isn't a proto-plus wrapped type, - # so it must be constructed via keyword expansion. - if isinstance(request, dict): - request = operations_pb2.DeleteOperationRequest(**request) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.delete_operation] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata( - (("name", request.name),)), - ) - - # Validate the universe domain. - self._validate_universe_domain() - - # Send the request. - rpc(request, retry=retry, timeout=timeout, metadata=metadata,) - - def cancel_operation( - self, - request: Optional[operations_pb2.CancelOperationRequest] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> None: - r"""Starts asynchronous cancellation on a long-running operation. - - The server makes a best effort to cancel the operation, but success - is not guaranteed. If the server doesn't support this method, it returns - `google.rpc.Code.UNIMPLEMENTED`. - - Args: - request (:class:`~.operations_pb2.CancelOperationRequest`): - The request object. Request message for - `CancelOperation` method. - retry (google.api_core.retry.Retry): Designation of what errors, - if any, should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - Returns: - None - """ - # Create or coerce a protobuf request object. - # The request isn't a proto-plus wrapped type, - # so it must be constructed via keyword expansion. - if isinstance(request, dict): - request = operations_pb2.CancelOperationRequest(**request) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.cancel_operation] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata( - (("name", request.name),)), - ) - - # Validate the universe domain. - self._validate_universe_domain() - - # Send the request. - rpc(request, retry=retry, timeout=timeout, metadata=metadata,) - - def get_location( - self, - request: Optional[locations_pb2.GetLocationRequest] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> locations_pb2.Location: - r"""Gets information about a location. - - Args: - request (:class:`~.location_pb2.GetLocationRequest`): - The request object. Request message for - `GetLocation` method. - retry (google.api_core.retry.Retry): Designation of what errors, - if any, should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - Returns: - ~.location_pb2.Location: - Location object. - """ - # Create or coerce a protobuf request object. - # The request isn't a proto-plus wrapped type, - # so it must be constructed via keyword expansion. 
- if isinstance(request, dict): - request = locations_pb2.GetLocationRequest(**request) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.get_location] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata( - (("name", request.name),)), - ) - - # Validate the universe domain. - self._validate_universe_domain() - - # Send the request. - response = rpc( - request, retry=retry, timeout=timeout, metadata=metadata,) - - # Done; return the response. - return response - - def list_locations( - self, - request: Optional[locations_pb2.ListLocationsRequest] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> locations_pb2.ListLocationsResponse: - r"""Lists information about the supported locations for this service. - - Args: - request (:class:`~.location_pb2.ListLocationsRequest`): - The request object. Request message for - `ListLocations` method. - retry (google.api_core.retry.Retry): Designation of what errors, - if any, should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - Returns: - ~.location_pb2.ListLocationsResponse: - Response message for ``ListLocations`` method. - """ - # Create or coerce a protobuf request object. - # The request isn't a proto-plus wrapped type, - # so it must be constructed via keyword expansion. - if isinstance(request, dict): - request = locations_pb2.ListLocationsRequest(**request) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.list_locations] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata( - (("name", request.name),)), - ) - - # Validate the universe domain. - self._validate_universe_domain() - - # Send the request. - response = rpc( - request, retry=retry, timeout=timeout, metadata=metadata,) - - # Done; return the response. - return response - - -DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(gapic_version=package_version.__version__) - - -__all__ = ( - "ContentServiceClient", -) diff --git a/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/content_service/pagers.py b/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/content_service/pagers.py deleted file mode 100644 index 3659ec82f6fd..000000000000 --- a/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/content_service/pagers.py +++ /dev/null @@ -1,163 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
-# See the License for the specific language governing permissions and -# limitations under the License. -# -from google.api_core import gapic_v1 -from google.api_core import retry as retries -from google.api_core import retry_async as retries_async -from typing import Any, AsyncIterator, Awaitable, Callable, Sequence, Tuple, Optional, Iterator, Union -try: - OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault, None] - OptionalAsyncRetry = Union[retries_async.AsyncRetry, gapic_v1.method._MethodDefault, None] -except AttributeError: # pragma: NO COVER - OptionalRetry = Union[retries.Retry, object, None] # type: ignore - OptionalAsyncRetry = Union[retries_async.AsyncRetry, object, None] # type: ignore - -from google.cloud.dataplex_v1.types import analyze -from google.cloud.dataplex_v1.types import content - - -class ListContentPager: - """A pager for iterating through ``list_content`` requests. - - This class thinly wraps an initial - :class:`google.cloud.dataplex_v1.types.ListContentResponse` object, and - provides an ``__iter__`` method to iterate through its - ``content`` field. - - If there are more pages, the ``__iter__`` method will make additional - ``ListContent`` requests and continue to iterate - through the ``content`` field on the - corresponding responses. - - All the usual :class:`google.cloud.dataplex_v1.types.ListContentResponse` - attributes are available on the pager. If multiple requests are made, only - the most recent response is retained, and thus used for attribute lookup. - """ - def __init__(self, - method: Callable[..., content.ListContentResponse], - request: content.ListContentRequest, - response: content.ListContentResponse, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = ()): - """Instantiate the pager. - - Args: - method (Callable): The method that was originally called, and - which instantiated this pager. - request (google.cloud.dataplex_v1.types.ListContentRequest): - The initial request object. - response (google.cloud.dataplex_v1.types.ListContentResponse): - The initial response object. - retry (google.api_core.retry.Retry): Designation of what errors, - if any, should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - """ - self._method = method - self._request = content.ListContentRequest(request) - self._response = response - self._retry = retry - self._timeout = timeout - self._metadata = metadata - - def __getattr__(self, name: str) -> Any: - return getattr(self._response, name) - - @property - def pages(self) -> Iterator[content.ListContentResponse]: - yield self._response - while self._response.next_page_token: - self._request.page_token = self._response.next_page_token - self._response = self._method(self._request, retry=self._retry, timeout=self._timeout, metadata=self._metadata) - yield self._response - - def __iter__(self) -> Iterator[analyze.Content]: - for page in self.pages: - yield from page.content - - def __repr__(self) -> str: - return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) - - -class ListContentAsyncPager: - """A pager for iterating through ``list_content`` requests. - - This class thinly wraps an initial - :class:`google.cloud.dataplex_v1.types.ListContentResponse` object, and - provides an ``__aiter__`` method to iterate through its - ``content`` field. 
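# A page-level sketch for ListContentPager above: .pages yields raw
# ListContentResponse objects rather than individual Content items. The
# parent lake is a placeholder assumption.
pager = client.list_content(
    parent="projects/my-project/locations/us-central1/lakes/my-lake")
for page in pager.pages:
    print(len(page.content), "items on this page")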
- - If there are more pages, the ``__aiter__`` method will make additional - ``ListContent`` requests and continue to iterate - through the ``content`` field on the - corresponding responses. - - All the usual :class:`google.cloud.dataplex_v1.types.ListContentResponse` - attributes are available on the pager. If multiple requests are made, only - the most recent response is retained, and thus used for attribute lookup. - """ - def __init__(self, - method: Callable[..., Awaitable[content.ListContentResponse]], - request: content.ListContentRequest, - response: content.ListContentResponse, - *, - retry: OptionalAsyncRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = ()): - """Instantiate the pager. - - Args: - method (Callable): The method that was originally called, and - which instantiated this pager. - request (google.cloud.dataplex_v1.types.ListContentRequest): - The initial request object. - response (google.cloud.dataplex_v1.types.ListContentResponse): - The initial response object. - retry (google.api_core.retry.AsyncRetry): Designation of what errors, - if any, should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - """ - self._method = method - self._request = content.ListContentRequest(request) - self._response = response - self._retry = retry - self._timeout = timeout - self._metadata = metadata - - def __getattr__(self, name: str) -> Any: - return getattr(self._response, name) - - @property - async def pages(self) -> AsyncIterator[content.ListContentResponse]: - yield self._response - while self._response.next_page_token: - self._request.page_token = self._response.next_page_token - self._response = await self._method(self._request, retry=self._retry, timeout=self._timeout, metadata=self._metadata) - yield self._response - - def __aiter__(self) -> AsyncIterator[analyze.Content]: - async def async_generator(): - async for page in self.pages: - for response in page.content: - yield response - - return async_generator() - - def __repr__(self) -> str: - return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) diff --git a/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/content_service/transports/README.rst b/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/content_service/transports/README.rst deleted file mode 100644 index f737919bf8e5..000000000000 --- a/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/content_service/transports/README.rst +++ /dev/null @@ -1,9 +0,0 @@ - -transport inheritance structure -_______________________________ - -`ContentServiceTransport` is the ABC for all transports. -- public child `ContentServiceGrpcTransport` for sync gRPC transport (defined in `grpc.py`). -- public child `ContentServiceGrpcAsyncIOTransport` for async gRPC transport (defined in `grpc_asyncio.py`). -- private child `_BaseContentServiceRestTransport` for base REST transport with inner classes `_BaseMETHOD` (defined in `rest_base.py`). -- public child `ContentServiceRestTransport` for sync REST transport with inner classes `METHOD` derived from the parent's corresponding `_BaseMETHOD` classes (defined in `rest.py`).
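The ListContentPager and ListContentAsyncPager classes deleted above both implement the same page-token protocol: yield the current response, then re-issue the request with page_token set to the previous response's next_page_token until the token comes back empty. A minimal, self-contained sketch of that loop follows; PageResponse, fake_list_content, and the page data are illustrative stand-ins, not google-cloud-dataplex types.

from dataclasses import dataclass, field
from typing import Iterator, List


@dataclass
class PageResponse:
    # Toy stand-in for ListContentResponse: one page of items plus a token.
    content: List[str] = field(default_factory=list)
    next_page_token: str = ""


# Canned pages keyed by page token; "" is the first request.
PAGES = {
    "": PageResponse(["notebook-1", "script-1"], "p2"),
    "p2": PageResponse(["script-2"], ""),
}


def fake_list_content(page_token: str = "") -> PageResponse:
    # Stand-in for the ListContent RPC.
    return PAGES[page_token]


def iterate_content() -> Iterator[str]:
    # Mirrors ListContentPager.pages/__iter__: keep fetching while the
    # server returns a non-empty next_page_token.
    response = fake_list_content()
    while True:
        yield from response.content
        if not response.next_page_token:
            break
        response = fake_list_content(page_token=response.next_page_token)


print(list(iterate_content()))  # ['notebook-1', 'script-1', 'script-2']

Because the pager keeps only the most recent response, attribute lookups forwarded through __getattr__ always reflect the last page fetched, as the docstrings above note.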
diff --git a/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/content_service/transports/__init__.py b/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/content_service/transports/__init__.py deleted file mode 100644 index 700cca6c0dfc..000000000000 --- a/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/content_service/transports/__init__.py +++ /dev/null @@ -1,33 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -from collections import OrderedDict -from typing import Dict, Type - -from .base import ContentServiceTransport -from .grpc import ContentServiceGrpcTransport -from .grpc_asyncio import ContentServiceGrpcAsyncIOTransport - - -# Compile a registry of transports. -_transport_registry = OrderedDict() # type: Dict[str, Type[ContentServiceTransport]] -_transport_registry['grpc'] = ContentServiceGrpcTransport -_transport_registry['grpc_asyncio'] = ContentServiceGrpcAsyncIOTransport - -__all__ = ( - 'ContentServiceTransport', - 'ContentServiceGrpcTransport', - 'ContentServiceGrpcAsyncIOTransport', -) diff --git a/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/content_service/transports/base.py b/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/content_service/transports/base.py deleted file mode 100644 index 7d77d96adb47..000000000000 --- a/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/content_service/transports/base.py +++ /dev/null @@ -1,377 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# -import abc -from typing import Awaitable, Callable, Dict, Optional, Sequence, Union - -from google.cloud.dataplex_v1 import gapic_version as package_version - -import google.auth # type: ignore -import google.api_core -from google.api_core import exceptions as core_exceptions -from google.api_core import gapic_v1 -from google.api_core import retry as retries -from google.auth import credentials as ga_credentials # type: ignore -from google.oauth2 import service_account # type: ignore - -from google.cloud.dataplex_v1.types import analyze -from google.cloud.dataplex_v1.types import content -from google.cloud.dataplex_v1.types import content as gcd_content -from google.cloud.location import locations_pb2 # type: ignore -from google.iam.v1 import iam_policy_pb2 # type: ignore -from google.iam.v1 import policy_pb2 # type: ignore -from google.longrunning import operations_pb2 # type: ignore -from google.protobuf import empty_pb2 # type: ignore - -DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(gapic_version=package_version.__version__) - - -class ContentServiceTransport(abc.ABC): - """Abstract transport class for ContentService.""" - - AUTH_SCOPES = ( - 'https://www.googleapis.com/auth/cloud-platform', - ) - - DEFAULT_HOST: str = 'dataplex.googleapis.com' - def __init__( - self, *, - host: str = DEFAULT_HOST, - credentials: Optional[ga_credentials.Credentials] = None, - credentials_file: Optional[str] = None, - scopes: Optional[Sequence[str]] = None, - quota_project_id: Optional[str] = None, - client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, - always_use_jwt_access: Optional[bool] = False, - api_audience: Optional[str] = None, - **kwargs, - ) -> None: - """Instantiate the transport. - - Args: - host (Optional[str]): - The hostname to connect to (default: 'dataplex.googleapis.com'). - credentials (Optional[google.auth.credentials.Credentials]): The - authorization credentials to attach to requests. These - credentials identify the application to the service; if none - are specified, the client will attempt to ascertain the - credentials from the environment. - credentials_file (Optional[str]): A file with credentials that can - be loaded with :func:`google.auth.load_credentials_from_file`. - This argument is mutually exclusive with credentials. - scopes (Optional[Sequence[str]]): A list of scopes. - quota_project_id (Optional[str]): An optional project to use for billing - and quota. - client_info (google.api_core.gapic_v1.client_info.ClientInfo): - The client info used to send a user-agent string along with - API requests. If ``None``, then default info will be used. - Generally, you only need to set this if you're developing - your own client library. - always_use_jwt_access (Optional[bool]): Whether self signed JWT should - be used for service account credentials. - """ - - scopes_kwargs = {"scopes": scopes, "default_scopes": self.AUTH_SCOPES} - - # Save the scopes. - self._scopes = scopes - if not hasattr(self, "_ignore_credentials"): - self._ignore_credentials: bool = False - - # If no credentials are provided, then determine the appropriate - # defaults. 
- if credentials and credentials_file: - raise core_exceptions.DuplicateCredentialArgs("'credentials_file' and 'credentials' are mutually exclusive") - - if credentials_file is not None: - credentials, _ = google.auth.load_credentials_from_file( - credentials_file, - **scopes_kwargs, - quota_project_id=quota_project_id - ) - elif credentials is None and not self._ignore_credentials: - credentials, _ = google.auth.default(**scopes_kwargs, quota_project_id=quota_project_id) - # Don't apply audience if the credentials file passed from user. - if hasattr(credentials, "with_gdch_audience"): - credentials = credentials.with_gdch_audience(api_audience if api_audience else host) - - # If the credentials are service account credentials, then always try to use self signed JWT. - if always_use_jwt_access and isinstance(credentials, service_account.Credentials) and hasattr(service_account.Credentials, "with_always_use_jwt_access"): - credentials = credentials.with_always_use_jwt_access(True) - - # Save the credentials. - self._credentials = credentials - - # Save the hostname. Default to port 443 (HTTPS) if none is specified. - if ':' not in host: - host += ':443' - self._host = host - - @property - def host(self): - return self._host - - def _prep_wrapped_messages(self, client_info): - # Precompute the wrapped methods. - self._wrapped_methods = { - self.create_content: gapic_v1.method.wrap_method( - self.create_content, - default_timeout=60.0, - client_info=client_info, - ), - self.update_content: gapic_v1.method.wrap_method( - self.update_content, - default_timeout=60.0, - client_info=client_info, - ), - self.delete_content: gapic_v1.method.wrap_method( - self.delete_content, - default_timeout=60.0, - client_info=client_info, - ), - self.get_content: gapic_v1.method.wrap_method( - self.get_content, - default_retry=retries.Retry( - initial=1.0, - maximum=10.0, - multiplier=1.3, - predicate=retries.if_exception_type( - core_exceptions.ServiceUnavailable, - ), - deadline=60.0, - ), - default_timeout=60.0, - client_info=client_info, - ), - self.get_iam_policy: gapic_v1.method.wrap_method( - self.get_iam_policy, - default_retry=retries.Retry( - initial=1.0, - maximum=10.0, - multiplier=1.3, - predicate=retries.if_exception_type( - core_exceptions.ServiceUnavailable, - ), - deadline=60.0, - ), - default_timeout=60.0, - client_info=client_info, - ), - self.set_iam_policy: gapic_v1.method.wrap_method( - self.set_iam_policy, - default_timeout=60.0, - client_info=client_info, - ), - self.test_iam_permissions: gapic_v1.method.wrap_method( - self.test_iam_permissions, - default_retry=retries.Retry( - initial=1.0, - maximum=10.0, - multiplier=1.3, - predicate=retries.if_exception_type( - core_exceptions.ServiceUnavailable, - ), - deadline=60.0, - ), - default_timeout=60.0, - client_info=client_info, - ), - self.list_content: gapic_v1.method.wrap_method( - self.list_content, - default_retry=retries.Retry( - initial=1.0, - maximum=10.0, - multiplier=1.3, - predicate=retries.if_exception_type( - core_exceptions.ServiceUnavailable, - ), - deadline=60.0, - ), - default_timeout=60.0, - client_info=client_info, - ), - self.get_location: gapic_v1.method.wrap_method( - self.get_location, - default_timeout=None, - client_info=client_info, - ), - self.list_locations: gapic_v1.method.wrap_method( - self.list_locations, - default_timeout=None, - client_info=client_info, - ), - self.cancel_operation: gapic_v1.method.wrap_method( - self.cancel_operation, - default_timeout=None, - client_info=client_info, - ), - 
self.delete_operation: gapic_v1.method.wrap_method( - self.delete_operation, - default_timeout=None, - client_info=client_info, - ), - self.get_operation: gapic_v1.method.wrap_method( - self.get_operation, - default_timeout=None, - client_info=client_info, - ), - self.list_operations: gapic_v1.method.wrap_method( - self.list_operations, - default_timeout=None, - client_info=client_info, - ), - } - - def close(self): - """Closes resources associated with the transport. - - .. warning:: - Only call this method if the transport is NOT shared - with other clients - this may cause errors in other clients! - """ - raise NotImplementedError() - - @property - def create_content(self) -> Callable[ - [gcd_content.CreateContentRequest], - Union[ - analyze.Content, - Awaitable[analyze.Content] - ]]: - raise NotImplementedError() - - @property - def update_content(self) -> Callable[ - [gcd_content.UpdateContentRequest], - Union[ - analyze.Content, - Awaitable[analyze.Content] - ]]: - raise NotImplementedError() - - @property - def delete_content(self) -> Callable[ - [content.DeleteContentRequest], - Union[ - empty_pb2.Empty, - Awaitable[empty_pb2.Empty] - ]]: - raise NotImplementedError() - - @property - def get_content(self) -> Callable[ - [content.GetContentRequest], - Union[ - analyze.Content, - Awaitable[analyze.Content] - ]]: - raise NotImplementedError() - - @property - def get_iam_policy(self) -> Callable[ - [iam_policy_pb2.GetIamPolicyRequest], - Union[ - policy_pb2.Policy, - Awaitable[policy_pb2.Policy] - ]]: - raise NotImplementedError() - - @property - def set_iam_policy(self) -> Callable[ - [iam_policy_pb2.SetIamPolicyRequest], - Union[ - policy_pb2.Policy, - Awaitable[policy_pb2.Policy] - ]]: - raise NotImplementedError() - - @property - def test_iam_permissions(self) -> Callable[ - [iam_policy_pb2.TestIamPermissionsRequest], - Union[ - iam_policy_pb2.TestIamPermissionsResponse, - Awaitable[iam_policy_pb2.TestIamPermissionsResponse] - ]]: - raise NotImplementedError() - - @property - def list_content(self) -> Callable[ - [content.ListContentRequest], - Union[ - content.ListContentResponse, - Awaitable[content.ListContentResponse] - ]]: - raise NotImplementedError() - - @property - def list_operations( - self, - ) -> Callable[ - [operations_pb2.ListOperationsRequest], - Union[operations_pb2.ListOperationsResponse, Awaitable[operations_pb2.ListOperationsResponse]], - ]: - raise NotImplementedError() - - @property - def get_operation( - self, - ) -> Callable[ - [operations_pb2.GetOperationRequest], - Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], - ]: - raise NotImplementedError() - - @property - def cancel_operation( - self, - ) -> Callable[ - [operations_pb2.CancelOperationRequest], - None, - ]: - raise NotImplementedError() - - @property - def delete_operation( - self, - ) -> Callable[ - [operations_pb2.DeleteOperationRequest], - None, - ]: - raise NotImplementedError() - - @property - def get_location(self, - ) -> Callable[ - [locations_pb2.GetLocationRequest], - Union[locations_pb2.Location, Awaitable[locations_pb2.Location]], - ]: - raise NotImplementedError() - - @property - def list_locations(self, - ) -> Callable[ - [locations_pb2.ListLocationsRequest], - Union[locations_pb2.ListLocationsResponse, Awaitable[locations_pb2.ListLocationsResponse]], - ]: - raise NotImplementedError() - - @property - def kind(self) -> str: - raise NotImplementedError() - - -__all__ = ( - 'ContentServiceTransport', -) diff --git 
a/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/content_service/transports/grpc.py b/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/content_service/transports/grpc.py deleted file mode 100644 index 54d08d185201..000000000000 --- a/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/content_service/transports/grpc.py +++ /dev/null @@ -1,587 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -import warnings -from typing import Callable, Dict, Optional, Sequence, Tuple, Union - -from google.api_core import grpc_helpers -from google.api_core import gapic_v1 -import google.auth # type: ignore -from google.auth import credentials as ga_credentials # type: ignore -from google.auth.transport.grpc import SslCredentials # type: ignore - -import grpc # type: ignore - -from google.cloud.dataplex_v1.types import analyze -from google.cloud.dataplex_v1.types import content -from google.cloud.dataplex_v1.types import content as gcd_content -from google.cloud.location import locations_pb2 # type: ignore -from google.iam.v1 import iam_policy_pb2 # type: ignore -from google.iam.v1 import policy_pb2 # type: ignore -from google.longrunning import operations_pb2 # type: ignore -from google.protobuf import empty_pb2 # type: ignore -from .base import ContentServiceTransport, DEFAULT_CLIENT_INFO - - -class ContentServiceGrpcTransport(ContentServiceTransport): - """gRPC backend transport for ContentService. - - ContentService manages Notebook and SQL Scripts for Dataplex. - - This class defines the same methods as the primary client, so the - primary client can load the underlying transport implementation - and call it. - - It sends protocol buffers over the wire using gRPC (which is built on - top of HTTP/2); the ``grpcio`` package must be installed. - """ - _stubs: Dict[str, Callable] - - def __init__(self, *, - host: str = 'dataplex.googleapis.com', - credentials: Optional[ga_credentials.Credentials] = None, - credentials_file: Optional[str] = None, - scopes: Optional[Sequence[str]] = None, - channel: Optional[Union[grpc.Channel, Callable[..., grpc.Channel]]] = None, - api_mtls_endpoint: Optional[str] = None, - client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None, - ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None, - client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, - quota_project_id: Optional[str] = None, - client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, - always_use_jwt_access: Optional[bool] = False, - api_audience: Optional[str] = None, - ) -> None: - """Instantiate the transport. - - Args: - host (Optional[str]): - The hostname to connect to (default: 'dataplex.googleapis.com'). - credentials (Optional[google.auth.credentials.Credentials]): The - authorization credentials to attach to requests. 
These - credentials identify the application to the service; if none - are specified, the client will attempt to ascertain the - credentials from the environment. - This argument is ignored if a ``channel`` instance is provided. - credentials_file (Optional[str]): A file with credentials that can - be loaded with :func:`google.auth.load_credentials_from_file`. - This argument is ignored if a ``channel`` instance is provided. - scopes (Optional[Sequence[str]]): A list of scopes. This argument is - ignored if a ``channel`` instance is provided. - channel (Optional[Union[grpc.Channel, Callable[..., grpc.Channel]]]): - A ``Channel`` instance through which to make calls, or a Callable - that constructs and returns one. If set to None, ``self.create_channel`` - is used to create the channel. If a Callable is given, it will be called - with the same arguments as used in ``self.create_channel``. - api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint. - If provided, it overrides the ``host`` argument and tries to create - a mutual TLS channel with client SSL credentials from - ``client_cert_source`` or application default SSL credentials. - client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]): - Deprecated. A callback to provide client SSL certificate bytes and - private key bytes, both in PEM format. It is ignored if - ``api_mtls_endpoint`` is None. - ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials - for the grpc channel. It is ignored if a ``channel`` instance is provided. - client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]): - A callback to provide client certificate bytes and private key bytes, - both in PEM format. It is used to configure a mutual TLS channel. It is - ignored if a ``channel`` instance or ``ssl_channel_credentials`` is provided. - quota_project_id (Optional[str]): An optional project to use for billing - and quota. - client_info (google.api_core.gapic_v1.client_info.ClientInfo): - The client info used to send a user-agent string along with - API requests. If ``None``, then default info will be used. - Generally, you only need to set this if you're developing - your own client library. - always_use_jwt_access (Optional[bool]): Whether self signed JWT should - be used for service account credentials. - - Raises: - google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport - creation failed for any reason. - google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` - and ``credentials_file`` are passed. - """ - self._grpc_channel = None - self._ssl_channel_credentials = ssl_channel_credentials - self._stubs: Dict[str, Callable] = {} - - if api_mtls_endpoint: - warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning) - if client_cert_source: - warnings.warn("client_cert_source is deprecated", DeprecationWarning) - - if isinstance(channel, grpc.Channel): - # Ignore credentials if a channel was passed. - credentials = None - self._ignore_credentials = True - # If a channel was explicitly provided, set it. - self._grpc_channel = channel - self._ssl_channel_credentials = None - - else: - if api_mtls_endpoint: - host = api_mtls_endpoint - - # Create SSL credentials with client_cert_source or application - # default SSL credentials.
- if client_cert_source: - cert, key = client_cert_source() - self._ssl_channel_credentials = grpc.ssl_channel_credentials( - certificate_chain=cert, private_key=key - ) - else: - self._ssl_channel_credentials = SslCredentials().ssl_credentials - - else: - if client_cert_source_for_mtls and not ssl_channel_credentials: - cert, key = client_cert_source_for_mtls() - self._ssl_channel_credentials = grpc.ssl_channel_credentials( - certificate_chain=cert, private_key=key - ) - - # The base transport sets the host, credentials and scopes - super().__init__( - host=host, - credentials=credentials, - credentials_file=credentials_file, - scopes=scopes, - quota_project_id=quota_project_id, - client_info=client_info, - always_use_jwt_access=always_use_jwt_access, - api_audience=api_audience, - ) - - if not self._grpc_channel: - # initialize with the provided callable or the default channel - channel_init = channel or type(self).create_channel - self._grpc_channel = channel_init( - self._host, - # use the credentials which are saved - credentials=self._credentials, - # Set ``credentials_file`` to ``None`` here as - # the credentials that we saved earlier should be used. - credentials_file=None, - scopes=self._scopes, - ssl_credentials=self._ssl_channel_credentials, - quota_project_id=quota_project_id, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ], - ) - - # Wrap messages. This must be done after self._grpc_channel exists - self._prep_wrapped_messages(client_info) - - @classmethod - def create_channel(cls, - host: str = 'dataplex.googleapis.com', - credentials: Optional[ga_credentials.Credentials] = None, - credentials_file: Optional[str] = None, - scopes: Optional[Sequence[str]] = None, - quota_project_id: Optional[str] = None, - **kwargs) -> grpc.Channel: - """Create and return a gRPC channel object. - Args: - host (Optional[str]): The host for the channel to use. - credentials (Optional[~.Credentials]): The - authorization credentials to attach to requests. These - credentials identify this application to the service. If - none are specified, the client will attempt to ascertain - the credentials from the environment. - credentials_file (Optional[str]): A file with credentials that can - be loaded with :func:`google.auth.load_credentials_from_file`. - This argument is mutually exclusive with credentials. - scopes (Optional[Sequence[str]]): An optional list of scopes needed for this - service. These are only used when credentials are not specified and - are passed to :func:`google.auth.default`. - quota_project_id (Optional[str]): An optional project to use for billing - and quota. - kwargs (Optional[dict]): Keyword arguments, which are passed to the - channel creation. - Returns: - grpc.Channel: A gRPC channel object. - - Raises: - google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` - and ``credentials_file`` are passed. - """ - - return grpc_helpers.create_channel( - host, - credentials=credentials, - credentials_file=credentials_file, - quota_project_id=quota_project_id, - default_scopes=cls.AUTH_SCOPES, - scopes=scopes, - default_host=cls.DEFAULT_HOST, - **kwargs - ) - - @property - def grpc_channel(self) -> grpc.Channel: - """Return the channel designed to connect to this service. - """ - return self._grpc_channel - - @property - def create_content(self) -> Callable[ - [gcd_content.CreateContentRequest], - analyze.Content]: - r"""Return a callable for the create content method over gRPC. - - Create a content.
- - Returns: - Callable[[~.CreateContentRequest], - ~.Content]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'create_content' not in self._stubs: - self._stubs['create_content'] = self.grpc_channel.unary_unary( - '/google.cloud.dataplex.v1.ContentService/CreateContent', - request_serializer=gcd_content.CreateContentRequest.serialize, - response_deserializer=analyze.Content.deserialize, - ) - return self._stubs['create_content'] - - @property - def update_content(self) -> Callable[ - [gcd_content.UpdateContentRequest], - analyze.Content]: - r"""Return a callable for the update content method over gRPC. - - Update a content. Only supports full resource update. - - Returns: - Callable[[~.UpdateContentRequest], - ~.Content]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'update_content' not in self._stubs: - self._stubs['update_content'] = self.grpc_channel.unary_unary( - '/google.cloud.dataplex.v1.ContentService/UpdateContent', - request_serializer=gcd_content.UpdateContentRequest.serialize, - response_deserializer=analyze.Content.deserialize, - ) - return self._stubs['update_content'] - - @property - def delete_content(self) -> Callable[ - [content.DeleteContentRequest], - empty_pb2.Empty]: - r"""Return a callable for the delete content method over gRPC. - - Delete a content. - - Returns: - Callable[[~.DeleteContentRequest], - ~.Empty]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'delete_content' not in self._stubs: - self._stubs['delete_content'] = self.grpc_channel.unary_unary( - '/google.cloud.dataplex.v1.ContentService/DeleteContent', - request_serializer=content.DeleteContentRequest.serialize, - response_deserializer=empty_pb2.Empty.FromString, - ) - return self._stubs['delete_content'] - - @property - def get_content(self) -> Callable[ - [content.GetContentRequest], - analyze.Content]: - r"""Return a callable for the get content method over gRPC. - - Get a content resource. - - Returns: - Callable[[~.GetContentRequest], - ~.Content]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'get_content' not in self._stubs: - self._stubs['get_content'] = self.grpc_channel.unary_unary( - '/google.cloud.dataplex.v1.ContentService/GetContent', - request_serializer=content.GetContentRequest.serialize, - response_deserializer=analyze.Content.deserialize, - ) - return self._stubs['get_content'] - - @property - def get_iam_policy(self) -> Callable[ - [iam_policy_pb2.GetIamPolicyRequest], - policy_pb2.Policy]: - r"""Return a callable for the get iam policy method over gRPC. - - Gets the access control policy for a contentitem resource. 
A - ``NOT_FOUND`` error is returned if the resource does not exist. - An empty policy is returned if the resource exists but does not - have a policy set on it. - - Caller must have Google IAM ``dataplex.content.getIamPolicy`` - permission on the resource. - - Returns: - Callable[[~.GetIamPolicyRequest], - ~.Policy]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'get_iam_policy' not in self._stubs: - self._stubs['get_iam_policy'] = self.grpc_channel.unary_unary( - '/google.cloud.dataplex.v1.ContentService/GetIamPolicy', - request_serializer=iam_policy_pb2.GetIamPolicyRequest.SerializeToString, - response_deserializer=policy_pb2.Policy.FromString, - ) - return self._stubs['get_iam_policy'] - - @property - def set_iam_policy(self) -> Callable[ - [iam_policy_pb2.SetIamPolicyRequest], - policy_pb2.Policy]: - r"""Return a callable for the set iam policy method over gRPC. - - Sets the access control policy on the specified contentitem - resource. Replaces any existing policy. - - Caller must have Google IAM ``dataplex.content.setIamPolicy`` - permission on the resource. - - Returns: - Callable[[~.SetIamPolicyRequest], - ~.Policy]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'set_iam_policy' not in self._stubs: - self._stubs['set_iam_policy'] = self.grpc_channel.unary_unary( - '/google.cloud.dataplex.v1.ContentService/SetIamPolicy', - request_serializer=iam_policy_pb2.SetIamPolicyRequest.SerializeToString, - response_deserializer=policy_pb2.Policy.FromString, - ) - return self._stubs['set_iam_policy'] - - @property - def test_iam_permissions(self) -> Callable[ - [iam_policy_pb2.TestIamPermissionsRequest], - iam_policy_pb2.TestIamPermissionsResponse]: - r"""Return a callable for the test iam permissions method over gRPC. - - Returns the caller's permissions on a resource. If the resource - does not exist, an empty set of permissions is returned (a - ``NOT_FOUND`` error is not returned). - - A caller is not required to have Google IAM permission to make - this request. - - Note: This operation is designed to be used for building - permission-aware UIs and command-line tools, not for - authorization checking. This operation may "fail open" without - warning. - - Returns: - Callable[[~.TestIamPermissionsRequest], - ~.TestIamPermissionsResponse]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. 
- if 'test_iam_permissions' not in self._stubs: - self._stubs['test_iam_permissions'] = self.grpc_channel.unary_unary( - '/google.cloud.dataplex.v1.ContentService/TestIamPermissions', - request_serializer=iam_policy_pb2.TestIamPermissionsRequest.SerializeToString, - response_deserializer=iam_policy_pb2.TestIamPermissionsResponse.FromString, - ) - return self._stubs['test_iam_permissions'] - - @property - def list_content(self) -> Callable[ - [content.ListContentRequest], - content.ListContentResponse]: - r"""Return a callable for the list content method over gRPC. - - List content. - - Returns: - Callable[[~.ListContentRequest], - ~.ListContentResponse]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'list_content' not in self._stubs: - self._stubs['list_content'] = self.grpc_channel.unary_unary( - '/google.cloud.dataplex.v1.ContentService/ListContent', - request_serializer=content.ListContentRequest.serialize, - response_deserializer=content.ListContentResponse.deserialize, - ) - return self._stubs['list_content'] - - def close(self): - self.grpc_channel.close() - - @property - def delete_operation( - self, - ) -> Callable[[operations_pb2.DeleteOperationRequest], None]: - r"""Return a callable for the delete_operation method over gRPC. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if "delete_operation" not in self._stubs: - self._stubs["delete_operation"] = self.grpc_channel.unary_unary( - "/google.longrunning.Operations/DeleteOperation", - request_serializer=operations_pb2.DeleteOperationRequest.SerializeToString, - response_deserializer=None, - ) - return self._stubs["delete_operation"] - - @property - def cancel_operation( - self, - ) -> Callable[[operations_pb2.CancelOperationRequest], None]: - r"""Return a callable for the cancel_operation method over gRPC. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if "cancel_operation" not in self._stubs: - self._stubs["cancel_operation"] = self.grpc_channel.unary_unary( - "/google.longrunning.Operations/CancelOperation", - request_serializer=operations_pb2.CancelOperationRequest.SerializeToString, - response_deserializer=None, - ) - return self._stubs["cancel_operation"] - - @property - def get_operation( - self, - ) -> Callable[[operations_pb2.GetOperationRequest], operations_pb2.Operation]: - r"""Return a callable for the get_operation method over gRPC. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. 
- if "get_operation" not in self._stubs: - self._stubs["get_operation"] = self.grpc_channel.unary_unary( - "/google.longrunning.Operations/GetOperation", - request_serializer=operations_pb2.GetOperationRequest.SerializeToString, - response_deserializer=operations_pb2.Operation.FromString, - ) - return self._stubs["get_operation"] - - @property - def list_operations( - self, - ) -> Callable[[operations_pb2.ListOperationsRequest], operations_pb2.ListOperationsResponse]: - r"""Return a callable for the list_operations method over gRPC. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if "list_operations" not in self._stubs: - self._stubs["list_operations"] = self.grpc_channel.unary_unary( - "/google.longrunning.Operations/ListOperations", - request_serializer=operations_pb2.ListOperationsRequest.SerializeToString, - response_deserializer=operations_pb2.ListOperationsResponse.FromString, - ) - return self._stubs["list_operations"] - - @property - def list_locations( - self, - ) -> Callable[[locations_pb2.ListLocationsRequest], locations_pb2.ListLocationsResponse]: - r"""Return a callable for the list locations method over gRPC. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if "list_locations" not in self._stubs: - self._stubs["list_locations"] = self.grpc_channel.unary_unary( - "/google.cloud.location.Locations/ListLocations", - request_serializer=locations_pb2.ListLocationsRequest.SerializeToString, - response_deserializer=locations_pb2.ListLocationsResponse.FromString, - ) - return self._stubs["list_locations"] - - @property - def get_location( - self, - ) -> Callable[[locations_pb2.GetLocationRequest], locations_pb2.Location]: - r"""Return a callable for the list locations method over gRPC. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if "get_location" not in self._stubs: - self._stubs["get_location"] = self.grpc_channel.unary_unary( - "/google.cloud.location.Locations/GetLocation", - request_serializer=locations_pb2.GetLocationRequest.SerializeToString, - response_deserializer=locations_pb2.Location.FromString, - ) - return self._stubs["get_location"] - - @property - def kind(self) -> str: - return "grpc" - - -__all__ = ( - 'ContentServiceGrpcTransport', -) diff --git a/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/content_service/transports/grpc_asyncio.py b/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/content_service/transports/grpc_asyncio.py deleted file mode 100644 index ba7088c07b69..000000000000 --- a/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/content_service/transports/grpc_asyncio.py +++ /dev/null @@ -1,709 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -import inspect -import warnings -from typing import Awaitable, Callable, Dict, Optional, Sequence, Tuple, Union - -from google.api_core import gapic_v1 -from google.api_core import grpc_helpers_async -from google.api_core import exceptions as core_exceptions -from google.api_core import retry_async as retries -from google.auth import credentials as ga_credentials # type: ignore -from google.auth.transport.grpc import SslCredentials # type: ignore - -import grpc # type: ignore -from grpc.experimental import aio # type: ignore - -from google.cloud.dataplex_v1.types import analyze -from google.cloud.dataplex_v1.types import content -from google.cloud.dataplex_v1.types import content as gcd_content -from google.cloud.location import locations_pb2 # type: ignore -from google.iam.v1 import iam_policy_pb2 # type: ignore -from google.iam.v1 import policy_pb2 # type: ignore -from google.longrunning import operations_pb2 # type: ignore -from google.protobuf import empty_pb2 # type: ignore -from .base import ContentServiceTransport, DEFAULT_CLIENT_INFO -from .grpc import ContentServiceGrpcTransport - - -class ContentServiceGrpcAsyncIOTransport(ContentServiceTransport): - """gRPC AsyncIO backend transport for ContentService. - - ContentService manages Notebook and SQL Scripts for Dataplex. - - This class defines the same methods as the primary client, so the - primary client can load the underlying transport implementation - and call it. - - It sends protocol buffers over the wire using gRPC (which is built on - top of HTTP/2); the ``grpcio`` package must be installed. - """ - - _grpc_channel: aio.Channel - _stubs: Dict[str, Callable] = {} - - @classmethod - def create_channel(cls, - host: str = 'dataplex.googleapis.com', - credentials: Optional[ga_credentials.Credentials] = None, - credentials_file: Optional[str] = None, - scopes: Optional[Sequence[str]] = None, - quota_project_id: Optional[str] = None, - **kwargs) -> aio.Channel: - """Create and return a gRPC AsyncIO channel object. - Args: - host (Optional[str]): The host for the channel to use. - credentials (Optional[~.Credentials]): The - authorization credentials to attach to requests. These - credentials identify this application to the service. If - none are specified, the client will attempt to ascertain - the credentials from the environment. - credentials_file (Optional[str]): A file with credentials that can - be loaded with :func:`google.auth.load_credentials_from_file`. - scopes (Optional[Sequence[str]]): An optional list of scopes needed for this - service. These are only used when credentials are not specified and - are passed to :func:`google.auth.default`. - quota_project_id (Optional[str]): An optional project to use for billing - and quota. - kwargs (Optional[dict]): Keyword arguments, which are passed to the - channel creation. - Returns: - aio.Channel: A gRPC AsyncIO channel object.
- """ - - return grpc_helpers_async.create_channel( - host, - credentials=credentials, - credentials_file=credentials_file, - quota_project_id=quota_project_id, - default_scopes=cls.AUTH_SCOPES, - scopes=scopes, - default_host=cls.DEFAULT_HOST, - **kwargs - ) - - def __init__(self, *, - host: str = 'dataplex.googleapis.com', - credentials: Optional[ga_credentials.Credentials] = None, - credentials_file: Optional[str] = None, - scopes: Optional[Sequence[str]] = None, - channel: Optional[Union[aio.Channel, Callable[..., aio.Channel]]] = None, - api_mtls_endpoint: Optional[str] = None, - client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None, - ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None, - client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, - quota_project_id: Optional[str] = None, - client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, - always_use_jwt_access: Optional[bool] = False, - api_audience: Optional[str] = None, - ) -> None: - """Instantiate the transport. - - Args: - host (Optional[str]): - The hostname to connect to (default: 'dataplex.googleapis.com'). - credentials (Optional[google.auth.credentials.Credentials]): The - authorization credentials to attach to requests. These - credentials identify the application to the service; if none - are specified, the client will attempt to ascertain the - credentials from the environment. - This argument is ignored if a ``channel`` instance is provided. - credentials_file (Optional[str]): A file with credentials that can - be loaded with :func:`google.auth.load_credentials_from_file`. - This argument is ignored if a ``channel`` instance is provided. - scopes (Optional[Sequence[str]]): A optional list of scopes needed for this - service. These are only used when credentials are not specified and - are passed to :func:`google.auth.default`. - channel (Optional[Union[aio.Channel, Callable[..., aio.Channel]]]): - A ``Channel`` instance through which to make calls, or a Callable - that constructs and returns one. If set to None, ``self.create_channel`` - is used to create the channel. If a Callable is given, it will be called - with the same arguments as used in ``self.create_channel``. - api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint. - If provided, it overrides the ``host`` argument and tries to create - a mutual TLS channel with client SSL credentials from - ``client_cert_source`` or application default SSL credentials. - client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]): - Deprecated. A callback to provide client SSL certificate bytes and - private key bytes, both in PEM format. It is ignored if - ``api_mtls_endpoint`` is None. - ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials - for the grpc channel. It is ignored if a ``channel`` instance is provided. - client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]): - A callback to provide client certificate bytes and private key bytes, - both in PEM format. It is used to configure a mutual TLS channel. It is - ignored if a ``channel`` instance or ``ssl_channel_credentials`` is provided. - quota_project_id (Optional[str]): An optional project to use for billing - and quota. - client_info (google.api_core.gapic_v1.client_info.ClientInfo): - The client info used to send a user-agent string along with - API requests. If ``None``, then default info will be used. 
- Generally, you only need to set this if you're developing - your own client library. - always_use_jwt_access (Optional[bool]): Whether self signed JWT should - be used for service account credentials. - - Raises: - google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport - creation failed for any reason. - google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` - and ``credentials_file`` are passed. - """ - self._grpc_channel = None - self._ssl_channel_credentials = ssl_channel_credentials - self._stubs: Dict[str, Callable] = {} - - if api_mtls_endpoint: - warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning) - if client_cert_source: - warnings.warn("client_cert_source is deprecated", DeprecationWarning) - - if isinstance(channel, aio.Channel): - # Ignore credentials if a channel was passed. - credentials = None - self._ignore_credentials = True - # If a channel was explicitly provided, set it. - self._grpc_channel = channel - self._ssl_channel_credentials = None - else: - if api_mtls_endpoint: - host = api_mtls_endpoint - - # Create SSL credentials with client_cert_source or application - # default SSL credentials. - if client_cert_source: - cert, key = client_cert_source() - self._ssl_channel_credentials = grpc.ssl_channel_credentials( - certificate_chain=cert, private_key=key - ) - else: - self._ssl_channel_credentials = SslCredentials().ssl_credentials - - else: - if client_cert_source_for_mtls and not ssl_channel_credentials: - cert, key = client_cert_source_for_mtls() - self._ssl_channel_credentials = grpc.ssl_channel_credentials( - certificate_chain=cert, private_key=key - ) - - # The base transport sets the host, credentials and scopes - super().__init__( - host=host, - credentials=credentials, - credentials_file=credentials_file, - scopes=scopes, - quota_project_id=quota_project_id, - client_info=client_info, - always_use_jwt_access=always_use_jwt_access, - api_audience=api_audience, - ) - - if not self._grpc_channel: - # initialize with the provided callable or the default channel - channel_init = channel or type(self).create_channel - self._grpc_channel = channel_init( - self._host, - # use the credentials which are saved - credentials=self._credentials, - # Set ``credentials_file`` to ``None`` here as - # the credentials that we saved earlier should be used. - credentials_file=None, - scopes=self._scopes, - ssl_credentials=self._ssl_channel_credentials, - quota_project_id=quota_project_id, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ], - ) - - # Wrap messages. This must be done after self._grpc_channel exists - self._wrap_with_kind = "kind" in inspect.signature(gapic_v1.method_async.wrap_method).parameters - self._prep_wrapped_messages(client_info) - - @property - def grpc_channel(self) -> aio.Channel: - """Create the channel designed to connect to this service. - - This property caches on the instance; repeated calls return - the same channel. - """ - # Return the channel from cache. - return self._grpc_channel - - @property - def create_content(self) -> Callable[ - [gcd_content.CreateContentRequest], - Awaitable[analyze.Content]]: - r"""Return a callable for the create content method over gRPC. - - Create a content. - - Returns: - Callable[[~.CreateContentRequest], - Awaitable[~.Content]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. 
- # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'create_content' not in self._stubs: - self._stubs['create_content'] = self.grpc_channel.unary_unary( - '/google.cloud.dataplex.v1.ContentService/CreateContent', - request_serializer=gcd_content.CreateContentRequest.serialize, - response_deserializer=analyze.Content.deserialize, - ) - return self._stubs['create_content'] - - @property - def update_content(self) -> Callable[ - [gcd_content.UpdateContentRequest], - Awaitable[analyze.Content]]: - r"""Return a callable for the update content method over gRPC. - - Update a content. Only supports full resource update. - - Returns: - Callable[[~.UpdateContentRequest], - Awaitable[~.Content]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'update_content' not in self._stubs: - self._stubs['update_content'] = self.grpc_channel.unary_unary( - '/google.cloud.dataplex.v1.ContentService/UpdateContent', - request_serializer=gcd_content.UpdateContentRequest.serialize, - response_deserializer=analyze.Content.deserialize, - ) - return self._stubs['update_content'] - - @property - def delete_content(self) -> Callable[ - [content.DeleteContentRequest], - Awaitable[empty_pb2.Empty]]: - r"""Return a callable for the delete content method over gRPC. - - Delete a content. - - Returns: - Callable[[~.DeleteContentRequest], - Awaitable[~.Empty]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'delete_content' not in self._stubs: - self._stubs['delete_content'] = self.grpc_channel.unary_unary( - '/google.cloud.dataplex.v1.ContentService/DeleteContent', - request_serializer=content.DeleteContentRequest.serialize, - response_deserializer=empty_pb2.Empty.FromString, - ) - return self._stubs['delete_content'] - - @property - def get_content(self) -> Callable[ - [content.GetContentRequest], - Awaitable[analyze.Content]]: - r"""Return a callable for the get content method over gRPC. - - Get a content resource. - - Returns: - Callable[[~.GetContentRequest], - Awaitable[~.Content]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'get_content' not in self._stubs: - self._stubs['get_content'] = self.grpc_channel.unary_unary( - '/google.cloud.dataplex.v1.ContentService/GetContent', - request_serializer=content.GetContentRequest.serialize, - response_deserializer=analyze.Content.deserialize, - ) - return self._stubs['get_content'] - - @property - def get_iam_policy(self) -> Callable[ - [iam_policy_pb2.GetIamPolicyRequest], - Awaitable[policy_pb2.Policy]]: - r"""Return a callable for the get iam policy method over gRPC. - - Gets the access control policy for a contentitem resource. A - ``NOT_FOUND`` error is returned if the resource does not exist. - An empty policy is returned if the resource exists but does not - have a policy set on it. 
- - Caller must have Google IAM ``dataplex.content.getIamPolicy`` - permission on the resource. - - Returns: - Callable[[~.GetIamPolicyRequest], - Awaitable[~.Policy]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'get_iam_policy' not in self._stubs: - self._stubs['get_iam_policy'] = self.grpc_channel.unary_unary( - '/google.cloud.dataplex.v1.ContentService/GetIamPolicy', - request_serializer=iam_policy_pb2.GetIamPolicyRequest.SerializeToString, - response_deserializer=policy_pb2.Policy.FromString, - ) - return self._stubs['get_iam_policy'] - - @property - def set_iam_policy(self) -> Callable[ - [iam_policy_pb2.SetIamPolicyRequest], - Awaitable[policy_pb2.Policy]]: - r"""Return a callable for the set iam policy method over gRPC. - - Sets the access control policy on the specified contentitem - resource. Replaces any existing policy. - - Caller must have Google IAM ``dataplex.content.setIamPolicy`` - permission on the resource. - - Returns: - Callable[[~.SetIamPolicyRequest], - Awaitable[~.Policy]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'set_iam_policy' not in self._stubs: - self._stubs['set_iam_policy'] = self.grpc_channel.unary_unary( - '/google.cloud.dataplex.v1.ContentService/SetIamPolicy', - request_serializer=iam_policy_pb2.SetIamPolicyRequest.SerializeToString, - response_deserializer=policy_pb2.Policy.FromString, - ) - return self._stubs['set_iam_policy'] - - @property - def test_iam_permissions(self) -> Callable[ - [iam_policy_pb2.TestIamPermissionsRequest], - Awaitable[iam_policy_pb2.TestIamPermissionsResponse]]: - r"""Return a callable for the test iam permissions method over gRPC. - - Returns the caller's permissions on a resource. If the resource - does not exist, an empty set of permissions is returned (a - ``NOT_FOUND`` error is not returned). - - A caller is not required to have Google IAM permission to make - this request. - - Note: This operation is designed to be used for building - permission-aware UIs and command-line tools, not for - authorization checking. This operation may "fail open" without - warning. - - Returns: - Callable[[~.TestIamPermissionsRequest], - Awaitable[~.TestIamPermissionsResponse]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'test_iam_permissions' not in self._stubs: - self._stubs['test_iam_permissions'] = self.grpc_channel.unary_unary( - '/google.cloud.dataplex.v1.ContentService/TestIamPermissions', - request_serializer=iam_policy_pb2.TestIamPermissionsRequest.SerializeToString, - response_deserializer=iam_policy_pb2.TestIamPermissionsResponse.FromString, - ) - return self._stubs['test_iam_permissions'] - - @property - def list_content(self) -> Callable[ - [content.ListContentRequest], - Awaitable[content.ListContentResponse]]: - r"""Return a callable for the list content method over gRPC. - - List content. 
- - Returns: - Callable[[~.ListContentRequest], - Awaitable[~.ListContentResponse]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'list_content' not in self._stubs: - self._stubs['list_content'] = self.grpc_channel.unary_unary( - '/google.cloud.dataplex.v1.ContentService/ListContent', - request_serializer=content.ListContentRequest.serialize, - response_deserializer=content.ListContentResponse.deserialize, - ) - return self._stubs['list_content'] - - def _prep_wrapped_messages(self, client_info): - """ Precompute the wrapped methods, overriding the base class method to use async wrappers.""" - self._wrapped_methods = { - self.create_content: self._wrap_method( - self.create_content, - default_timeout=60.0, - client_info=client_info, - ), - self.update_content: self._wrap_method( - self.update_content, - default_timeout=60.0, - client_info=client_info, - ), - self.delete_content: self._wrap_method( - self.delete_content, - default_timeout=60.0, - client_info=client_info, - ), - self.get_content: self._wrap_method( - self.get_content, - default_retry=retries.AsyncRetry( - initial=1.0, - maximum=10.0, - multiplier=1.3, - predicate=retries.if_exception_type( - core_exceptions.ServiceUnavailable, - ), - deadline=60.0, - ), - default_timeout=60.0, - client_info=client_info, - ), - self.get_iam_policy: self._wrap_method( - self.get_iam_policy, - default_retry=retries.AsyncRetry( - initial=1.0, - maximum=10.0, - multiplier=1.3, - predicate=retries.if_exception_type( - core_exceptions.ServiceUnavailable, - ), - deadline=60.0, - ), - default_timeout=60.0, - client_info=client_info, - ), - self.set_iam_policy: self._wrap_method( - self.set_iam_policy, - default_timeout=60.0, - client_info=client_info, - ), - self.test_iam_permissions: self._wrap_method( - self.test_iam_permissions, - default_retry=retries.AsyncRetry( - initial=1.0, - maximum=10.0, - multiplier=1.3, - predicate=retries.if_exception_type( - core_exceptions.ServiceUnavailable, - ), - deadline=60.0, - ), - default_timeout=60.0, - client_info=client_info, - ), - self.list_content: self._wrap_method( - self.list_content, - default_retry=retries.AsyncRetry( - initial=1.0, - maximum=10.0, - multiplier=1.3, - predicate=retries.if_exception_type( - core_exceptions.ServiceUnavailable, - ), - deadline=60.0, - ), - default_timeout=60.0, - client_info=client_info, - ), - self.get_location: self._wrap_method( - self.get_location, - default_timeout=None, - client_info=client_info, - ), - self.list_locations: self._wrap_method( - self.list_locations, - default_timeout=None, - client_info=client_info, - ), - self.cancel_operation: self._wrap_method( - self.cancel_operation, - default_timeout=None, - client_info=client_info, - ), - self.delete_operation: self._wrap_method( - self.delete_operation, - default_timeout=None, - client_info=client_info, - ), - self.get_operation: self._wrap_method( - self.get_operation, - default_timeout=None, - client_info=client_info, - ), - self.list_operations: self._wrap_method( - self.list_operations, - default_timeout=None, - client_info=client_info, - ), - } - - def _wrap_method(self, func, *args, **kwargs): - if self._wrap_with_kind: # pragma: NO COVER - kwargs["kind"] = self.kind - return gapic_v1.method_async.wrap_method(func, *args, **kwargs) - - def close(self): - return 
self.grpc_channel.close() - - @property - def kind(self) -> str: - return "grpc_asyncio" - - @property - def delete_operation( - self, - ) -> Callable[[operations_pb2.DeleteOperationRequest], None]: - r"""Return a callable for the delete_operation method over gRPC. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if "delete_operation" not in self._stubs: - self._stubs["delete_operation"] = self.grpc_channel.unary_unary( - "/google.longrunning.Operations/DeleteOperation", - request_serializer=operations_pb2.DeleteOperationRequest.SerializeToString, - response_deserializer=None, - ) - return self._stubs["delete_operation"] - - @property - def cancel_operation( - self, - ) -> Callable[[operations_pb2.CancelOperationRequest], None]: - r"""Return a callable for the cancel_operation method over gRPC. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if "cancel_operation" not in self._stubs: - self._stubs["cancel_operation"] = self.grpc_channel.unary_unary( - "/google.longrunning.Operations/CancelOperation", - request_serializer=operations_pb2.CancelOperationRequest.SerializeToString, - response_deserializer=None, - ) - return self._stubs["cancel_operation"] - - @property - def get_operation( - self, - ) -> Callable[[operations_pb2.GetOperationRequest], operations_pb2.Operation]: - r"""Return a callable for the get_operation method over gRPC. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if "get_operation" not in self._stubs: - self._stubs["get_operation"] = self.grpc_channel.unary_unary( - "/google.longrunning.Operations/GetOperation", - request_serializer=operations_pb2.GetOperationRequest.SerializeToString, - response_deserializer=operations_pb2.Operation.FromString, - ) - return self._stubs["get_operation"] - - @property - def list_operations( - self, - ) -> Callable[[operations_pb2.ListOperationsRequest], operations_pb2.ListOperationsResponse]: - r"""Return a callable for the list_operations method over gRPC. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if "list_operations" not in self._stubs: - self._stubs["list_operations"] = self.grpc_channel.unary_unary( - "/google.longrunning.Operations/ListOperations", - request_serializer=operations_pb2.ListOperationsRequest.SerializeToString, - response_deserializer=operations_pb2.ListOperationsResponse.FromString, - ) - return self._stubs["list_operations"] - - @property - def list_locations( - self, - ) -> Callable[[locations_pb2.ListLocationsRequest], locations_pb2.ListLocationsResponse]: - r"""Return a callable for the list locations method over gRPC. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. 
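-        # The lines below are the same lazily-cached stub idiom used for
-        # every RPC on this transport: build the callable once on first
-        # access, cache it in ``self._stubs``, and return the cached
-        # callable on every later access. A minimal standalone sketch of
-        # the idea (``channel`` and the method path are illustrative
-        # assumptions, not names defined in this module):
-        #
-        #     _stubs = {}
-        #
-        #     def cached_unary_unary(channel, path, serializer, deserializer):
-        #         if path not in _stubs:
-        #             _stubs[path] = channel.unary_unary(
-        #                 path,
-        #                 request_serializer=serializer,
-        #                 response_deserializer=deserializer,
-        #             )
-        #         return _stubs[path]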
-        if "list_locations" not in self._stubs:
-            self._stubs["list_locations"] = self.grpc_channel.unary_unary(
-                "/google.cloud.location.Locations/ListLocations",
-                request_serializer=locations_pb2.ListLocationsRequest.SerializeToString,
-                response_deserializer=locations_pb2.ListLocationsResponse.FromString,
-            )
-        return self._stubs["list_locations"]
-
-    @property
-    def get_location(
-        self,
-    ) -> Callable[[locations_pb2.GetLocationRequest], locations_pb2.Location]:
-        r"""Return a callable for the get location method over gRPC.
-        """
-        # Generate a "stub function" on-the-fly which will actually make
-        # the request.
-        # gRPC handles serialization and deserialization, so we just need
-        # to pass in the functions for each.
-        if "get_location" not in self._stubs:
-            self._stubs["get_location"] = self.grpc_channel.unary_unary(
-                "/google.cloud.location.Locations/GetLocation",
-                request_serializer=locations_pb2.GetLocationRequest.SerializeToString,
-                response_deserializer=locations_pb2.Location.FromString,
-            )
-        return self._stubs["get_location"]
-
-
-__all__ = (
-    'ContentServiceGrpcAsyncIOTransport',
-)
diff --git a/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/data_scan_service/__init__.py b/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/data_scan_service/__init__.py
deleted file mode 100644
index 1500c4168c10..000000000000
--- a/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/data_scan_service/__init__.py
+++ /dev/null
@@ -1,22 +0,0 @@
-# -*- coding: utf-8 -*-
-# Copyright 2024 Google LLC
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-from .client import DataScanServiceClient
-from .async_client import DataScanServiceAsyncClient
-
-__all__ = (
-    'DataScanServiceClient',
-    'DataScanServiceAsyncClient',
-)
diff --git a/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/data_scan_service/async_client.py b/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/data_scan_service/async_client.py
deleted file mode 100644
index d370f4f64327..000000000000
--- a/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/data_scan_service/async_client.py
+++ /dev/null
@@ -1,1665 +0,0 @@
-# -*- coding: utf-8 -*-
-# Copyright 2024 Google LLC
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-from collections import OrderedDict
-import re
-from typing import Dict, Callable, Mapping, MutableMapping, MutableSequence, Optional, Sequence, Tuple, Type, Union

-from google.cloud.dataplex_v1 import gapic_version as package_version

-from google.api_core.client_options import ClientOptions
-from google.api_core import exceptions as core_exceptions
-from google.api_core import gapic_v1
-from google.api_core import retry_async as retries
-from google.auth import credentials as ga_credentials   # type: ignore
-from google.oauth2 import service_account              # type: ignore


-try:
-    OptionalRetry = Union[retries.AsyncRetry, gapic_v1.method._MethodDefault, None]
-except AttributeError:  # pragma: NO COVER
-    OptionalRetry = Union[retries.AsyncRetry, object, None]  # type: ignore

-from google.api_core import operation  # type: ignore
-from google.api_core import operation_async  # type: ignore
-from google.cloud.dataplex_v1.services.data_scan_service import pagers
-from google.cloud.dataplex_v1.types import data_discovery
-from google.cloud.dataplex_v1.types import data_profile
-from google.cloud.dataplex_v1.types import data_quality
-from google.cloud.dataplex_v1.types import datascans
-from google.cloud.dataplex_v1.types import processing
-from google.cloud.dataplex_v1.types import resources
-from google.cloud.dataplex_v1.types import service
-from google.cloud.location import locations_pb2 # type: ignore
-from google.iam.v1 import iam_policy_pb2  # type: ignore
-from google.iam.v1 import policy_pb2  # type: ignore
-from google.longrunning import operations_pb2 # type: ignore
-from google.protobuf import empty_pb2  # type: ignore
-from google.protobuf import field_mask_pb2  # type: ignore
-from google.protobuf import timestamp_pb2  # type: ignore
-from .transports.base import DataScanServiceTransport, DEFAULT_CLIENT_INFO
-from .transports.grpc_asyncio import DataScanServiceGrpcAsyncIOTransport
-from .client import DataScanServiceClient


-class DataScanServiceAsyncClient:
-    """DataScanService manages DataScan resources which can be
-    configured to run various types of data scanning workloads and
-    generate enriched metadata (e.g. Data Profile, Data Quality) for
-    the data source.
-    """

-    _client: DataScanServiceClient

-    # Copy defaults from the synchronous client for use here.
-    # Note: DEFAULT_ENDPOINT is deprecated. Use _DEFAULT_ENDPOINT_TEMPLATE instead.
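-    # The constants and path helpers below are all delegated to the
-    # synchronous DataScanServiceClient, so the async surface cannot drift
-    # from the sync one. For example, the shared helper renders resource
-    # names identically on both clients (values illustrative):
-    #
-    #     DataScanServiceAsyncClient.data_scan_path(
-    #         "my-project", "us-central1", "my-scan")
-    #     # -> "projects/my-project/locations/us-central1/dataScans/my-scan"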
- DEFAULT_ENDPOINT = DataScanServiceClient.DEFAULT_ENDPOINT - DEFAULT_MTLS_ENDPOINT = DataScanServiceClient.DEFAULT_MTLS_ENDPOINT - _DEFAULT_ENDPOINT_TEMPLATE = DataScanServiceClient._DEFAULT_ENDPOINT_TEMPLATE - _DEFAULT_UNIVERSE = DataScanServiceClient._DEFAULT_UNIVERSE - - connection_path = staticmethod(DataScanServiceClient.connection_path) - parse_connection_path = staticmethod(DataScanServiceClient.parse_connection_path) - data_scan_path = staticmethod(DataScanServiceClient.data_scan_path) - parse_data_scan_path = staticmethod(DataScanServiceClient.parse_data_scan_path) - data_scan_job_path = staticmethod(DataScanServiceClient.data_scan_job_path) - parse_data_scan_job_path = staticmethod(DataScanServiceClient.parse_data_scan_job_path) - dataset_path = staticmethod(DataScanServiceClient.dataset_path) - parse_dataset_path = staticmethod(DataScanServiceClient.parse_dataset_path) - entity_path = staticmethod(DataScanServiceClient.entity_path) - parse_entity_path = staticmethod(DataScanServiceClient.parse_entity_path) - common_billing_account_path = staticmethod(DataScanServiceClient.common_billing_account_path) - parse_common_billing_account_path = staticmethod(DataScanServiceClient.parse_common_billing_account_path) - common_folder_path = staticmethod(DataScanServiceClient.common_folder_path) - parse_common_folder_path = staticmethod(DataScanServiceClient.parse_common_folder_path) - common_organization_path = staticmethod(DataScanServiceClient.common_organization_path) - parse_common_organization_path = staticmethod(DataScanServiceClient.parse_common_organization_path) - common_project_path = staticmethod(DataScanServiceClient.common_project_path) - parse_common_project_path = staticmethod(DataScanServiceClient.parse_common_project_path) - common_location_path = staticmethod(DataScanServiceClient.common_location_path) - parse_common_location_path = staticmethod(DataScanServiceClient.parse_common_location_path) - - @classmethod - def from_service_account_info(cls, info: dict, *args, **kwargs): - """Creates an instance of this client using the provided credentials - info. - - Args: - info (dict): The service account private key info. - args: Additional arguments to pass to the constructor. - kwargs: Additional arguments to pass to the constructor. - - Returns: - DataScanServiceAsyncClient: The constructed client. - """ - return DataScanServiceClient.from_service_account_info.__func__(DataScanServiceAsyncClient, info, *args, **kwargs) # type: ignore - - @classmethod - def from_service_account_file(cls, filename: str, *args, **kwargs): - """Creates an instance of this client using the provided credentials - file. - - Args: - filename (str): The path to the service account private key json - file. - args: Additional arguments to pass to the constructor. - kwargs: Additional arguments to pass to the constructor. - - Returns: - DataScanServiceAsyncClient: The constructed client. - """ - return DataScanServiceClient.from_service_account_file.__func__(DataScanServiceAsyncClient, filename, *args, **kwargs) # type: ignore - - from_service_account_json = from_service_account_file - - @classmethod - def get_mtls_endpoint_and_cert_source(cls, client_options: Optional[ClientOptions] = None): - """Return the API endpoint and client cert source for mutual TLS. - - The client cert source is determined in the following order: - (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the - client cert source is None. 
-        (2) if `client_options.client_cert_source` is provided, use the provided one; if the
-        default client cert source exists, use the default one; otherwise the client cert
-        source is None.

-        The API endpoint is determined in the following order:
-        (1) if `client_options.api_endpoint` is provided, use the provided one.
-        (2) if `GOOGLE_API_USE_MTLS_ENDPOINT` environment variable is "always", use the
-        default mTLS endpoint; if the environment variable is "never", use the default API
-        endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise
-        use the default API endpoint.

-        More details can be found at https://google.aip.dev/auth/4114.

-        Args:
-            client_options (google.api_core.client_options.ClientOptions): Custom options for the
-                client. Only the `api_endpoint` and `client_cert_source` properties may be used
-                in this method.

-        Returns:
-            Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the
-                client cert source to use.

-        Raises:
-            google.auth.exceptions.MutualTLSChannelError: If any errors happen.
-        """
-        return DataScanServiceClient.get_mtls_endpoint_and_cert_source(client_options)  # type: ignore

-    @property
-    def transport(self) -> DataScanServiceTransport:
-        """Returns the transport used by the client instance.

-        Returns:
-            DataScanServiceTransport: The transport used by the client instance.
-        """
-        return self._client.transport

-    @property
-    def api_endpoint(self):
-        """Return the API endpoint used by the client instance.

-        Returns:
-            str: The API endpoint used by the client instance.
-        """
-        return self._client._api_endpoint

-    @property
-    def universe_domain(self) -> str:
-        """Return the universe domain used by the client instance.

-        Returns:
-            str: The universe domain used
-                by the client instance.
-        """
-        return self._client._universe_domain

-    get_transport_class = DataScanServiceClient.get_transport_class

-    def __init__(self, *,
-            credentials: Optional[ga_credentials.Credentials] = None,
-            transport: Optional[Union[str, DataScanServiceTransport, Callable[..., DataScanServiceTransport]]] = "grpc_asyncio",
-            client_options: Optional[ClientOptions] = None,
-            client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO,
-            ) -> None:
-        """Instantiates the data scan service async client.

-        Args:
-            credentials (Optional[google.auth.credentials.Credentials]): The
-                authorization credentials to attach to requests. These
-                credentials identify the application to the service; if none
-                are specified, the client will attempt to ascertain the
-                credentials from the environment.
-            transport (Optional[Union[str,DataScanServiceTransport,Callable[..., DataScanServiceTransport]]]):
-                The transport to use, or a Callable that constructs and returns a new transport to use.
-                If a Callable is given, it will be called with the same set of initialization
-                arguments as used in the DataScanServiceTransport constructor.
-                If set to None, a transport is chosen automatically.
-            client_options (Optional[Union[google.api_core.client_options.ClientOptions, dict]]):
-                Custom options for the client.

-                1. The ``api_endpoint`` property can be used to override the
-                default endpoint provided by the client when ``transport`` is
-                not explicitly provided. Only if this property is not set and
-                ``transport`` was not explicitly provided, the endpoint is
-                determined by the GOOGLE_API_USE_MTLS_ENDPOINT environment
-                variable, which can have one of the following values:
-                "always" (always use the default mTLS endpoint), "never" (always
-                use the default regular endpoint) and "auto" (auto-switch to the
-                default mTLS endpoint if client certificate is present; this is
-                the default value).

-                2. If the GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable
-                is "true", then the ``client_cert_source`` property can be used
-                to provide a client certificate for mTLS transport. If
-                not provided, the default SSL client certificate will be used if
-                present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not
-                set, no client certificate will be used.

-                3. The ``universe_domain`` property can be used to override the
-                default "googleapis.com" universe. Note that ``api_endpoint``
-                property still takes precedence; and ``universe_domain`` is
-                currently not supported for mTLS.

-            client_info (google.api_core.gapic_v1.client_info.ClientInfo):
-                The client info used to send a user-agent string along with
-                API requests. If ``None``, then default info will be used.
-                Generally, you only need to set this if you're developing
-                your own client library.

-        Raises:
-            google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport
-                creation failed for any reason.
-        """
-        self._client = DataScanServiceClient(
-            credentials=credentials,
-            transport=transport,
-            client_options=client_options,
-            client_info=client_info,

-        )

-    async def create_data_scan(self,
-            request: Optional[Union[datascans.CreateDataScanRequest, dict]] = None,
-            *,
-            parent: Optional[str] = None,
-            data_scan: Optional[datascans.DataScan] = None,
-            data_scan_id: Optional[str] = None,
-            retry: OptionalRetry = gapic_v1.method.DEFAULT,
-            timeout: Union[float, object] = gapic_v1.method.DEFAULT,
-            metadata: Sequence[Tuple[str, str]] = (),
-            ) -> operation_async.AsyncOperation:
-        r"""Creates a DataScan resource.

-        .. code-block:: python

-            # This snippet has been automatically generated and should be regarded as a
-            # code template only.
-            # It will require modifications to work:
-            # - It may require correct/in-range values for request initialization.
-            # - It may require specifying regional endpoints when creating the service
-            #   client as shown in:
-            #   https://googleapis.dev/python/google-api-core/latest/client_options.html
-            from google.cloud import dataplex_v1

-            async def sample_create_data_scan():
-                # Create a client
-                client = dataplex_v1.DataScanServiceAsyncClient()

-                # Initialize request argument(s)
-                data_scan = dataplex_v1.DataScan()
-                data_scan.data_quality_spec.rules.dimension = "dimension_value"
-                data_scan.data.entity = "entity_value"

-                request = dataplex_v1.CreateDataScanRequest(
-                    parent="parent_value",
-                    data_scan=data_scan,
-                    data_scan_id="data_scan_id_value",
-                )

-                # Make the request
-                operation = client.create_data_scan(request=request)

-                print("Waiting for operation to complete...")

-                response = (await operation).result()

-                # Handle the response
-                print(response)

-        Args:
-            request (Optional[Union[google.cloud.dataplex_v1.types.CreateDataScanRequest, dict]]):
-                The request object. Create dataScan request.
-            parent (:class:`str`):
-                Required. The resource name of the parent location:
-                ``projects/{project}/locations/{location_id}`` where
-                ``project`` refers to a *project_id* or *project_number*
-                and ``location_id`` refers to a GCP region.
- - This corresponds to the ``parent`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - data_scan (:class:`google.cloud.dataplex_v1.types.DataScan`): - Required. DataScan resource. - This corresponds to the ``data_scan`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - data_scan_id (:class:`str`): - Required. DataScan identifier. - - - Must contain only lowercase letters, numbers and - hyphens. - - Must start with a letter. - - Must end with a number or a letter. - - Must be between 1-63 characters. - - Must be unique within the customer project / - location. - - This corresponds to the ``data_scan_id`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.api_core.operation_async.AsyncOperation: - An object representing a long-running operation. - - The result type for the operation will be :class:`google.cloud.dataplex_v1.types.DataScan` Represents a user-visible job which provides the insights for the related - data source. - - For example: - - - Data Quality: generates queries based on the rules - and runs against the data to get data quality - check results. - - Data Profile: analyzes the data in table(s) and - generates insights about the structure, content - and relationships (such as null percent, - cardinality, min/max/mean, etc). - - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([parent, data_scan, data_scan_id]) - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, datascans.CreateDataScanRequest): - request = datascans.CreateDataScanRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if parent is not None: - request.parent = parent - if data_scan is not None: - request.data_scan = data_scan - if data_scan_id is not None: - request.data_scan_id = data_scan_id - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._client._transport._wrapped_methods[self._client._transport.create_data_scan] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Wrap the response in an operation future. - response = operation_async.from_gapic( - response, - self._client._transport.operations_client, - datascans.DataScan, - metadata_type=service.OperationMetadata, - ) - - # Done; return the response. 
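-        # At a typical call site (names illustrative), the coroutine is
-        # awaited for the AsyncOperation handle, and the handle is awaited
-        # for the terminal DataScan:
-        #
-        #     operation = await client.create_data_scan(
-        #         parent=parent, data_scan=data_scan, data_scan_id="my-scan")
-        #     created = await operation.result()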
- return response - - async def update_data_scan(self, - request: Optional[Union[datascans.UpdateDataScanRequest, dict]] = None, - *, - data_scan: Optional[datascans.DataScan] = None, - update_mask: Optional[field_mask_pb2.FieldMask] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> operation_async.AsyncOperation: - r"""Updates a DataScan resource. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import dataplex_v1 - - async def sample_update_data_scan(): - # Create a client - client = dataplex_v1.DataScanServiceAsyncClient() - - # Initialize request argument(s) - data_scan = dataplex_v1.DataScan() - data_scan.data_quality_spec.rules.dimension = "dimension_value" - data_scan.data.entity = "entity_value" - - request = dataplex_v1.UpdateDataScanRequest( - data_scan=data_scan, - ) - - # Make the request - operation = client.update_data_scan(request=request) - - print("Waiting for operation to complete...") - - response = (await operation).result() - - # Handle the response - print(response) - - Args: - request (Optional[Union[google.cloud.dataplex_v1.types.UpdateDataScanRequest, dict]]): - The request object. Update dataScan request. - data_scan (:class:`google.cloud.dataplex_v1.types.DataScan`): - Required. DataScan resource to be updated. - - Only fields specified in ``update_mask`` are updated. - - This corresponds to the ``data_scan`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - update_mask (:class:`google.protobuf.field_mask_pb2.FieldMask`): - Required. Mask of fields to update. - This corresponds to the ``update_mask`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.api_core.operation_async.AsyncOperation: - An object representing a long-running operation. - - The result type for the operation will be :class:`google.cloud.dataplex_v1.types.DataScan` Represents a user-visible job which provides the insights for the related - data source. - - For example: - - - Data Quality: generates queries based on the rules - and runs against the data to get data quality - check results. - - Data Profile: analyzes the data in table(s) and - generates insights about the structure, content - and relationships (such as null percent, - cardinality, min/max/mean, etc). - - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. 
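-        # For example, mixing the two calling styles is rejected (names
-        # illustrative):
-        #
-        #     await client.update_data_scan(
-        #         request=update_request,
-        #         update_mask=mask,  # ValueError: exclusive with ``request``
-        #     )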
- has_flattened_params = any([data_scan, update_mask]) - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, datascans.UpdateDataScanRequest): - request = datascans.UpdateDataScanRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if data_scan is not None: - request.data_scan = data_scan - if update_mask is not None: - request.update_mask = update_mask - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._client._transport._wrapped_methods[self._client._transport.update_data_scan] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("data_scan.name", request.data_scan.name), - )), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Wrap the response in an operation future. - response = operation_async.from_gapic( - response, - self._client._transport.operations_client, - datascans.DataScan, - metadata_type=service.OperationMetadata, - ) - - # Done; return the response. - return response - - async def delete_data_scan(self, - request: Optional[Union[datascans.DeleteDataScanRequest, dict]] = None, - *, - name: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> operation_async.AsyncOperation: - r"""Deletes a DataScan resource. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import dataplex_v1 - - async def sample_delete_data_scan(): - # Create a client - client = dataplex_v1.DataScanServiceAsyncClient() - - # Initialize request argument(s) - request = dataplex_v1.DeleteDataScanRequest( - name="name_value", - ) - - # Make the request - operation = client.delete_data_scan(request=request) - - print("Waiting for operation to complete...") - - response = (await operation).result() - - # Handle the response - print(response) - - Args: - request (Optional[Union[google.cloud.dataplex_v1.types.DeleteDataScanRequest, dict]]): - The request object. Delete dataScan request. - name (:class:`str`): - Required. The resource name of the dataScan: - ``projects/{project}/locations/{location_id}/dataScans/{data_scan_id}`` - where ``project`` refers to a *project_id* or - *project_number* and ``location_id`` refers to a GCP - region. - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. 
- metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.api_core.operation_async.AsyncOperation: - An object representing a long-running operation. - - The result type for the operation will be :class:`google.protobuf.empty_pb2.Empty` A generic empty message that you can re-use to avoid defining duplicated - empty messages in your APIs. A typical example is to - use it as the request or the response type of an API - method. For instance: - - service Foo { - rpc Bar(google.protobuf.Empty) returns - (google.protobuf.Empty); - - } - - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([name]) - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, datascans.DeleteDataScanRequest): - request = datascans.DeleteDataScanRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._client._transport._wrapped_methods[self._client._transport.delete_data_scan] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Wrap the response in an operation future. - response = operation_async.from_gapic( - response, - self._client._transport.operations_client, - empty_pb2.Empty, - metadata_type=service.OperationMetadata, - ) - - # Done; return the response. - return response - - async def get_data_scan(self, - request: Optional[Union[datascans.GetDataScanRequest, dict]] = None, - *, - name: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> datascans.DataScan: - r"""Gets a DataScan resource. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import dataplex_v1 - - async def sample_get_data_scan(): - # Create a client - client = dataplex_v1.DataScanServiceAsyncClient() - - # Initialize request argument(s) - request = dataplex_v1.GetDataScanRequest( - name="name_value", - ) - - # Make the request - response = await client.get_data_scan(request=request) - - # Handle the response - print(response) - - Args: - request (Optional[Union[google.cloud.dataplex_v1.types.GetDataScanRequest, dict]]): - The request object. Get dataScan request. 
- name (:class:`str`): - Required. The resource name of the dataScan: - ``projects/{project}/locations/{location_id}/dataScans/{data_scan_id}`` - where ``project`` refers to a *project_id* or - *project_number* and ``location_id`` refers to a GCP - region. - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.cloud.dataplex_v1.types.DataScan: - Represents a user-visible job which provides the insights for the related - data source. - - For example: - - - Data Quality: generates queries based on the rules - and runs against the data to get data quality - check results. - - Data Profile: analyzes the data in table(s) and - generates insights about the structure, content - and relationships (such as null percent, - cardinality, min/max/mean, etc). - - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([name]) - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, datascans.GetDataScanRequest): - request = datascans.GetDataScanRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._client._transport._wrapped_methods[self._client._transport.get_data_scan] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - async def list_data_scans(self, - request: Optional[Union[datascans.ListDataScansRequest, dict]] = None, - *, - parent: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> pagers.ListDataScansAsyncPager: - r"""Lists DataScans. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. 
- # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import dataplex_v1 - - async def sample_list_data_scans(): - # Create a client - client = dataplex_v1.DataScanServiceAsyncClient() - - # Initialize request argument(s) - request = dataplex_v1.ListDataScansRequest( - parent="parent_value", - ) - - # Make the request - page_result = client.list_data_scans(request=request) - - # Handle the response - async for response in page_result: - print(response) - - Args: - request (Optional[Union[google.cloud.dataplex_v1.types.ListDataScansRequest, dict]]): - The request object. List dataScans request. - parent (:class:`str`): - Required. The resource name of the parent location: - ``projects/{project}/locations/{location_id}`` where - ``project`` refers to a *project_id* or *project_number* - and ``location_id`` refers to a GCP region. - - This corresponds to the ``parent`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.cloud.dataplex_v1.services.data_scan_service.pagers.ListDataScansAsyncPager: - List dataScans response. - - Iterating over this object will yield - results and resolve additional pages - automatically. - - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([parent]) - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, datascans.ListDataScansRequest): - request = datascans.ListDataScansRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if parent is not None: - request.parent = parent - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._client._transport._wrapped_methods[self._client._transport.list_data_scans] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # This method is paged; wrap the response in a pager, which provides - # an `__aiter__` convenience method. - response = pagers.ListDataScansAsyncPager( - method=rpc, - request=request, - response=response, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. 
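-        # Illustrative consumption of the pager (names assumed): the
-        # coroutine is awaited once, after which ``__aiter__`` resolves
-        # further pages transparently:
-        #
-        #     pager = await client.list_data_scans(parent=parent)
-        #     async for data_scan in pager:
-        #         print(data_scan.name)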
-        return response

-    async def run_data_scan(self,
-            request: Optional[Union[datascans.RunDataScanRequest, dict]] = None,
-            *,
-            name: Optional[str] = None,
-            retry: OptionalRetry = gapic_v1.method.DEFAULT,
-            timeout: Union[float, object] = gapic_v1.method.DEFAULT,
-            metadata: Sequence[Tuple[str, str]] = (),
-            ) -> datascans.RunDataScanResponse:
-        r"""Runs an on-demand execution of a DataScan.

-        .. code-block:: python

-            # This snippet has been automatically generated and should be regarded as a
-            # code template only.
-            # It will require modifications to work:
-            # - It may require correct/in-range values for request initialization.
-            # - It may require specifying regional endpoints when creating the service
-            #   client as shown in:
-            #   https://googleapis.dev/python/google-api-core/latest/client_options.html
-            from google.cloud import dataplex_v1

-            async def sample_run_data_scan():
-                # Create a client
-                client = dataplex_v1.DataScanServiceAsyncClient()

-                # Initialize request argument(s)
-                request = dataplex_v1.RunDataScanRequest(
-                    name="name_value",
-                )

-                # Make the request
-                response = await client.run_data_scan(request=request)

-                # Handle the response
-                print(response)

-        Args:
-            request (Optional[Union[google.cloud.dataplex_v1.types.RunDataScanRequest, dict]]):
-                The request object. Run DataScan Request.
-            name (:class:`str`):
-                Required. The resource name of the DataScan:
-                ``projects/{project}/locations/{location_id}/dataScans/{data_scan_id}``,
-                where ``project`` refers to a *project_id* or
-                *project_number* and ``location_id`` refers to a GCP
-                region.

-                Only **OnDemand** data scans are allowed.

-                This corresponds to the ``name`` field
-                on the ``request`` instance; if ``request`` is provided, this
-                should not be set.
-            retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any,
-                should be retried.
-            timeout (float): The timeout for this request.
-            metadata (Sequence[Tuple[str, str]]): Strings which should be
-                sent along with the request as metadata.

-        Returns:
-            google.cloud.dataplex_v1.types.RunDataScanResponse:
-                Run DataScan Response.
-        """
-        # Create or coerce a protobuf request object.
-        # - Quick check: If we got a request object, we should *not* have
-        #   gotten any keyword arguments that map to the request.
-        has_flattened_params = any([name])
-        if request is not None and has_flattened_params:
-            raise ValueError("If the `request` argument is set, then none of "
-                             "the individual field arguments should be set.")

-        # - Use the request object if provided (there's no risk of modifying the input as
-        #   there are no flattened fields), or create one.
-        if not isinstance(request, datascans.RunDataScanRequest):
-            request = datascans.RunDataScanRequest(request)

-        # If we have keyword arguments corresponding to fields on the
-        # request, apply these.
-        if name is not None:
-            request.name = name

-        # Wrap the RPC method; this adds retry and timeout information,
-        # and friendly error handling.
-        rpc = self._client._transport._wrapped_methods[self._client._transport.run_data_scan]

-        # Certain fields should be provided within the metadata header;
-        # add these here.
-        metadata = tuple(metadata) + (
-            gapic_v1.routing_header.to_grpc_metadata((
-                ("name", request.name),
-            )),
-        )

-        # Validate the universe domain.
-        self._client._validate_universe_domain()

-        # Send the request.
-        response = await rpc(
-            request,
-            retry=retry,
-            timeout=timeout,
-            metadata=metadata,
-        )

-        # Done; return the response.
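-        # An illustrative follow-up (names assumed): the job reported in
-        # the response can be polled until it reaches a terminal state:
-        #
-        #     response = await client.run_data_scan(name=scan_name)
-        #     job = await client.get_data_scan_job(name=response.job.name)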
- return response - - async def get_data_scan_job(self, - request: Optional[Union[datascans.GetDataScanJobRequest, dict]] = None, - *, - name: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> datascans.DataScanJob: - r"""Gets a DataScanJob resource. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import dataplex_v1 - - async def sample_get_data_scan_job(): - # Create a client - client = dataplex_v1.DataScanServiceAsyncClient() - - # Initialize request argument(s) - request = dataplex_v1.GetDataScanJobRequest( - name="name_value", - ) - - # Make the request - response = await client.get_data_scan_job(request=request) - - # Handle the response - print(response) - - Args: - request (Optional[Union[google.cloud.dataplex_v1.types.GetDataScanJobRequest, dict]]): - The request object. Get DataScanJob request. - name (:class:`str`): - Required. The resource name of the DataScanJob: - ``projects/{project}/locations/{location_id}/dataScans/{data_scan_id}/jobs/{data_scan_job_id}`` - where ``project`` refers to a *project_id* or - *project_number* and ``location_id`` refers to a GCP - region. - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.cloud.dataplex_v1.types.DataScanJob: - A DataScanJob represents an instance - of DataScan execution. - - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([name]) - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, datascans.GetDataScanJobRequest): - request = datascans.GetDataScanJobRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._client._transport._wrapped_methods[self._client._transport.get_data_scan_job] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. 
- return response - - async def list_data_scan_jobs(self, - request: Optional[Union[datascans.ListDataScanJobsRequest, dict]] = None, - *, - parent: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> pagers.ListDataScanJobsAsyncPager: - r"""Lists DataScanJobs under the given DataScan. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import dataplex_v1 - - async def sample_list_data_scan_jobs(): - # Create a client - client = dataplex_v1.DataScanServiceAsyncClient() - - # Initialize request argument(s) - request = dataplex_v1.ListDataScanJobsRequest( - parent="parent_value", - ) - - # Make the request - page_result = client.list_data_scan_jobs(request=request) - - # Handle the response - async for response in page_result: - print(response) - - Args: - request (Optional[Union[google.cloud.dataplex_v1.types.ListDataScanJobsRequest, dict]]): - The request object. List DataScanJobs request. - parent (:class:`str`): - Required. The resource name of the parent environment: - ``projects/{project}/locations/{location_id}/dataScans/{data_scan_id}`` - where ``project`` refers to a *project_id* or - *project_number* and ``location_id`` refers to a GCP - region. - - This corresponds to the ``parent`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.cloud.dataplex_v1.services.data_scan_service.pagers.ListDataScanJobsAsyncPager: - List DataScanJobs response. - - Iterating over this object will yield - results and resolve additional pages - automatically. - - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([parent]) - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, datascans.ListDataScanJobsRequest): - request = datascans.ListDataScanJobsRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if parent is not None: - request.parent = parent - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._client._transport._wrapped_methods[self._client._transport.list_data_scan_jobs] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), - ) - - # Validate the universe domain. 
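-        # (The client-configured universe domain, "googleapis.com" by
-        # default, must match the universe the credentials belong to; a
-        # mismatch raises before any request is sent.)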
- self._client._validate_universe_domain() - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # This method is paged; wrap the response in a pager, which provides - # an `__aiter__` convenience method. - response = pagers.ListDataScanJobsAsyncPager( - method=rpc, - request=request, - response=response, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - async def generate_data_quality_rules(self, - request: Optional[Union[datascans.GenerateDataQualityRulesRequest, dict]] = None, - *, - name: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> datascans.GenerateDataQualityRulesResponse: - r"""Generates recommended data quality rules based on the - results of a data profiling scan. - - Use the recommendations to build rules for a data - quality scan. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import dataplex_v1 - - async def sample_generate_data_quality_rules(): - # Create a client - client = dataplex_v1.DataScanServiceAsyncClient() - - # Initialize request argument(s) - request = dataplex_v1.GenerateDataQualityRulesRequest( - name="name_value", - ) - - # Make the request - response = await client.generate_data_quality_rules(request=request) - - # Handle the response - print(response) - - Args: - request (Optional[Union[google.cloud.dataplex_v1.types.GenerateDataQualityRulesRequest, dict]]): - The request object. Request details for generating data - quality rule recommendations. - name (:class:`str`): - Required. The name must be one of the following: - - - The name of a data scan with at least one successful, - completed data profiling job - - The name of a successful, completed data profiling - job (a data scan job where the job type is data - profiling) - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.cloud.dataplex_v1.types.GenerateDataQualityRulesResponse: - Response details for data quality - rule recommendations. - - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([name]) - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. 
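-        # Either form is accepted here; a plain dict is coerced into the
-        # typed request by the constructor below (value illustrative):
-        #
-        #     await client.generate_data_quality_rules(
-        #         request={"name": scan_name})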
- if not isinstance(request, datascans.GenerateDataQualityRulesRequest): - request = datascans.GenerateDataQualityRulesRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._client._transport._wrapped_methods[self._client._transport.generate_data_quality_rules] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - async def list_operations( - self, - request: Optional[operations_pb2.ListOperationsRequest] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> operations_pb2.ListOperationsResponse: - r"""Lists operations that match the specified filter in the request. - - Args: - request (:class:`~.operations_pb2.ListOperationsRequest`): - The request object. Request message for - `ListOperations` method. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, - if any, should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - Returns: - ~.operations_pb2.ListOperationsResponse: - Response message for ``ListOperations`` method. - """ - # Create or coerce a protobuf request object. - # The request isn't a proto-plus wrapped type, - # so it must be constructed via keyword expansion. - if isinstance(request, dict): - request = operations_pb2.ListOperationsRequest(**request) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self.transport._wrapped_methods[self._client._transport.list_operations] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata( - (("name", request.name),)), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. - response = await rpc( - request, retry=retry, timeout=timeout, metadata=metadata,) - - # Done; return the response. - return response - - async def get_operation( - self, - request: Optional[operations_pb2.GetOperationRequest] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> operations_pb2.Operation: - r"""Gets the latest state of a long-running operation. - - Args: - request (:class:`~.operations_pb2.GetOperationRequest`): - The request object. Request message for - `GetOperation` method. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, - if any, should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - Returns: - ~.operations_pb2.Operation: - An ``Operation`` object. - """ - # Create or coerce a protobuf request object. 
- # The request isn't a proto-plus wrapped type, - # so it must be constructed via keyword expansion. - if isinstance(request, dict): - request = operations_pb2.GetOperationRequest(**request) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self.transport._wrapped_methods[self._client._transport.get_operation] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata( - (("name", request.name),)), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. - response = await rpc( - request, retry=retry, timeout=timeout, metadata=metadata,) - - # Done; return the response. - return response - - async def delete_operation( - self, - request: Optional[operations_pb2.DeleteOperationRequest] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> None: - r"""Deletes a long-running operation. - - This method indicates that the client is no longer interested - in the operation result. It does not cancel the operation. - If the server doesn't support this method, it returns - `google.rpc.Code.UNIMPLEMENTED`. - - Args: - request (:class:`~.operations_pb2.DeleteOperationRequest`): - The request object. Request message for - `DeleteOperation` method. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, - if any, should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - Returns: - None - """ - # Create or coerce a protobuf request object. - # The request isn't a proto-plus wrapped type, - # so it must be constructed via keyword expansion. - if isinstance(request, dict): - request = operations_pb2.DeleteOperationRequest(**request) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self.transport._wrapped_methods[self._client._transport.delete_operation] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata( - (("name", request.name),)), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. - await rpc(request, retry=retry, timeout=timeout, metadata=metadata,) - - async def cancel_operation( - self, - request: Optional[operations_pb2.CancelOperationRequest] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> None: - r"""Starts asynchronous cancellation on a long-running operation. - - The server makes a best effort to cancel the operation, but success - is not guaranteed. If the server doesn't support this method, it returns - `google.rpc.Code.UNIMPLEMENTED`. - - Args: - request (:class:`~.operations_pb2.CancelOperationRequest`): - The request object. Request message for - `CancelOperation` method. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, - if any, should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. 
- Returns: - None - """ - # Create or coerce a protobuf request object. - # The request isn't a proto-plus wrapped type, - # so it must be constructed via keyword expansion. - if isinstance(request, dict): - request = operations_pb2.CancelOperationRequest(**request) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self.transport._wrapped_methods[self._client._transport.cancel_operation] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata( - (("name", request.name),)), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. - await rpc(request, retry=retry, timeout=timeout, metadata=metadata,) - - async def get_location( - self, - request: Optional[locations_pb2.GetLocationRequest] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> locations_pb2.Location: - r"""Gets information about a location. - - Args: - request (:class:`~.location_pb2.GetLocationRequest`): - The request object. Request message for - `GetLocation` method. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, - if any, should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - Returns: - ~.location_pb2.Location: - Location object. - """ - # Create or coerce a protobuf request object. - # The request isn't a proto-plus wrapped type, - # so it must be constructed via keyword expansion. - if isinstance(request, dict): - request = locations_pb2.GetLocationRequest(**request) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self.transport._wrapped_methods[self._client._transport.get_location] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata( - (("name", request.name),)), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. - response = await rpc( - request, retry=retry, timeout=timeout, metadata=metadata,) - - # Done; return the response. - return response - - async def list_locations( - self, - request: Optional[locations_pb2.ListLocationsRequest] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> locations_pb2.ListLocationsResponse: - r"""Lists information about the supported locations for this service. - - Args: - request (:class:`~.location_pb2.ListLocationsRequest`): - The request object. Request message for - `ListLocations` method. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, - if any, should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - Returns: - ~.location_pb2.ListLocationsResponse: - Response message for ``ListLocations`` method. - """ - # Create or coerce a protobuf request object. - # The request isn't a proto-plus wrapped type, - # so it must be constructed via keyword expansion. 
- if isinstance(request, dict): - request = locations_pb2.ListLocationsRequest(**request) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self.transport._wrapped_methods[self._client._transport.list_locations] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata( - (("name", request.name),)), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. - response = await rpc( - request, retry=retry, timeout=timeout, metadata=metadata,) - - # Done; return the response. - return response - - async def __aenter__(self) -> "DataScanServiceAsyncClient": - return self - - async def __aexit__(self, exc_type, exc, tb): - await self.transport.close() - -DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(gapic_version=package_version.__version__) - - -__all__ = ( - "DataScanServiceAsyncClient", -) diff --git a/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/data_scan_service/client.py b/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/data_scan_service/client.py deleted file mode 100644 index 750cb340c2d1..000000000000 --- a/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/data_scan_service/client.py +++ /dev/null @@ -1,2015 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-#
-from collections import OrderedDict
-import os
-import re
-from typing import Dict, Callable, Mapping, MutableMapping, MutableSequence, Optional, Sequence, Tuple, Type, Union, cast
-import warnings
-
-from google.cloud.dataplex_v1 import gapic_version as package_version
-
-from google.api_core import client_options as client_options_lib
-from google.api_core import exceptions as core_exceptions
-from google.api_core import gapic_v1
-from google.api_core import retry as retries
-from google.auth import credentials as ga_credentials  # type: ignore
-from google.auth.transport import mtls  # type: ignore
-from google.auth.transport.grpc import SslCredentials  # type: ignore
-from google.auth.exceptions import MutualTLSChannelError  # type: ignore
-from google.oauth2 import service_account  # type: ignore
-
-try:
-    OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault, None]
-except AttributeError:  # pragma: NO COVER
-    OptionalRetry = Union[retries.Retry, object, None]  # type: ignore
-
-from google.api_core import operation  # type: ignore
-from google.api_core import operation_async  # type: ignore
-from google.cloud.dataplex_v1.services.data_scan_service import pagers
-from google.cloud.dataplex_v1.types import data_discovery
-from google.cloud.dataplex_v1.types import data_profile
-from google.cloud.dataplex_v1.types import data_quality
-from google.cloud.dataplex_v1.types import datascans
-from google.cloud.dataplex_v1.types import processing
-from google.cloud.dataplex_v1.types import resources
-from google.cloud.dataplex_v1.types import service
-from google.cloud.location import locations_pb2  # type: ignore
-from google.iam.v1 import iam_policy_pb2  # type: ignore
-from google.iam.v1 import policy_pb2  # type: ignore
-from google.longrunning import operations_pb2  # type: ignore
-from google.protobuf import empty_pb2  # type: ignore
-from google.protobuf import field_mask_pb2  # type: ignore
-from google.protobuf import timestamp_pb2  # type: ignore
-from .transports.base import DataScanServiceTransport, DEFAULT_CLIENT_INFO
-from .transports.grpc import DataScanServiceGrpcTransport
-from .transports.grpc_asyncio import DataScanServiceGrpcAsyncIOTransport
-
-
-class DataScanServiceClientMeta(type):
-    """Metaclass for the DataScanService client.
-
-    This provides class-level methods for building and retrieving
-    support objects (e.g. transport) without polluting the client instance
-    objects.
-    """
-    _transport_registry = OrderedDict()  # type: Dict[str, Type[DataScanServiceTransport]]
-    _transport_registry["grpc"] = DataScanServiceGrpcTransport
-    _transport_registry["grpc_asyncio"] = DataScanServiceGrpcAsyncIOTransport
-
-    def get_transport_class(cls,
-            label: Optional[str] = None,
-        ) -> Type[DataScanServiceTransport]:
-        """Returns an appropriate transport class.
-
-        Args:
-            label: The name of the desired transport. If none is
-                provided, then the first transport in the registry is used.
-
-        Returns:
-            The transport class to use.
-        """
-        # If a specific transport is requested, return that one.
-        if label:
-            return cls._transport_registry[label]
-
-        # No transport is requested; return the default (that is, the first one
-        # in the dictionary).
-        return next(iter(cls._transport_registry.values()))
-
-
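For orientation, a minimal sketch of how the registry above is typically consumed; it assumes only what is shown here (the "grpc" and "grpc_asyncio" labels and the metaclass-level lookup):

.. code-block:: python

    from google.cloud.dataplex_v1 import DataScanServiceClient

    # An explicit label returns the matching transport class.
    async_transport = DataScanServiceClient.get_transport_class("grpc_asyncio")

    # With no label, the first registered entry ("grpc") is returned.
    default_transport = DataScanServiceClient.get_transport_class()
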
- """ - - @staticmethod - def _get_default_mtls_endpoint(api_endpoint): - """Converts api endpoint to mTLS endpoint. - - Convert "*.sandbox.googleapis.com" and "*.googleapis.com" to - "*.mtls.sandbox.googleapis.com" and "*.mtls.googleapis.com" respectively. - Args: - api_endpoint (Optional[str]): the api endpoint to convert. - Returns: - str: converted mTLS api endpoint. - """ - if not api_endpoint: - return api_endpoint - - mtls_endpoint_re = re.compile( - r"(?P[^.]+)(?P\.mtls)?(?P\.sandbox)?(?P\.googleapis\.com)?" - ) - - m = mtls_endpoint_re.match(api_endpoint) - name, mtls, sandbox, googledomain = m.groups() - if mtls or not googledomain: - return api_endpoint - - if sandbox: - return api_endpoint.replace( - "sandbox.googleapis.com", "mtls.sandbox.googleapis.com" - ) - - return api_endpoint.replace(".googleapis.com", ".mtls.googleapis.com") - - # Note: DEFAULT_ENDPOINT is deprecated. Use _DEFAULT_ENDPOINT_TEMPLATE instead. - DEFAULT_ENDPOINT = "dataplex.googleapis.com" - DEFAULT_MTLS_ENDPOINT = _get_default_mtls_endpoint.__func__( # type: ignore - DEFAULT_ENDPOINT - ) - - _DEFAULT_ENDPOINT_TEMPLATE = "dataplex.{UNIVERSE_DOMAIN}" - _DEFAULT_UNIVERSE = "googleapis.com" - - @classmethod - def from_service_account_info(cls, info: dict, *args, **kwargs): - """Creates an instance of this client using the provided credentials - info. - - Args: - info (dict): The service account private key info. - args: Additional arguments to pass to the constructor. - kwargs: Additional arguments to pass to the constructor. - - Returns: - DataScanServiceClient: The constructed client. - """ - credentials = service_account.Credentials.from_service_account_info(info) - kwargs["credentials"] = credentials - return cls(*args, **kwargs) - - @classmethod - def from_service_account_file(cls, filename: str, *args, **kwargs): - """Creates an instance of this client using the provided credentials - file. - - Args: - filename (str): The path to the service account private key json - file. - args: Additional arguments to pass to the constructor. - kwargs: Additional arguments to pass to the constructor. - - Returns: - DataScanServiceClient: The constructed client. - """ - credentials = service_account.Credentials.from_service_account_file( - filename) - kwargs["credentials"] = credentials - return cls(*args, **kwargs) - - from_service_account_json = from_service_account_file - - @property - def transport(self) -> DataScanServiceTransport: - """Returns the transport used by the client instance. - - Returns: - DataScanServiceTransport: The transport used by the client - instance. 
- """ - return self._transport - - @staticmethod - def connection_path(project: str,location: str,connection: str,) -> str: - """Returns a fully-qualified connection string.""" - return "projects/{project}/locations/{location}/connections/{connection}".format(project=project, location=location, connection=connection, ) - - @staticmethod - def parse_connection_path(path: str) -> Dict[str,str]: - """Parses a connection path into its component segments.""" - m = re.match(r"^projects/(?P.+?)/locations/(?P.+?)/connections/(?P.+?)$", path) - return m.groupdict() if m else {} - - @staticmethod - def data_scan_path(project: str,location: str,dataScan: str,) -> str: - """Returns a fully-qualified data_scan string.""" - return "projects/{project}/locations/{location}/dataScans/{dataScan}".format(project=project, location=location, dataScan=dataScan, ) - - @staticmethod - def parse_data_scan_path(path: str) -> Dict[str,str]: - """Parses a data_scan path into its component segments.""" - m = re.match(r"^projects/(?P.+?)/locations/(?P.+?)/dataScans/(?P.+?)$", path) - return m.groupdict() if m else {} - - @staticmethod - def data_scan_job_path(project: str,location: str,dataScan: str,job: str,) -> str: - """Returns a fully-qualified data_scan_job string.""" - return "projects/{project}/locations/{location}/dataScans/{dataScan}/jobs/{job}".format(project=project, location=location, dataScan=dataScan, job=job, ) - - @staticmethod - def parse_data_scan_job_path(path: str) -> Dict[str,str]: - """Parses a data_scan_job path into its component segments.""" - m = re.match(r"^projects/(?P.+?)/locations/(?P.+?)/dataScans/(?P.+?)/jobs/(?P.+?)$", path) - return m.groupdict() if m else {} - - @staticmethod - def dataset_path(project: str,dataset: str,) -> str: - """Returns a fully-qualified dataset string.""" - return "projects/{project}/datasets/{dataset}".format(project=project, dataset=dataset, ) - - @staticmethod - def parse_dataset_path(path: str) -> Dict[str,str]: - """Parses a dataset path into its component segments.""" - m = re.match(r"^projects/(?P.+?)/datasets/(?P.+?)$", path) - return m.groupdict() if m else {} - - @staticmethod - def entity_path(project: str,location: str,lake: str,zone: str,entity: str,) -> str: - """Returns a fully-qualified entity string.""" - return "projects/{project}/locations/{location}/lakes/{lake}/zones/{zone}/entities/{entity}".format(project=project, location=location, lake=lake, zone=zone, entity=entity, ) - - @staticmethod - def parse_entity_path(path: str) -> Dict[str,str]: - """Parses a entity path into its component segments.""" - m = re.match(r"^projects/(?P.+?)/locations/(?P.+?)/lakes/(?P.+?)/zones/(?P.+?)/entities/(?P.+?)$", path) - return m.groupdict() if m else {} - - @staticmethod - def common_billing_account_path(billing_account: str, ) -> str: - """Returns a fully-qualified billing_account string.""" - return "billingAccounts/{billing_account}".format(billing_account=billing_account, ) - - @staticmethod - def parse_common_billing_account_path(path: str) -> Dict[str,str]: - """Parse a billing_account path into its component segments.""" - m = re.match(r"^billingAccounts/(?P.+?)$", path) - return m.groupdict() if m else {} - - @staticmethod - def common_folder_path(folder: str, ) -> str: - """Returns a fully-qualified folder string.""" - return "folders/{folder}".format(folder=folder, ) - - @staticmethod - def parse_common_folder_path(path: str) -> Dict[str,str]: - """Parse a folder path into its component segments.""" - m = re.match(r"^folders/(?P.+?)$", path) - 
-    @staticmethod
-    def common_organization_path(organization: str, ) -> str:
-        """Returns a fully-qualified organization string."""
-        return "organizations/{organization}".format(organization=organization, )
-
-    @staticmethod
-    def parse_common_organization_path(path: str) -> Dict[str,str]:
-        """Parse an organization path into its component segments."""
-        m = re.match(r"^organizations/(?P<organization>.+?)$", path)
-        return m.groupdict() if m else {}
-
-    @staticmethod
-    def common_project_path(project: str, ) -> str:
-        """Returns a fully-qualified project string."""
-        return "projects/{project}".format(project=project, )
-
-    @staticmethod
-    def parse_common_project_path(path: str) -> Dict[str,str]:
-        """Parse a project path into its component segments."""
-        m = re.match(r"^projects/(?P<project>.+?)$", path)
-        return m.groupdict() if m else {}
-
-    @staticmethod
-    def common_location_path(project: str, location: str, ) -> str:
-        """Returns a fully-qualified location string."""
-        return "projects/{project}/locations/{location}".format(project=project, location=location, )
-
-    @staticmethod
-    def parse_common_location_path(path: str) -> Dict[str,str]:
-        """Parse a location path into its component segments."""
-        m = re.match(r"^projects/(?P<project>.+?)/locations/(?P<location>.+?)$", path)
-        return m.groupdict() if m else {}
-
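The path helpers above are pure string builders and parsers, so they can be exercised without any network access; a short round-trip sketch with illustrative identifiers:

.. code-block:: python

    from google.cloud.dataplex_v1 import DataScanServiceClient

    # Compose a fully qualified DataScan resource name ...
    path = DataScanServiceClient.data_scan_path(
        "my-project", "us-central1", "my-scan")
    # -> "projects/my-project/locations/us-central1/dataScans/my-scan"

    # ... and parse it back into its component segments.
    segments = DataScanServiceClient.parse_data_scan_path(path)
    # -> {"project": "my-project", "location": "us-central1", "dataScan": "my-scan"}
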
-    @classmethod
-    def get_mtls_endpoint_and_cert_source(cls, client_options: Optional[client_options_lib.ClientOptions] = None):
-        """Deprecated. Return the API endpoint and client cert source for mutual TLS.
-
-        The client cert source is determined in the following order:
-        (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the
-        client cert source is None.
-        (2) if `client_options.client_cert_source` is provided, use the provided one; if the
-        default client cert source exists, use the default one; otherwise the client cert
-        source is None.
-
-        The API endpoint is determined in the following order:
-        (1) if `client_options.api_endpoint` is provided, use the provided one.
-        (2) if `GOOGLE_API_USE_MTLS_ENDPOINT` environment variable is "always", use the
-        default mTLS endpoint; if the environment variable is "never", use the default API
-        endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise
-        use the default API endpoint.
-
-        More details can be found at https://google.aip.dev/auth/4114.
-
-        Args:
-            client_options (google.api_core.client_options.ClientOptions): Custom options for the
-                client. Only the `api_endpoint` and `client_cert_source` properties may be used
-                in this method.
-
-        Returns:
-            Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the
-                client cert source to use.
-
-        Raises:
-            google.auth.exceptions.MutualTLSChannelError: If any errors happen.
-        """
-
-        warnings.warn("get_mtls_endpoint_and_cert_source is deprecated. Use the api_endpoint property instead.",
-            DeprecationWarning)
-        if client_options is None:
-            client_options = client_options_lib.ClientOptions()
-        use_client_cert = os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false")
-        use_mtls_endpoint = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto")
-        if use_client_cert not in ("true", "false"):
-            raise ValueError("Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`")
-        if use_mtls_endpoint not in ("auto", "never", "always"):
-            raise MutualTLSChannelError("Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`")
-
-        # Figure out the client cert source to use.
-        client_cert_source = None
-        if use_client_cert == "true":
-            if client_options.client_cert_source:
-                client_cert_source = client_options.client_cert_source
-            elif mtls.has_default_client_cert_source():
-                client_cert_source = mtls.default_client_cert_source()
-
-        # Figure out which api endpoint to use.
-        if client_options.api_endpoint is not None:
-            api_endpoint = client_options.api_endpoint
-        elif use_mtls_endpoint == "always" or (use_mtls_endpoint == "auto" and client_cert_source):
-            api_endpoint = cls.DEFAULT_MTLS_ENDPOINT
-        else:
-            api_endpoint = cls.DEFAULT_ENDPOINT
-
-        return api_endpoint, client_cert_source
-
-    @staticmethod
-    def _read_environment_variables():
-        """Returns the environment variables used by the client.
-
-        Returns:
-            Tuple[bool, str, str]: returns the GOOGLE_API_USE_CLIENT_CERTIFICATE,
-            GOOGLE_API_USE_MTLS_ENDPOINT, and GOOGLE_CLOUD_UNIVERSE_DOMAIN environment variables.
-
-        Raises:
-            ValueError: If GOOGLE_API_USE_CLIENT_CERTIFICATE is not
-                any of ["true", "false"].
-            google.auth.exceptions.MutualTLSChannelError: If GOOGLE_API_USE_MTLS_ENDPOINT
-                is not any of ["auto", "never", "always"].
-        """
-        use_client_cert = os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false").lower()
-        use_mtls_endpoint = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto").lower()
-        universe_domain_env = os.getenv("GOOGLE_CLOUD_UNIVERSE_DOMAIN")
-        if use_client_cert not in ("true", "false"):
-            raise ValueError("Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`")
-        if use_mtls_endpoint not in ("auto", "never", "always"):
-            raise MutualTLSChannelError("Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`")
-        return use_client_cert == "true", use_mtls_endpoint, universe_domain_env
-
-    @staticmethod
-    def _get_client_cert_source(provided_cert_source, use_cert_flag):
-        """Return the client cert source to be used by the client.
-
-        Args:
-            provided_cert_source (bytes): The client certificate source provided.
-            use_cert_flag (bool): A flag indicating whether to use the client certificate.
-
-        Returns:
-            bytes or None: The client cert source to be used by the client.
-        """
-        client_cert_source = None
-        if use_cert_flag:
-            if provided_cert_source:
-                client_cert_source = provided_cert_source
-            elif mtls.has_default_client_cert_source():
-                client_cert_source = mtls.default_client_cert_source()
-        return client_cert_source
-
-    @staticmethod
-    def _get_api_endpoint(api_override, client_cert_source, universe_domain, use_mtls_endpoint):
-        """Return the API endpoint used by the client.
-
-        Args:
-            api_override (str): The API endpoint override. If specified, this is always
-                the return value of this function and the other arguments are not used.
-            client_cert_source (bytes): The client certificate source used by the client.
- universe_domain (str): The universe domain used by the client. - use_mtls_endpoint (str): How to use the mTLS endpoint, which depends also on the other parameters. - Possible values are "always", "auto", or "never". - - Returns: - str: The API endpoint to be used by the client. - """ - if api_override is not None: - api_endpoint = api_override - elif use_mtls_endpoint == "always" or (use_mtls_endpoint == "auto" and client_cert_source): - _default_universe = DataScanServiceClient._DEFAULT_UNIVERSE - if universe_domain != _default_universe: - raise MutualTLSChannelError(f"mTLS is not supported in any universe other than {_default_universe}.") - api_endpoint = DataScanServiceClient.DEFAULT_MTLS_ENDPOINT - else: - api_endpoint = DataScanServiceClient._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=universe_domain) - return api_endpoint - - @staticmethod - def _get_universe_domain(client_universe_domain: Optional[str], universe_domain_env: Optional[str]) -> str: - """Return the universe domain used by the client. - - Args: - client_universe_domain (Optional[str]): The universe domain configured via the client options. - universe_domain_env (Optional[str]): The universe domain configured via the "GOOGLE_CLOUD_UNIVERSE_DOMAIN" environment variable. - - Returns: - str: The universe domain to be used by the client. - - Raises: - ValueError: If the universe domain is an empty string. - """ - universe_domain = DataScanServiceClient._DEFAULT_UNIVERSE - if client_universe_domain is not None: - universe_domain = client_universe_domain - elif universe_domain_env is not None: - universe_domain = universe_domain_env - if len(universe_domain.strip()) == 0: - raise ValueError("Universe Domain cannot be an empty string.") - return universe_domain - - def _validate_universe_domain(self): - """Validates client's and credentials' universe domains are consistent. - - Returns: - bool: True iff the configured universe domain is valid. - - Raises: - ValueError: If the configured universe domain is not valid. - """ - - # NOTE (b/349488459): universe validation is disabled until further notice. - return True - - @property - def api_endpoint(self): - """Return the API endpoint used by the client instance. - - Returns: - str: The API endpoint used by the client instance. - """ - return self._api_endpoint - - @property - def universe_domain(self) -> str: - """Return the universe domain used by the client instance. - - Returns: - str: The universe domain used by the client instance. - """ - return self._universe_domain - - def __init__(self, *, - credentials: Optional[ga_credentials.Credentials] = None, - transport: Optional[Union[str, DataScanServiceTransport, Callable[..., DataScanServiceTransport]]] = None, - client_options: Optional[Union[client_options_lib.ClientOptions, dict]] = None, - client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, - ) -> None: - """Instantiates the data scan service client. - - Args: - credentials (Optional[google.auth.credentials.Credentials]): The - authorization credentials to attach to requests. These - credentials identify the application to the service; if none - are specified, the client will attempt to ascertain the - credentials from the environment. - transport (Optional[Union[str,DataScanServiceTransport,Callable[..., DataScanServiceTransport]]]): - The transport to use, or a Callable that constructs and returns a new transport. 
- If a Callable is given, it will be called with the same set of initialization - arguments as used in the DataScanServiceTransport constructor. - If set to None, a transport is chosen automatically. - client_options (Optional[Union[google.api_core.client_options.ClientOptions, dict]]): - Custom options for the client. - - 1. The ``api_endpoint`` property can be used to override the - default endpoint provided by the client when ``transport`` is - not explicitly provided. Only if this property is not set and - ``transport`` was not explicitly provided, the endpoint is - determined by the GOOGLE_API_USE_MTLS_ENDPOINT environment - variable, which have one of the following values: - "always" (always use the default mTLS endpoint), "never" (always - use the default regular endpoint) and "auto" (auto-switch to the - default mTLS endpoint if client certificate is present; this is - the default value). - - 2. If the GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable - is "true", then the ``client_cert_source`` property can be used - to provide a client certificate for mTLS transport. If - not provided, the default SSL client certificate will be used if - present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not - set, no client certificate will be used. - - 3. The ``universe_domain`` property can be used to override the - default "googleapis.com" universe. Note that the ``api_endpoint`` - property still takes precedence; and ``universe_domain`` is - currently not supported for mTLS. - - client_info (google.api_core.gapic_v1.client_info.ClientInfo): - The client info used to send a user-agent string along with - API requests. If ``None``, then default info will be used. - Generally, you only need to set this if you're developing - your own client library. - - Raises: - google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport - creation failed for any reason. - """ - self._client_options = client_options - if isinstance(self._client_options, dict): - self._client_options = client_options_lib.from_dict(self._client_options) - if self._client_options is None: - self._client_options = client_options_lib.ClientOptions() - self._client_options = cast(client_options_lib.ClientOptions, self._client_options) - - universe_domain_opt = getattr(self._client_options, 'universe_domain', None) - - self._use_client_cert, self._use_mtls_endpoint, self._universe_domain_env = DataScanServiceClient._read_environment_variables() - self._client_cert_source = DataScanServiceClient._get_client_cert_source(self._client_options.client_cert_source, self._use_client_cert) - self._universe_domain = DataScanServiceClient._get_universe_domain(universe_domain_opt, self._universe_domain_env) - self._api_endpoint = None # updated below, depending on `transport` - - # Initialize the universe domain validation. - self._is_universe_domain_valid = False - - api_key_value = getattr(self._client_options, "api_key", None) - if api_key_value and credentials: - raise ValueError("client_options.api_key and credentials are mutually exclusive") - - # Save or instantiate the transport. - # Ordinarily, we provide the transport, but allowing a custom transport - # instance provides an extensibility point for unusual situations. - transport_provided = isinstance(transport, DataScanServiceTransport) - if transport_provided: - # transport is a DataScanServiceTransport instance. 
- if credentials or self._client_options.credentials_file or api_key_value: - raise ValueError("When providing a transport instance, " - "provide its credentials directly.") - if self._client_options.scopes: - raise ValueError( - "When providing a transport instance, provide its scopes " - "directly." - ) - self._transport = cast(DataScanServiceTransport, transport) - self._api_endpoint = self._transport.host - - self._api_endpoint = (self._api_endpoint or - DataScanServiceClient._get_api_endpoint( - self._client_options.api_endpoint, - self._client_cert_source, - self._universe_domain, - self._use_mtls_endpoint)) - - if not transport_provided: - import google.auth._default # type: ignore - - if api_key_value and hasattr(google.auth._default, "get_api_key_credentials"): - credentials = google.auth._default.get_api_key_credentials(api_key_value) - - transport_init: Union[Type[DataScanServiceTransport], Callable[..., DataScanServiceTransport]] = ( - DataScanServiceClient.get_transport_class(transport) - if isinstance(transport, str) or transport is None - else cast(Callable[..., DataScanServiceTransport], transport) - ) - # initialize with the provided callable or the passed in class - self._transport = transport_init( - credentials=credentials, - credentials_file=self._client_options.credentials_file, - host=self._api_endpoint, - scopes=self._client_options.scopes, - client_cert_source_for_mtls=self._client_cert_source, - quota_project_id=self._client_options.quota_project_id, - client_info=client_info, - always_use_jwt_access=True, - api_audience=self._client_options.api_audience, - ) - - def create_data_scan(self, - request: Optional[Union[datascans.CreateDataScanRequest, dict]] = None, - *, - parent: Optional[str] = None, - data_scan: Optional[datascans.DataScan] = None, - data_scan_id: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> operation.Operation: - r"""Creates a DataScan resource. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import dataplex_v1 - - def sample_create_data_scan(): - # Create a client - client = dataplex_v1.DataScanServiceClient() - - # Initialize request argument(s) - data_scan = dataplex_v1.DataScan() - data_scan.data_quality_spec.rules.dimension = "dimension_value" - data_scan.data.entity = "entity_value" - - request = dataplex_v1.CreateDataScanRequest( - parent="parent_value", - data_scan=data_scan, - data_scan_id="data_scan_id_value", - ) - - # Make the request - operation = client.create_data_scan(request=request) - - print("Waiting for operation to complete...") - - response = operation.result() - - # Handle the response - print(response) - - Args: - request (Union[google.cloud.dataplex_v1.types.CreateDataScanRequest, dict]): - The request object. Create dataScan request. - parent (str): - Required. The resource name of the parent location: - ``projects/{project}/locations/{location_id}`` where - ``project`` refers to a *project_id* or *project_number* - and ``location_id`` refers to a GCP region. 
- - This corresponds to the ``parent`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - data_scan (google.cloud.dataplex_v1.types.DataScan): - Required. DataScan resource. - This corresponds to the ``data_scan`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - data_scan_id (str): - Required. DataScan identifier. - - - Must contain only lowercase letters, numbers and - hyphens. - - Must start with a letter. - - Must end with a number or a letter. - - Must be between 1-63 characters. - - Must be unique within the customer project / - location. - - This corresponds to the ``data_scan_id`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.api_core.operation.Operation: - An object representing a long-running operation. - - The result type for the operation will be :class:`google.cloud.dataplex_v1.types.DataScan` Represents a user-visible job which provides the insights for the related - data source. - - For example: - - - Data Quality: generates queries based on the rules - and runs against the data to get data quality - check results. - - Data Profile: analyzes the data in table(s) and - generates insights about the structure, content - and relationships (such as null percent, - cardinality, min/max/mean, etc). - - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([parent, data_scan, data_scan_id]) - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, datascans.CreateDataScanRequest): - request = datascans.CreateDataScanRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if parent is not None: - request.parent = parent - if data_scan is not None: - request.data_scan = data_scan - if data_scan_id is not None: - request.data_scan_id = data_scan_id - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.create_data_scan] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), - ) - - # Validate the universe domain. - self._validate_universe_domain() - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Wrap the response in an operation future. - response = operation.from_gapic( - response, - self._transport.operations_client, - datascans.DataScan, - metadata_type=service.OperationMetadata, - ) - - # Done; return the response. 
- return response - - def update_data_scan(self, - request: Optional[Union[datascans.UpdateDataScanRequest, dict]] = None, - *, - data_scan: Optional[datascans.DataScan] = None, - update_mask: Optional[field_mask_pb2.FieldMask] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> operation.Operation: - r"""Updates a DataScan resource. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import dataplex_v1 - - def sample_update_data_scan(): - # Create a client - client = dataplex_v1.DataScanServiceClient() - - # Initialize request argument(s) - data_scan = dataplex_v1.DataScan() - data_scan.data_quality_spec.rules.dimension = "dimension_value" - data_scan.data.entity = "entity_value" - - request = dataplex_v1.UpdateDataScanRequest( - data_scan=data_scan, - ) - - # Make the request - operation = client.update_data_scan(request=request) - - print("Waiting for operation to complete...") - - response = operation.result() - - # Handle the response - print(response) - - Args: - request (Union[google.cloud.dataplex_v1.types.UpdateDataScanRequest, dict]): - The request object. Update dataScan request. - data_scan (google.cloud.dataplex_v1.types.DataScan): - Required. DataScan resource to be updated. - - Only fields specified in ``update_mask`` are updated. - - This corresponds to the ``data_scan`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - update_mask (google.protobuf.field_mask_pb2.FieldMask): - Required. Mask of fields to update. - This corresponds to the ``update_mask`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.api_core.operation.Operation: - An object representing a long-running operation. - - The result type for the operation will be :class:`google.cloud.dataplex_v1.types.DataScan` Represents a user-visible job which provides the insights for the related - data source. - - For example: - - - Data Quality: generates queries based on the rules - and runs against the data to get data quality - check results. - - Data Profile: analyzes the data in table(s) and - generates insights about the structure, content - and relationships (such as null percent, - cardinality, min/max/mean, etc). - - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([data_scan, update_mask]) - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. 
- if not isinstance(request, datascans.UpdateDataScanRequest): - request = datascans.UpdateDataScanRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if data_scan is not None: - request.data_scan = data_scan - if update_mask is not None: - request.update_mask = update_mask - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.update_data_scan] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("data_scan.name", request.data_scan.name), - )), - ) - - # Validate the universe domain. - self._validate_universe_domain() - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Wrap the response in an operation future. - response = operation.from_gapic( - response, - self._transport.operations_client, - datascans.DataScan, - metadata_type=service.OperationMetadata, - ) - - # Done; return the response. - return response - - def delete_data_scan(self, - request: Optional[Union[datascans.DeleteDataScanRequest, dict]] = None, - *, - name: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> operation.Operation: - r"""Deletes a DataScan resource. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import dataplex_v1 - - def sample_delete_data_scan(): - # Create a client - client = dataplex_v1.DataScanServiceClient() - - # Initialize request argument(s) - request = dataplex_v1.DeleteDataScanRequest( - name="name_value", - ) - - # Make the request - operation = client.delete_data_scan(request=request) - - print("Waiting for operation to complete...") - - response = operation.result() - - # Handle the response - print(response) - - Args: - request (Union[google.cloud.dataplex_v1.types.DeleteDataScanRequest, dict]): - The request object. Delete dataScan request. - name (str): - Required. The resource name of the dataScan: - ``projects/{project}/locations/{location_id}/dataScans/{data_scan_id}`` - where ``project`` refers to a *project_id* or - *project_number* and ``location_id`` refers to a GCP - region. - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.api_core.operation.Operation: - An object representing a long-running operation. - - The result type for the operation will be :class:`google.protobuf.empty_pb2.Empty` A generic empty message that you can re-use to avoid defining duplicated - empty messages in your APIs. A typical example is to - use it as the request or the response type of an API - method. 
For instance: - - service Foo { - rpc Bar(google.protobuf.Empty) returns - (google.protobuf.Empty); - - } - - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([name]) - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, datascans.DeleteDataScanRequest): - request = datascans.DeleteDataScanRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.delete_data_scan] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Validate the universe domain. - self._validate_universe_domain() - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Wrap the response in an operation future. - response = operation.from_gapic( - response, - self._transport.operations_client, - empty_pb2.Empty, - metadata_type=service.OperationMetadata, - ) - - # Done; return the response. - return response - - def get_data_scan(self, - request: Optional[Union[datascans.GetDataScanRequest, dict]] = None, - *, - name: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> datascans.DataScan: - r"""Gets a DataScan resource. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import dataplex_v1 - - def sample_get_data_scan(): - # Create a client - client = dataplex_v1.DataScanServiceClient() - - # Initialize request argument(s) - request = dataplex_v1.GetDataScanRequest( - name="name_value", - ) - - # Make the request - response = client.get_data_scan(request=request) - - # Handle the response - print(response) - - Args: - request (Union[google.cloud.dataplex_v1.types.GetDataScanRequest, dict]): - The request object. Get dataScan request. - name (str): - Required. The resource name of the dataScan: - ``projects/{project}/locations/{location_id}/dataScans/{data_scan_id}`` - where ``project`` refers to a *project_id* or - *project_number* and ``location_id`` refers to a GCP - region. - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. 
- metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.cloud.dataplex_v1.types.DataScan: - Represents a user-visible job which provides the insights for the related - data source. - - For example: - - - Data Quality: generates queries based on the rules - and runs against the data to get data quality - check results. - - Data Profile: analyzes the data in table(s) and - generates insights about the structure, content - and relationships (such as null percent, - cardinality, min/max/mean, etc). - - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([name]) - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, datascans.GetDataScanRequest): - request = datascans.GetDataScanRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.get_data_scan] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Validate the universe domain. - self._validate_universe_domain() - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - def list_data_scans(self, - request: Optional[Union[datascans.ListDataScansRequest, dict]] = None, - *, - parent: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> pagers.ListDataScansPager: - r"""Lists DataScans. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import dataplex_v1 - - def sample_list_data_scans(): - # Create a client - client = dataplex_v1.DataScanServiceClient() - - # Initialize request argument(s) - request = dataplex_v1.ListDataScansRequest( - parent="parent_value", - ) - - # Make the request - page_result = client.list_data_scans(request=request) - - # Handle the response - for response in page_result: - print(response) - - Args: - request (Union[google.cloud.dataplex_v1.types.ListDataScansRequest, dict]): - The request object. List dataScans request. - parent (str): - Required. The resource name of the parent location: - ``projects/{project}/locations/{location_id}`` where - ``project`` refers to a *project_id* or *project_number* - and ``location_id`` refers to a GCP region. 
- - This corresponds to the ``parent`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.cloud.dataplex_v1.services.data_scan_service.pagers.ListDataScansPager: - List dataScans response. - - Iterating over this object will yield - results and resolve additional pages - automatically. - - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([parent]) - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, datascans.ListDataScansRequest): - request = datascans.ListDataScansRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if parent is not None: - request.parent = parent - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.list_data_scans] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), - ) - - # Validate the universe domain. - self._validate_universe_domain() - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # This method is paged; wrap the response in a pager, which provides - # an `__iter__` convenience method. - response = pagers.ListDataScansPager( - method=rpc, - request=request, - response=response, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - def run_data_scan(self, - request: Optional[Union[datascans.RunDataScanRequest, dict]] = None, - *, - name: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> datascans.RunDataScanResponse: - r"""Runs an on-demand execution of a DataScan - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import dataplex_v1 - - def sample_run_data_scan(): - # Create a client - client = dataplex_v1.DataScanServiceClient() - - # Initialize request argument(s) - request = dataplex_v1.RunDataScanRequest( - name="name_value", - ) - - # Make the request - response = client.run_data_scan(request=request) - - # Handle the response - print(response) - - Args: - request (Union[google.cloud.dataplex_v1.types.RunDataScanRequest, dict]): - The request object. 
Run DataScan Request - name (str): - Required. The resource name of the DataScan: - ``projects/{project}/locations/{location_id}/dataScans/{data_scan_id}``. - where ``project`` refers to a *project_id* or - *project_number* and ``location_id`` refers to a GCP - region. - - Only **OnDemand** data scans are allowed. - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.cloud.dataplex_v1.types.RunDataScanResponse: - Run DataScan Response. - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([name]) - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, datascans.RunDataScanRequest): - request = datascans.RunDataScanRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.run_data_scan] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Validate the universe domain. - self._validate_universe_domain() - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - def get_data_scan_job(self, - request: Optional[Union[datascans.GetDataScanJobRequest, dict]] = None, - *, - name: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> datascans.DataScanJob: - r"""Gets a DataScanJob resource. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import dataplex_v1 - - def sample_get_data_scan_job(): - # Create a client - client = dataplex_v1.DataScanServiceClient() - - # Initialize request argument(s) - request = dataplex_v1.GetDataScanJobRequest( - name="name_value", - ) - - # Make the request - response = client.get_data_scan_job(request=request) - - # Handle the response - print(response) - - Args: - request (Union[google.cloud.dataplex_v1.types.GetDataScanJobRequest, dict]): - The request object. Get DataScanJob request. - name (str): - Required. 
The resource name of the DataScanJob: - ``projects/{project}/locations/{location_id}/dataScans/{data_scan_id}/jobs/{data_scan_job_id}`` - where ``project`` refers to a *project_id* or - *project_number* and ``location_id`` refers to a GCP - region. - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.cloud.dataplex_v1.types.DataScanJob: - A DataScanJob represents an instance - of DataScan execution. - - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([name]) - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, datascans.GetDataScanJobRequest): - request = datascans.GetDataScanJobRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.get_data_scan_job] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Validate the universe domain. - self._validate_universe_domain() - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - def list_data_scan_jobs(self, - request: Optional[Union[datascans.ListDataScanJobsRequest, dict]] = None, - *, - parent: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> pagers.ListDataScanJobsPager: - r"""Lists DataScanJobs under the given DataScan. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import dataplex_v1 - - def sample_list_data_scan_jobs(): - # Create a client - client = dataplex_v1.DataScanServiceClient() - - # Initialize request argument(s) - request = dataplex_v1.ListDataScanJobsRequest( - parent="parent_value", - ) - - # Make the request - page_result = client.list_data_scan_jobs(request=request) - - # Handle the response - for response in page_result: - print(response) - - Args: - request (Union[google.cloud.dataplex_v1.types.ListDataScanJobsRequest, dict]): - The request object. List DataScanJobs request. - parent (str): - Required. 
The resource name of the parent environment: - ``projects/{project}/locations/{location_id}/dataScans/{data_scan_id}`` - where ``project`` refers to a *project_id* or - *project_number* and ``location_id`` refers to a GCP - region. - - This corresponds to the ``parent`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.cloud.dataplex_v1.services.data_scan_service.pagers.ListDataScanJobsPager: - List DataScanJobs response. - - Iterating over this object will yield - results and resolve additional pages - automatically. - - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([parent]) - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, datascans.ListDataScanJobsRequest): - request = datascans.ListDataScanJobsRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if parent is not None: - request.parent = parent - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.list_data_scan_jobs] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), - ) - - # Validate the universe domain. - self._validate_universe_domain() - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # This method is paged; wrap the response in a pager, which provides - # an `__iter__` convenience method. - response = pagers.ListDataScanJobsPager( - method=rpc, - request=request, - response=response, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - def generate_data_quality_rules(self, - request: Optional[Union[datascans.GenerateDataQualityRulesRequest, dict]] = None, - *, - name: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> datascans.GenerateDataQualityRulesResponse: - r"""Generates recommended data quality rules based on the - results of a data profiling scan. - - Use the recommendations to build rules for a data - quality scan. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. 
- # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import dataplex_v1 - - def sample_generate_data_quality_rules(): - # Create a client - client = dataplex_v1.DataScanServiceClient() - - # Initialize request argument(s) - request = dataplex_v1.GenerateDataQualityRulesRequest( - name="name_value", - ) - - # Make the request - response = client.generate_data_quality_rules(request=request) - - # Handle the response - print(response) - - Args: - request (Union[google.cloud.dataplex_v1.types.GenerateDataQualityRulesRequest, dict]): - The request object. Request details for generating data - quality rule recommendations. - name (str): - Required. The name must be one of the following: - - - The name of a data scan with at least one successful, - completed data profiling job - - The name of a successful, completed data profiling - job (a data scan job where the job type is data - profiling) - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.cloud.dataplex_v1.types.GenerateDataQualityRulesResponse: - Response details for data quality - rule recommendations. - - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([name]) - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, datascans.GenerateDataQualityRulesRequest): - request = datascans.GenerateDataQualityRulesRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.generate_data_quality_rules] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Validate the universe domain. - self._validate_universe_domain() - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - def __enter__(self) -> "DataScanServiceClient": - return self - - def __exit__(self, type, value, traceback): - """Releases underlying transport's resources. - - .. warning:: - ONLY use as a context manager if the transport is NOT shared - with other clients! Exiting the with block will CLOSE the transport - and may cause errors in other clients! 
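[Editor's note] The ``__exit__`` warning just above is worth a concrete illustration. A minimal sketch, assuming application default credentials and a hypothetical project/location; the client here owns its transport, so closing it on exit is safe:

.. code-block:: python

    from google.cloud import dataplex_v1

    # The transport is closed when the ``with`` block exits, so do not
    # share it with other clients or reuse ``client`` afterwards.
    with dataplex_v1.DataScanServiceClient() as client:
        parent = "projects/my-project/locations/us-central1"  # hypothetical
        for scan in client.list_data_scans(parent=parent):
            print(scan.name)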
- """ - self.transport.close() - - def list_operations( - self, - request: Optional[operations_pb2.ListOperationsRequest] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> operations_pb2.ListOperationsResponse: - r"""Lists operations that match the specified filter in the request. - - Args: - request (:class:`~.operations_pb2.ListOperationsRequest`): - The request object. Request message for - `ListOperations` method. - retry (google.api_core.retry.Retry): Designation of what errors, - if any, should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - Returns: - ~.operations_pb2.ListOperationsResponse: - Response message for ``ListOperations`` method. - """ - # Create or coerce a protobuf request object. - # The request isn't a proto-plus wrapped type, - # so it must be constructed via keyword expansion. - if isinstance(request, dict): - request = operations_pb2.ListOperationsRequest(**request) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.list_operations] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata( - (("name", request.name),)), - ) - - # Validate the universe domain. - self._validate_universe_domain() - - # Send the request. - response = rpc( - request, retry=retry, timeout=timeout, metadata=metadata,) - - # Done; return the response. - return response - - def get_operation( - self, - request: Optional[operations_pb2.GetOperationRequest] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> operations_pb2.Operation: - r"""Gets the latest state of a long-running operation. - - Args: - request (:class:`~.operations_pb2.GetOperationRequest`): - The request object. Request message for - `GetOperation` method. - retry (google.api_core.retry.Retry): Designation of what errors, - if any, should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - Returns: - ~.operations_pb2.Operation: - An ``Operation`` object. - """ - # Create or coerce a protobuf request object. - # The request isn't a proto-plus wrapped type, - # so it must be constructed via keyword expansion. - if isinstance(request, dict): - request = operations_pb2.GetOperationRequest(**request) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.get_operation] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata( - (("name", request.name),)), - ) - - # Validate the universe domain. - self._validate_universe_domain() - - # Send the request. - response = rpc( - request, retry=retry, timeout=timeout, metadata=metadata,) - - # Done; return the response. 
- return response - - def delete_operation( - self, - request: Optional[operations_pb2.DeleteOperationRequest] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> None: - r"""Deletes a long-running operation. - - This method indicates that the client is no longer interested - in the operation result. It does not cancel the operation. - If the server doesn't support this method, it returns - `google.rpc.Code.UNIMPLEMENTED`. - - Args: - request (:class:`~.operations_pb2.DeleteOperationRequest`): - The request object. Request message for - `DeleteOperation` method. - retry (google.api_core.retry.Retry): Designation of what errors, - if any, should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - Returns: - None - """ - # Create or coerce a protobuf request object. - # The request isn't a proto-plus wrapped type, - # so it must be constructed via keyword expansion. - if isinstance(request, dict): - request = operations_pb2.DeleteOperationRequest(**request) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.delete_operation] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata( - (("name", request.name),)), - ) - - # Validate the universe domain. - self._validate_universe_domain() - - # Send the request. - rpc(request, retry=retry, timeout=timeout, metadata=metadata,) - - def cancel_operation( - self, - request: Optional[operations_pb2.CancelOperationRequest] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> None: - r"""Starts asynchronous cancellation on a long-running operation. - - The server makes a best effort to cancel the operation, but success - is not guaranteed. If the server doesn't support this method, it returns - `google.rpc.Code.UNIMPLEMENTED`. - - Args: - request (:class:`~.operations_pb2.CancelOperationRequest`): - The request object. Request message for - `CancelOperation` method. - retry (google.api_core.retry.Retry): Designation of what errors, - if any, should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - Returns: - None - """ - # Create or coerce a protobuf request object. - # The request isn't a proto-plus wrapped type, - # so it must be constructed via keyword expansion. - if isinstance(request, dict): - request = operations_pb2.CancelOperationRequest(**request) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.cancel_operation] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata( - (("name", request.name),)), - ) - - # Validate the universe domain. - self._validate_universe_domain() - - # Send the request. 
- rpc(request, retry=retry, timeout=timeout, metadata=metadata,) - - def get_location( - self, - request: Optional[locations_pb2.GetLocationRequest] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> locations_pb2.Location: - r"""Gets information about a location. - - Args: - request (:class:`~.location_pb2.GetLocationRequest`): - The request object. Request message for - `GetLocation` method. - retry (google.api_core.retry.Retry): Designation of what errors, - if any, should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - Returns: - ~.location_pb2.Location: - Location object. - """ - # Create or coerce a protobuf request object. - # The request isn't a proto-plus wrapped type, - # so it must be constructed via keyword expansion. - if isinstance(request, dict): - request = locations_pb2.GetLocationRequest(**request) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.get_location] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata( - (("name", request.name),)), - ) - - # Validate the universe domain. - self._validate_universe_domain() - - # Send the request. - response = rpc( - request, retry=retry, timeout=timeout, metadata=metadata,) - - # Done; return the response. - return response - - def list_locations( - self, - request: Optional[locations_pb2.ListLocationsRequest] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> locations_pb2.ListLocationsResponse: - r"""Lists information about the supported locations for this service. - - Args: - request (:class:`~.location_pb2.ListLocationsRequest`): - The request object. Request message for - `ListLocations` method. - retry (google.api_core.retry.Retry): Designation of what errors, - if any, should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - Returns: - ~.location_pb2.ListLocationsResponse: - Response message for ``ListLocations`` method. - """ - # Create or coerce a protobuf request object. - # The request isn't a proto-plus wrapped type, - # so it must be constructed via keyword expansion. - if isinstance(request, dict): - request = locations_pb2.ListLocationsRequest(**request) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.list_locations] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata( - (("name", request.name),)), - ) - - # Validate the universe domain. - self._validate_universe_domain() - - # Send the request. - response = rpc( - request, retry=retry, timeout=timeout, metadata=metadata,) - - # Done; return the response. 
- return response - - -DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(gapic_version=package_version.__version__) - - -__all__ = ( - "DataScanServiceClient", -) diff --git a/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/data_scan_service/pagers.py b/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/data_scan_service/pagers.py deleted file mode 100644 index 897a4108391e..000000000000 --- a/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/data_scan_service/pagers.py +++ /dev/null @@ -1,297 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -from google.api_core import gapic_v1 -from google.api_core import retry as retries -from google.api_core import retry_async as retries_async -from typing import Any, AsyncIterator, Awaitable, Callable, Sequence, Tuple, Optional, Iterator, Union -try: - OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault, None] - OptionalAsyncRetry = Union[retries_async.AsyncRetry, gapic_v1.method._MethodDefault, None] -except AttributeError: # pragma: NO COVER - OptionalRetry = Union[retries.Retry, object, None] # type: ignore - OptionalAsyncRetry = Union[retries_async.AsyncRetry, object, None] # type: ignore - -from google.cloud.dataplex_v1.types import datascans - - -class ListDataScansPager: - """A pager for iterating through ``list_data_scans`` requests. - - This class thinly wraps an initial - :class:`google.cloud.dataplex_v1.types.ListDataScansResponse` object, and - provides an ``__iter__`` method to iterate through its - ``data_scans`` field. - - If there are more pages, the ``__iter__`` method will make additional - ``ListDataScans`` requests and continue to iterate - through the ``data_scans`` field on the - corresponding responses. - - All the usual :class:`google.cloud.dataplex_v1.types.ListDataScansResponse` - attributes are available on the pager. If multiple requests are made, only - the most recent response is retained, and thus used for attribute lookup. - """ - def __init__(self, - method: Callable[..., datascans.ListDataScansResponse], - request: datascans.ListDataScansRequest, - response: datascans.ListDataScansResponse, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = ()): - """Instantiate the pager. - - Args: - method (Callable): The method that was originally called, and - which instantiated this pager. - request (google.cloud.dataplex_v1.types.ListDataScansRequest): - The initial request object. - response (google.cloud.dataplex_v1.types.ListDataScansResponse): - The initial response object. - retry (google.api_core.retry.Retry): Designation of what errors, - if any, should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. 
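[Editor's note] ``ListDataScansPager``, defined above, hides pagination behind ``__iter__``; the ``pages`` property is there for per-page access, for example to read ``next_page_token``. A minimal sketch, assuming a hypothetical project and location:

.. code-block:: python

    from google.cloud import dataplex_v1

    client = dataplex_v1.DataScanServiceClient()
    parent = "projects/my-project/locations/us-central1"  # hypothetical

    # Item-wise: the pager issues follow-up ListDataScans requests lazily.
    for scan in client.list_data_scans(parent=parent):
        print(scan.name)

    # Page-wise: one response object per underlying RPC.
    for page in client.list_data_scans(parent=parent).pages:
        print(len(page.data_scans), "scans on this page")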
- """ - self._method = method - self._request = datascans.ListDataScansRequest(request) - self._response = response - self._retry = retry - self._timeout = timeout - self._metadata = metadata - - def __getattr__(self, name: str) -> Any: - return getattr(self._response, name) - - @property - def pages(self) -> Iterator[datascans.ListDataScansResponse]: - yield self._response - while self._response.next_page_token: - self._request.page_token = self._response.next_page_token - self._response = self._method(self._request, retry=self._retry, timeout=self._timeout, metadata=self._metadata) - yield self._response - - def __iter__(self) -> Iterator[datascans.DataScan]: - for page in self.pages: - yield from page.data_scans - - def __repr__(self) -> str: - return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) - - -class ListDataScansAsyncPager: - """A pager for iterating through ``list_data_scans`` requests. - - This class thinly wraps an initial - :class:`google.cloud.dataplex_v1.types.ListDataScansResponse` object, and - provides an ``__aiter__`` method to iterate through its - ``data_scans`` field. - - If there are more pages, the ``__aiter__`` method will make additional - ``ListDataScans`` requests and continue to iterate - through the ``data_scans`` field on the - corresponding responses. - - All the usual :class:`google.cloud.dataplex_v1.types.ListDataScansResponse` - attributes are available on the pager. If multiple requests are made, only - the most recent response is retained, and thus used for attribute lookup. - """ - def __init__(self, - method: Callable[..., Awaitable[datascans.ListDataScansResponse]], - request: datascans.ListDataScansRequest, - response: datascans.ListDataScansResponse, - *, - retry: OptionalAsyncRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = ()): - """Instantiates the pager. - - Args: - method (Callable): The method that was originally called, and - which instantiated this pager. - request (google.cloud.dataplex_v1.types.ListDataScansRequest): - The initial request object. - response (google.cloud.dataplex_v1.types.ListDataScansResponse): - The initial response object. - retry (google.api_core.retry.AsyncRetry): Designation of what errors, - if any, should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - """ - self._method = method - self._request = datascans.ListDataScansRequest(request) - self._response = response - self._retry = retry - self._timeout = timeout - self._metadata = metadata - - def __getattr__(self, name: str) -> Any: - return getattr(self._response, name) - - @property - async def pages(self) -> AsyncIterator[datascans.ListDataScansResponse]: - yield self._response - while self._response.next_page_token: - self._request.page_token = self._response.next_page_token - self._response = await self._method(self._request, retry=self._retry, timeout=self._timeout, metadata=self._metadata) - yield self._response - def __aiter__(self) -> AsyncIterator[datascans.DataScan]: - async def async_generator(): - async for page in self.pages: - for response in page.data_scans: - yield response - - return async_generator() - - def __repr__(self) -> str: - return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) - - -class ListDataScanJobsPager: - """A pager for iterating through ``list_data_scan_jobs`` requests. 
- - This class thinly wraps an initial - :class:`google.cloud.dataplex_v1.types.ListDataScanJobsResponse` object, and - provides an ``__iter__`` method to iterate through its - ``data_scan_jobs`` field. - - If there are more pages, the ``__iter__`` method will make additional - ``ListDataScanJobs`` requests and continue to iterate - through the ``data_scan_jobs`` field on the - corresponding responses. - - All the usual :class:`google.cloud.dataplex_v1.types.ListDataScanJobsResponse` - attributes are available on the pager. If multiple requests are made, only - the most recent response is retained, and thus used for attribute lookup. - """ - def __init__(self, - method: Callable[..., datascans.ListDataScanJobsResponse], - request: datascans.ListDataScanJobsRequest, - response: datascans.ListDataScanJobsResponse, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = ()): - """Instantiate the pager. - - Args: - method (Callable): The method that was originally called, and - which instantiated this pager. - request (google.cloud.dataplex_v1.types.ListDataScanJobsRequest): - The initial request object. - response (google.cloud.dataplex_v1.types.ListDataScanJobsResponse): - The initial response object. - retry (google.api_core.retry.Retry): Designation of what errors, - if any, should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - """ - self._method = method - self._request = datascans.ListDataScanJobsRequest(request) - self._response = response - self._retry = retry - self._timeout = timeout - self._metadata = metadata - - def __getattr__(self, name: str) -> Any: - return getattr(self._response, name) - - @property - def pages(self) -> Iterator[datascans.ListDataScanJobsResponse]: - yield self._response - while self._response.next_page_token: - self._request.page_token = self._response.next_page_token - self._response = self._method(self._request, retry=self._retry, timeout=self._timeout, metadata=self._metadata) - yield self._response - - def __iter__(self) -> Iterator[datascans.DataScanJob]: - for page in self.pages: - yield from page.data_scan_jobs - - def __repr__(self) -> str: - return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) - - -class ListDataScanJobsAsyncPager: - """A pager for iterating through ``list_data_scan_jobs`` requests. - - This class thinly wraps an initial - :class:`google.cloud.dataplex_v1.types.ListDataScanJobsResponse` object, and - provides an ``__aiter__`` method to iterate through its - ``data_scan_jobs`` field. - - If there are more pages, the ``__aiter__`` method will make additional - ``ListDataScanJobs`` requests and continue to iterate - through the ``data_scan_jobs`` field on the - corresponding responses. - - All the usual :class:`google.cloud.dataplex_v1.types.ListDataScanJobsResponse` - attributes are available on the pager. If multiple requests are made, only - the most recent response is retained, and thus used for attribute lookup. - """ - def __init__(self, - method: Callable[..., Awaitable[datascans.ListDataScanJobsResponse]], - request: datascans.ListDataScanJobsRequest, - response: datascans.ListDataScanJobsResponse, - *, - retry: OptionalAsyncRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = ()): - """Instantiates the pager. 
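[Editor's note] The async pagers mirror the sync ones but are consumed through ``__aiter__``, which drives the same ``pages`` generator shown above. A minimal sketch, assuming the generated ``DataScanServiceAsyncClient`` and a hypothetical parent DataScan:

.. code-block:: python

    import asyncio

    from google.cloud import dataplex_v1

    async def main():
        client = dataplex_v1.DataScanServiceAsyncClient()
        parent = (
            "projects/my-project/locations/us-central1/dataScans/my-scan"
        )  # hypothetical
        # The call itself is awaited; iterating the returned pager then
        # resolves additional pages on demand.
        pager = await client.list_data_scan_jobs(parent=parent)
        async for job in pager:
            print(job.name)

    asyncio.run(main())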
- - Args: - method (Callable): The method that was originally called, and - which instantiated this pager. - request (google.cloud.dataplex_v1.types.ListDataScanJobsRequest): - The initial request object. - response (google.cloud.dataplex_v1.types.ListDataScanJobsResponse): - The initial response object. - retry (google.api_core.retry.AsyncRetry): Designation of what errors, - if any, should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - """ - self._method = method - self._request = datascans.ListDataScanJobsRequest(request) - self._response = response - self._retry = retry - self._timeout = timeout - self._metadata = metadata - - def __getattr__(self, name: str) -> Any: - return getattr(self._response, name) - - @property - async def pages(self) -> AsyncIterator[datascans.ListDataScanJobsResponse]: - yield self._response - while self._response.next_page_token: - self._request.page_token = self._response.next_page_token - self._response = await self._method(self._request, retry=self._retry, timeout=self._timeout, metadata=self._metadata) - yield self._response - def __aiter__(self) -> AsyncIterator[datascans.DataScanJob]: - async def async_generator(): - async for page in self.pages: - for response in page.data_scan_jobs: - yield response - - return async_generator() - - def __repr__(self) -> str: - return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) diff --git a/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/data_scan_service/transports/README.rst b/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/data_scan_service/transports/README.rst deleted file mode 100644 index e27965be0a36..000000000000 --- a/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/data_scan_service/transports/README.rst +++ /dev/null @@ -1,9 +0,0 @@ - -transport inheritance structure -_______________________________ - -`DataScanServiceTransport` is the ABC for all transports. -- public child `DataScanServiceGrpcTransport` for sync gRPC transport (defined in `grpc.py`). -- public child `DataScanServiceGrpcAsyncIOTransport` for async gRPC transport (defined in `grpc_asyncio.py`). -- private child `_BaseDataScanServiceRestTransport` for base REST transport with inner classes `_BaseMETHOD` (defined in `rest_base.py`). -- public child `DataScanServiceRestTransport` for sync REST transport with inner classes `METHOD` derived from the parent's corresponding `_BaseMETHOD` classes (defined in `rest.py`). diff --git a/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/data_scan_service/transports/__init__.py b/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/data_scan_service/transports/__init__.py deleted file mode 100644 index b6a6e3c5325a..000000000000 --- a/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/data_scan_service/transports/__init__.py +++ /dev/null @@ -1,33 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -from collections import OrderedDict -from typing import Dict, Type - -from .base import DataScanServiceTransport -from .grpc import DataScanServiceGrpcTransport -from .grpc_asyncio import DataScanServiceGrpcAsyncIOTransport - - -# Compile a registry of transports. -_transport_registry = OrderedDict() # type: Dict[str, Type[DataScanServiceTransport]] -_transport_registry['grpc'] = DataScanServiceGrpcTransport -_transport_registry['grpc_asyncio'] = DataScanServiceGrpcAsyncIOTransport - -__all__ = ( - 'DataScanServiceTransport', - 'DataScanServiceGrpcTransport', - 'DataScanServiceGrpcAsyncIOTransport', -) diff --git a/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/data_scan_service/transports/base.py b/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/data_scan_service/transports/base.py deleted file mode 100644 index bbdd9e2dffed..000000000000 --- a/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/data_scan_service/transports/base.py +++ /dev/null @@ -1,358 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
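[Editor's note] The ``_transport_registry`` in ``transports/__init__.py`` above is what turns the string a user passes as ``transport=`` into a transport class. It is private by convention; the lookup it performs amounts to this sketch:

.. code-block:: python

    from google.cloud.dataplex_v1.services.data_scan_service import transports

    # "grpc" and "grpc_asyncio" are the registered keys shown above.
    transport_cls = transports._transport_registry["grpc"]
    assert transport_cls is transports.DataScanServiceGrpcTransport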
-# -import abc -from typing import Awaitable, Callable, Dict, Optional, Sequence, Union - -from google.cloud.dataplex_v1 import gapic_version as package_version - -import google.auth # type: ignore -import google.api_core -from google.api_core import exceptions as core_exceptions -from google.api_core import gapic_v1 -from google.api_core import retry as retries -from google.api_core import operations_v1 -from google.auth import credentials as ga_credentials # type: ignore -from google.oauth2 import service_account # type: ignore - -from google.cloud.dataplex_v1.types import datascans -from google.cloud.location import locations_pb2 # type: ignore -from google.iam.v1 import iam_policy_pb2 # type: ignore -from google.iam.v1 import policy_pb2 # type: ignore -from google.longrunning import operations_pb2 # type: ignore - -DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(gapic_version=package_version.__version__) - - -class DataScanServiceTransport(abc.ABC): - """Abstract transport class for DataScanService.""" - - AUTH_SCOPES = ( - 'https://www.googleapis.com/auth/cloud-platform', - ) - - DEFAULT_HOST: str = 'dataplex.googleapis.com' - def __init__( - self, *, - host: str = DEFAULT_HOST, - credentials: Optional[ga_credentials.Credentials] = None, - credentials_file: Optional[str] = None, - scopes: Optional[Sequence[str]] = None, - quota_project_id: Optional[str] = None, - client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, - always_use_jwt_access: Optional[bool] = False, - api_audience: Optional[str] = None, - **kwargs, - ) -> None: - """Instantiate the transport. - - Args: - host (Optional[str]): - The hostname to connect to (default: 'dataplex.googleapis.com'). - credentials (Optional[google.auth.credentials.Credentials]): The - authorization credentials to attach to requests. These - credentials identify the application to the service; if none - are specified, the client will attempt to ascertain the - credentials from the environment. - credentials_file (Optional[str]): A file with credentials that can - be loaded with :func:`google.auth.load_credentials_from_file`. - This argument is mutually exclusive with credentials. - scopes (Optional[Sequence[str]]): A list of scopes. - quota_project_id (Optional[str]): An optional project to use for billing - and quota. - client_info (google.api_core.gapic_v1.client_info.ClientInfo): - The client info used to send a user-agent string along with - API requests. If ``None``, then default info will be used. - Generally, you only need to set this if you're developing - your own client library. - always_use_jwt_access (Optional[bool]): Whether self signed JWT should - be used for service account credentials. - """ - - scopes_kwargs = {"scopes": scopes, "default_scopes": self.AUTH_SCOPES} - - # Save the scopes. - self._scopes = scopes - if not hasattr(self, "_ignore_credentials"): - self._ignore_credentials: bool = False - - # If no credentials are provided, then determine the appropriate - # defaults. 
- if credentials and credentials_file: - raise core_exceptions.DuplicateCredentialArgs("'credentials_file' and 'credentials' are mutually exclusive") - - if credentials_file is not None: - credentials, _ = google.auth.load_credentials_from_file( - credentials_file, - **scopes_kwargs, - quota_project_id=quota_project_id - ) - elif credentials is None and not self._ignore_credentials: - credentials, _ = google.auth.default(**scopes_kwargs, quota_project_id=quota_project_id) - # Don't apply audience if the credentials file passed from user. - if hasattr(credentials, "with_gdch_audience"): - credentials = credentials.with_gdch_audience(api_audience if api_audience else host) - - # If the credentials are service account credentials, then always try to use self signed JWT. - if always_use_jwt_access and isinstance(credentials, service_account.Credentials) and hasattr(service_account.Credentials, "with_always_use_jwt_access"): - credentials = credentials.with_always_use_jwt_access(True) - - # Save the credentials. - self._credentials = credentials - - # Save the hostname. Default to port 443 (HTTPS) if none is specified. - if ':' not in host: - host += ':443' - self._host = host - - @property - def host(self): - return self._host - - def _prep_wrapped_messages(self, client_info): - # Precompute the wrapped methods. - self._wrapped_methods = { - self.create_data_scan: gapic_v1.method.wrap_method( - self.create_data_scan, - default_timeout=None, - client_info=client_info, - ), - self.update_data_scan: gapic_v1.method.wrap_method( - self.update_data_scan, - default_timeout=None, - client_info=client_info, - ), - self.delete_data_scan: gapic_v1.method.wrap_method( - self.delete_data_scan, - default_timeout=None, - client_info=client_info, - ), - self.get_data_scan: gapic_v1.method.wrap_method( - self.get_data_scan, - default_timeout=None, - client_info=client_info, - ), - self.list_data_scans: gapic_v1.method.wrap_method( - self.list_data_scans, - default_timeout=None, - client_info=client_info, - ), - self.run_data_scan: gapic_v1.method.wrap_method( - self.run_data_scan, - default_timeout=None, - client_info=client_info, - ), - self.get_data_scan_job: gapic_v1.method.wrap_method( - self.get_data_scan_job, - default_timeout=None, - client_info=client_info, - ), - self.list_data_scan_jobs: gapic_v1.method.wrap_method( - self.list_data_scan_jobs, - default_timeout=None, - client_info=client_info, - ), - self.generate_data_quality_rules: gapic_v1.method.wrap_method( - self.generate_data_quality_rules, - default_timeout=None, - client_info=client_info, - ), - self.get_location: gapic_v1.method.wrap_method( - self.get_location, - default_timeout=None, - client_info=client_info, - ), - self.list_locations: gapic_v1.method.wrap_method( - self.list_locations, - default_timeout=None, - client_info=client_info, - ), - self.cancel_operation: gapic_v1.method.wrap_method( - self.cancel_operation, - default_timeout=None, - client_info=client_info, - ), - self.delete_operation: gapic_v1.method.wrap_method( - self.delete_operation, - default_timeout=None, - client_info=client_info, - ), - self.get_operation: gapic_v1.method.wrap_method( - self.get_operation, - default_timeout=None, - client_info=client_info, - ), - self.list_operations: gapic_v1.method.wrap_method( - self.list_operations, - default_timeout=None, - client_info=client_info, - ), - } - - def close(self): - """Closes resources associated with the transport. - - .. 
warning:: - Only call this method if the transport is NOT shared - with other clients - this may cause errors in other clients! - """ - raise NotImplementedError() - - @property - def operations_client(self): - """Return the client designed to process long-running operations.""" - raise NotImplementedError() - - @property - def create_data_scan(self) -> Callable[ - [datascans.CreateDataScanRequest], - Union[ - operations_pb2.Operation, - Awaitable[operations_pb2.Operation] - ]]: - raise NotImplementedError() - - @property - def update_data_scan(self) -> Callable[ - [datascans.UpdateDataScanRequest], - Union[ - operations_pb2.Operation, - Awaitable[operations_pb2.Operation] - ]]: - raise NotImplementedError() - - @property - def delete_data_scan(self) -> Callable[ - [datascans.DeleteDataScanRequest], - Union[ - operations_pb2.Operation, - Awaitable[operations_pb2.Operation] - ]]: - raise NotImplementedError() - - @property - def get_data_scan(self) -> Callable[ - [datascans.GetDataScanRequest], - Union[ - datascans.DataScan, - Awaitable[datascans.DataScan] - ]]: - raise NotImplementedError() - - @property - def list_data_scans(self) -> Callable[ - [datascans.ListDataScansRequest], - Union[ - datascans.ListDataScansResponse, - Awaitable[datascans.ListDataScansResponse] - ]]: - raise NotImplementedError() - - @property - def run_data_scan(self) -> Callable[ - [datascans.RunDataScanRequest], - Union[ - datascans.RunDataScanResponse, - Awaitable[datascans.RunDataScanResponse] - ]]: - raise NotImplementedError() - - @property - def get_data_scan_job(self) -> Callable[ - [datascans.GetDataScanJobRequest], - Union[ - datascans.DataScanJob, - Awaitable[datascans.DataScanJob] - ]]: - raise NotImplementedError() - - @property - def list_data_scan_jobs(self) -> Callable[ - [datascans.ListDataScanJobsRequest], - Union[ - datascans.ListDataScanJobsResponse, - Awaitable[datascans.ListDataScanJobsResponse] - ]]: - raise NotImplementedError() - - @property - def generate_data_quality_rules(self) -> Callable[ - [datascans.GenerateDataQualityRulesRequest], - Union[ - datascans.GenerateDataQualityRulesResponse, - Awaitable[datascans.GenerateDataQualityRulesResponse] - ]]: - raise NotImplementedError() - - @property - def list_operations( - self, - ) -> Callable[ - [operations_pb2.ListOperationsRequest], - Union[operations_pb2.ListOperationsResponse, Awaitable[operations_pb2.ListOperationsResponse]], - ]: - raise NotImplementedError() - - @property - def get_operation( - self, - ) -> Callable[ - [operations_pb2.GetOperationRequest], - Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], - ]: - raise NotImplementedError() - - @property - def cancel_operation( - self, - ) -> Callable[ - [operations_pb2.CancelOperationRequest], - None, - ]: - raise NotImplementedError() - - @property - def delete_operation( - self, - ) -> Callable[ - [operations_pb2.DeleteOperationRequest], - None, - ]: - raise NotImplementedError() - - @property - def get_location(self, - ) -> Callable[ - [locations_pb2.GetLocationRequest], - Union[locations_pb2.Location, Awaitable[locations_pb2.Location]], - ]: - raise NotImplementedError() - - @property - def list_locations(self, - ) -> Callable[ - [locations_pb2.ListLocationsRequest], - Union[locations_pb2.ListLocationsResponse, Awaitable[locations_pb2.ListLocationsResponse]], - ]: - raise NotImplementedError() - - @property - def kind(self) -> str: - raise NotImplementedError() - - -__all__ = ( - 'DataScanServiceTransport', -) diff --git 
a/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/data_scan_service/transports/grpc.py b/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/data_scan_service/transports/grpc.py deleted file mode 100644 index 7fa57f382e72..000000000000 --- a/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/data_scan_service/transports/grpc.py +++ /dev/null @@ -1,615 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -import warnings -from typing import Callable, Dict, Optional, Sequence, Tuple, Union - -from google.api_core import grpc_helpers -from google.api_core import operations_v1 -from google.api_core import gapic_v1 -import google.auth # type: ignore -from google.auth import credentials as ga_credentials # type: ignore -from google.auth.transport.grpc import SslCredentials # type: ignore - -import grpc # type: ignore - -from google.cloud.dataplex_v1.types import datascans -from google.cloud.location import locations_pb2 # type: ignore -from google.iam.v1 import iam_policy_pb2 # type: ignore -from google.iam.v1 import policy_pb2 # type: ignore -from google.longrunning import operations_pb2 # type: ignore -from .base import DataScanServiceTransport, DEFAULT_CLIENT_INFO - - -class DataScanServiceGrpcTransport(DataScanServiceTransport): - """gRPC backend transport for DataScanService. - - DataScanService manages DataScan resources which can be - configured to run various types of data scanning workload and - generate enriched metadata (e.g. Data Profile, Data Quality) for - the data source. - - This class defines the same methods as the primary client, so the - primary client can load the underlying transport implementation - and call it. - - It sends protocol buffers over the wire using gRPC (which is built on - top of HTTP/2); the ``grpcio`` package must be installed. - """ - _stubs: Dict[str, Callable] - - def __init__(self, *, - host: str = 'dataplex.googleapis.com', - credentials: Optional[ga_credentials.Credentials] = None, - credentials_file: Optional[str] = None, - scopes: Optional[Sequence[str]] = None, - channel: Optional[Union[grpc.Channel, Callable[..., grpc.Channel]]] = None, - api_mtls_endpoint: Optional[str] = None, - client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None, - ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None, - client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, - quota_project_id: Optional[str] = None, - client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, - always_use_jwt_access: Optional[bool] = False, - api_audience: Optional[str] = None, - ) -> None: - """Instantiate the transport. - - Args: - host (Optional[str]): - The hostname to connect to (default: 'dataplex.googleapis.com'). - credentials (Optional[google.auth.credentials.Credentials]): The - authorization credentials to attach to requests. 
These - credentials identify the application to the service; if none - are specified, the client will attempt to ascertain the - credentials from the environment. - This argument is ignored if a ``channel`` instance is provided. - credentials_file (Optional[str]): A file with credentials that can - be loaded with :func:`google.auth.load_credentials_from_file`. - This argument is ignored if a ``channel`` instance is provided. - scopes (Optional(Sequence[str])): A list of scopes. This argument is - ignored if a ``channel`` instance is provided. - channel (Optional[Union[grpc.Channel, Callable[..., grpc.Channel]]]): - A ``Channel`` instance through which to make calls, or a Callable - that constructs and returns one. If set to None, ``self.create_channel`` - is used to create the channel. If a Callable is given, it will be called - with the same arguments as used in ``self.create_channel``. - api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint. - If provided, it overrides the ``host`` argument and tries to create - a mutual TLS channel with client SSL credentials from - ``client_cert_source`` or application default SSL credentials. - client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]): - Deprecated. A callback to provide client SSL certificate bytes and - private key bytes, both in PEM format. It is ignored if - ``api_mtls_endpoint`` is None. - ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials - for the grpc channel. It is ignored if a ``channel`` instance is provided. - client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]): - A callback to provide client certificate bytes and private key bytes, - both in PEM format. It is used to configure a mutual TLS channel. It is - ignored if a ``channel`` instance or ``ssl_channel_credentials`` is provided. - quota_project_id (Optional[str]): An optional project to use for billing - and quota. - client_info (google.api_core.gapic_v1.client_info.ClientInfo): - The client info used to send a user-agent string along with - API requests. If ``None``, then default info will be used. - Generally, you only need to set this if you're developing - your own client library. - always_use_jwt_access (Optional[bool]): Whether self signed JWT should - be used for service account credentials. - - Raises: - google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport - creation failed for any reason. - google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` - and ``credentials_file`` are passed. - """ - self._grpc_channel = None - self._ssl_channel_credentials = ssl_channel_credentials - self._stubs: Dict[str, Callable] = {} - self._operations_client: Optional[operations_v1.OperationsClient] = None - - if api_mtls_endpoint: - warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning) - if client_cert_source: - warnings.warn("client_cert_source is deprecated", DeprecationWarning) - - if isinstance(channel, grpc.Channel): - # Ignore credentials if a channel was passed. - credentials = None - self._ignore_credentials = True - # If a channel was explicitly provided, set it. - self._grpc_channel = channel - self._ssl_channel_credentials = None - - else: - if api_mtls_endpoint: - host = api_mtls_endpoint - - # Create SSL credentials with client_cert_source or application - # default SSL credentials. 
- if client_cert_source: - cert, key = client_cert_source() - self._ssl_channel_credentials = grpc.ssl_channel_credentials( - certificate_chain=cert, private_key=key - ) - else: - self._ssl_channel_credentials = SslCredentials().ssl_credentials - - else: - if client_cert_source_for_mtls and not ssl_channel_credentials: - cert, key = client_cert_source_for_mtls() - self._ssl_channel_credentials = grpc.ssl_channel_credentials( - certificate_chain=cert, private_key=key - ) - - # The base transport sets the host, credentials and scopes - super().__init__( - host=host, - credentials=credentials, - credentials_file=credentials_file, - scopes=scopes, - quota_project_id=quota_project_id, - client_info=client_info, - always_use_jwt_access=always_use_jwt_access, - api_audience=api_audience, - ) - - if not self._grpc_channel: - # initialize with the provided callable or the default channel - channel_init = channel or type(self).create_channel - self._grpc_channel = channel_init( - self._host, - # use the credentials which are saved - credentials=self._credentials, - # Set ``credentials_file`` to ``None`` here as - # the credentials that we saved earlier should be used. - credentials_file=None, - scopes=self._scopes, - ssl_credentials=self._ssl_channel_credentials, - quota_project_id=quota_project_id, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ], - ) - - # Wrap messages. This must be done after self._grpc_channel exists - self._prep_wrapped_messages(client_info) - - @classmethod - def create_channel(cls, - host: str = 'dataplex.googleapis.com', - credentials: Optional[ga_credentials.Credentials] = None, - credentials_file: Optional[str] = None, - scopes: Optional[Sequence[str]] = None, - quota_project_id: Optional[str] = None, - **kwargs) -> grpc.Channel: - """Create and return a gRPC channel object. - Args: - host (Optional[str]): The host for the channel to use. - credentials (Optional[~.Credentials]): The - authorization credentials to attach to requests. These - credentials identify this application to the service. If - none are specified, the client will attempt to ascertain - the credentials from the environment. - credentials_file (Optional[str]): A file with credentials that can - be loaded with :func:`google.auth.load_credentials_from_file`. - This argument is mutually exclusive with credentials. - scopes (Optional[Sequence[str]]): A optional list of scopes needed for this - service. These are only used when credentials are not specified and - are passed to :func:`google.auth.default`. - quota_project_id (Optional[str]): An optional project to use for billing - and quota. - kwargs (Optional[dict]): Keyword arguments, which are passed to the - channel creation. - Returns: - grpc.Channel: A gRPC channel object. - - Raises: - google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` - and ``credentials_file`` are passed. - """ - - return grpc_helpers.create_channel( - host, - credentials=credentials, - credentials_file=credentials_file, - quota_project_id=quota_project_id, - default_scopes=cls.AUTH_SCOPES, - scopes=scopes, - default_host=cls.DEFAULT_HOST, - **kwargs - ) - - @property - def grpc_channel(self) -> grpc.Channel: - """Return the channel designed to connect to this service. - """ - return self._grpc_channel - - @property - def operations_client(self) -> operations_v1.OperationsClient: - """Create the client designed to process long-running operations. 
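[Editor's note] ``create_channel`` and the ``channel`` argument interact in a way that is easy to get wrong: once a ready channel instance is supplied, the transport ignores ``credentials``/``credentials_file`` entirely, as the code above shows. A minimal sketch of explicit channel injection under that assumption:

.. code-block:: python

    from google.cloud import dataplex_v1
    from google.cloud.dataplex_v1.services.data_scan_service import transports

    # Build a channel with the transport's own defaults (scopes, host).
    channel = transports.DataScanServiceGrpcTransport.create_channel(
        "dataplex.googleapis.com",
    )

    # Passing a ready channel means ``credentials``/``credentials_file``
    # are ignored; the channel already carries its own credentials.
    transport = transports.DataScanServiceGrpcTransport(channel=channel)
    client = dataplex_v1.DataScanServiceClient(transport=transport)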
- - This property caches on the instance; repeated calls return the same - client. - """ - # Quick check: Only create a new client if we do not already have one. - if self._operations_client is None: - self._operations_client = operations_v1.OperationsClient( - self.grpc_channel - ) - - # Return the client from cache. - return self._operations_client - - @property - def create_data_scan(self) -> Callable[ - [datascans.CreateDataScanRequest], - operations_pb2.Operation]: - r"""Return a callable for the create data scan method over gRPC. - - Creates a DataScan resource. - - Returns: - Callable[[~.CreateDataScanRequest], - ~.Operation]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'create_data_scan' not in self._stubs: - self._stubs['create_data_scan'] = self.grpc_channel.unary_unary( - '/google.cloud.dataplex.v1.DataScanService/CreateDataScan', - request_serializer=datascans.CreateDataScanRequest.serialize, - response_deserializer=operations_pb2.Operation.FromString, - ) - return self._stubs['create_data_scan'] - - @property - def update_data_scan(self) -> Callable[ - [datascans.UpdateDataScanRequest], - operations_pb2.Operation]: - r"""Return a callable for the update data scan method over gRPC. - - Updates a DataScan resource. - - Returns: - Callable[[~.UpdateDataScanRequest], - ~.Operation]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'update_data_scan' not in self._stubs: - self._stubs['update_data_scan'] = self.grpc_channel.unary_unary( - '/google.cloud.dataplex.v1.DataScanService/UpdateDataScan', - request_serializer=datascans.UpdateDataScanRequest.serialize, - response_deserializer=operations_pb2.Operation.FromString, - ) - return self._stubs['update_data_scan'] - - @property - def delete_data_scan(self) -> Callable[ - [datascans.DeleteDataScanRequest], - operations_pb2.Operation]: - r"""Return a callable for the delete data scan method over gRPC. - - Deletes a DataScan resource. - - Returns: - Callable[[~.DeleteDataScanRequest], - ~.Operation]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'delete_data_scan' not in self._stubs: - self._stubs['delete_data_scan'] = self.grpc_channel.unary_unary( - '/google.cloud.dataplex.v1.DataScanService/DeleteDataScan', - request_serializer=datascans.DeleteDataScanRequest.serialize, - response_deserializer=operations_pb2.Operation.FromString, - ) - return self._stubs['delete_data_scan'] - - @property - def get_data_scan(self) -> Callable[ - [datascans.GetDataScanRequest], - datascans.DataScan]: - r"""Return a callable for the get data scan method over gRPC. - - Gets a DataScan resource. - - Returns: - Callable[[~.GetDataScanRequest], - ~.DataScan]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. 
- # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'get_data_scan' not in self._stubs: - self._stubs['get_data_scan'] = self.grpc_channel.unary_unary( - '/google.cloud.dataplex.v1.DataScanService/GetDataScan', - request_serializer=datascans.GetDataScanRequest.serialize, - response_deserializer=datascans.DataScan.deserialize, - ) - return self._stubs['get_data_scan'] - - @property - def list_data_scans(self) -> Callable[ - [datascans.ListDataScansRequest], - datascans.ListDataScansResponse]: - r"""Return a callable for the list data scans method over gRPC. - - Lists DataScans. - - Returns: - Callable[[~.ListDataScansRequest], - ~.ListDataScansResponse]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'list_data_scans' not in self._stubs: - self._stubs['list_data_scans'] = self.grpc_channel.unary_unary( - '/google.cloud.dataplex.v1.DataScanService/ListDataScans', - request_serializer=datascans.ListDataScansRequest.serialize, - response_deserializer=datascans.ListDataScansResponse.deserialize, - ) - return self._stubs['list_data_scans'] - - @property - def run_data_scan(self) -> Callable[ - [datascans.RunDataScanRequest], - datascans.RunDataScanResponse]: - r"""Return a callable for the run data scan method over gRPC. - - Runs an on-demand execution of a DataScan - - Returns: - Callable[[~.RunDataScanRequest], - ~.RunDataScanResponse]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'run_data_scan' not in self._stubs: - self._stubs['run_data_scan'] = self.grpc_channel.unary_unary( - '/google.cloud.dataplex.v1.DataScanService/RunDataScan', - request_serializer=datascans.RunDataScanRequest.serialize, - response_deserializer=datascans.RunDataScanResponse.deserialize, - ) - return self._stubs['run_data_scan'] - - @property - def get_data_scan_job(self) -> Callable[ - [datascans.GetDataScanJobRequest], - datascans.DataScanJob]: - r"""Return a callable for the get data scan job method over gRPC. - - Gets a DataScanJob resource. - - Returns: - Callable[[~.GetDataScanJobRequest], - ~.DataScanJob]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'get_data_scan_job' not in self._stubs: - self._stubs['get_data_scan_job'] = self.grpc_channel.unary_unary( - '/google.cloud.dataplex.v1.DataScanService/GetDataScanJob', - request_serializer=datascans.GetDataScanJobRequest.serialize, - response_deserializer=datascans.DataScanJob.deserialize, - ) - return self._stubs['get_data_scan_job'] - - @property - def list_data_scan_jobs(self) -> Callable[ - [datascans.ListDataScanJobsRequest], - datascans.ListDataScanJobsResponse]: - r"""Return a callable for the list data scan jobs method over gRPC. - - Lists DataScanJobs under the given DataScan. 
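Every RPC property in this transport repeats the same lazily-cached stub idiom; distilled into a hypothetical helper (not part of the generated file), the pattern amounts to:

.. code-block:: python

    # Hypothetical distillation of the per-RPC pattern above: build the
    # unary-unary stub once per method name, then serve it from the cache,
    # so each RPC holds exactly one stub for the lifetime of the channel.
    def _cached_unary_unary(self, name, path, request_serializer, response_deserializer):
        if name not in self._stubs:
            self._stubs[name] = self.grpc_channel.unary_unary(
                path,
                request_serializer=request_serializer,
                response_deserializer=response_deserializer,
            )
        return self._stubs[name]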
- - Returns: - Callable[[~.ListDataScanJobsRequest], - ~.ListDataScanJobsResponse]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'list_data_scan_jobs' not in self._stubs: - self._stubs['list_data_scan_jobs'] = self.grpc_channel.unary_unary( - '/google.cloud.dataplex.v1.DataScanService/ListDataScanJobs', - request_serializer=datascans.ListDataScanJobsRequest.serialize, - response_deserializer=datascans.ListDataScanJobsResponse.deserialize, - ) - return self._stubs['list_data_scan_jobs'] - - @property - def generate_data_quality_rules(self) -> Callable[ - [datascans.GenerateDataQualityRulesRequest], - datascans.GenerateDataQualityRulesResponse]: - r"""Return a callable for the generate data quality rules method over gRPC. - - Generates recommended data quality rules based on the - results of a data profiling scan. - - Use the recommendations to build rules for a data - quality scan. - - Returns: - Callable[[~.GenerateDataQualityRulesRequest], - ~.GenerateDataQualityRulesResponse]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'generate_data_quality_rules' not in self._stubs: - self._stubs['generate_data_quality_rules'] = self.grpc_channel.unary_unary( - '/google.cloud.dataplex.v1.DataScanService/GenerateDataQualityRules', - request_serializer=datascans.GenerateDataQualityRulesRequest.serialize, - response_deserializer=datascans.GenerateDataQualityRulesResponse.deserialize, - ) - return self._stubs['generate_data_quality_rules'] - - def close(self): - self.grpc_channel.close() - - @property - def delete_operation( - self, - ) -> Callable[[operations_pb2.DeleteOperationRequest], None]: - r"""Return a callable for the delete_operation method over gRPC. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if "delete_operation" not in self._stubs: - self._stubs["delete_operation"] = self.grpc_channel.unary_unary( - "/google.longrunning.Operations/DeleteOperation", - request_serializer=operations_pb2.DeleteOperationRequest.SerializeToString, - response_deserializer=None, - ) - return self._stubs["delete_operation"] - - @property - def cancel_operation( - self, - ) -> Callable[[operations_pb2.CancelOperationRequest], None]: - r"""Return a callable for the cancel_operation method over gRPC. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if "cancel_operation" not in self._stubs: - self._stubs["cancel_operation"] = self.grpc_channel.unary_unary( - "/google.longrunning.Operations/CancelOperation", - request_serializer=operations_pb2.CancelOperationRequest.SerializeToString, - response_deserializer=None, - ) - return self._stubs["cancel_operation"] - - @property - def get_operation( - self, - ) -> Callable[[operations_pb2.GetOperationRequest], operations_pb2.Operation]: - r"""Return a callable for the get_operation method over gRPC. 
- """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if "get_operation" not in self._stubs: - self._stubs["get_operation"] = self.grpc_channel.unary_unary( - "/google.longrunning.Operations/GetOperation", - request_serializer=operations_pb2.GetOperationRequest.SerializeToString, - response_deserializer=operations_pb2.Operation.FromString, - ) - return self._stubs["get_operation"] - - @property - def list_operations( - self, - ) -> Callable[[operations_pb2.ListOperationsRequest], operations_pb2.ListOperationsResponse]: - r"""Return a callable for the list_operations method over gRPC. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if "list_operations" not in self._stubs: - self._stubs["list_operations"] = self.grpc_channel.unary_unary( - "/google.longrunning.Operations/ListOperations", - request_serializer=operations_pb2.ListOperationsRequest.SerializeToString, - response_deserializer=operations_pb2.ListOperationsResponse.FromString, - ) - return self._stubs["list_operations"] - - @property - def list_locations( - self, - ) -> Callable[[locations_pb2.ListLocationsRequest], locations_pb2.ListLocationsResponse]: - r"""Return a callable for the list locations method over gRPC. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if "list_locations" not in self._stubs: - self._stubs["list_locations"] = self.grpc_channel.unary_unary( - "/google.cloud.location.Locations/ListLocations", - request_serializer=locations_pb2.ListLocationsRequest.SerializeToString, - response_deserializer=locations_pb2.ListLocationsResponse.FromString, - ) - return self._stubs["list_locations"] - - @property - def get_location( - self, - ) -> Callable[[locations_pb2.GetLocationRequest], locations_pb2.Location]: - r"""Return a callable for the list locations method over gRPC. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if "get_location" not in self._stubs: - self._stubs["get_location"] = self.grpc_channel.unary_unary( - "/google.cloud.location.Locations/GetLocation", - request_serializer=locations_pb2.GetLocationRequest.SerializeToString, - response_deserializer=locations_pb2.Location.FromString, - ) - return self._stubs["get_location"] - - @property - def kind(self) -> str: - return "grpc" - - -__all__ = ( - 'DataScanServiceGrpcTransport', -) diff --git a/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/data_scan_service/transports/grpc_asyncio.py b/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/data_scan_service/transports/grpc_asyncio.py deleted file mode 100644 index d93e93a9d327..000000000000 --- a/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/data_scan_service/transports/grpc_asyncio.py +++ /dev/null @@ -1,706 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-import inspect
-import warnings
-from typing import Awaitable, Callable, Dict, Optional, Sequence, Tuple, Union
-
-from google.api_core import gapic_v1
-from google.api_core import grpc_helpers_async
-from google.api_core import exceptions as core_exceptions
-from google.api_core import retry_async as retries
-from google.api_core import operations_v1
-from google.auth import credentials as ga_credentials  # type: ignore
-from google.auth.transport.grpc import SslCredentials  # type: ignore
-
-import grpc  # type: ignore
-from grpc.experimental import aio  # type: ignore
-
-from google.cloud.dataplex_v1.types import datascans
-from google.cloud.location import locations_pb2  # type: ignore
-from google.iam.v1 import iam_policy_pb2  # type: ignore
-from google.iam.v1 import policy_pb2  # type: ignore
-from google.longrunning import operations_pb2  # type: ignore
-from .base import DataScanServiceTransport, DEFAULT_CLIENT_INFO
-from .grpc import DataScanServiceGrpcTransport
-
-
-class DataScanServiceGrpcAsyncIOTransport(DataScanServiceTransport):
-    """gRPC AsyncIO backend transport for DataScanService.
-
-    DataScanService manages DataScan resources which can be
-    configured to run various types of data scanning workloads and
-    generate enriched metadata (e.g. Data Profile, Data Quality) for
-    the data source.
-
-    This class defines the same methods as the primary client, so the
-    primary client can load the underlying transport implementation
-    and call it.
-
-    It sends protocol buffers over the wire using gRPC (which is built on
-    top of HTTP/2); the ``grpcio`` package must be installed.
-    """
-
-    _grpc_channel: aio.Channel
-    _stubs: Dict[str, Callable] = {}
-
-    @classmethod
-    def create_channel(cls,
-                       host: str = 'dataplex.googleapis.com',
-                       credentials: Optional[ga_credentials.Credentials] = None,
-                       credentials_file: Optional[str] = None,
-                       scopes: Optional[Sequence[str]] = None,
-                       quota_project_id: Optional[str] = None,
-                       **kwargs) -> aio.Channel:
-        """Create and return a gRPC AsyncIO channel object.
-        Args:
-            host (Optional[str]): The host for the channel to use.
-            credentials (Optional[~.Credentials]): The
-                authorization credentials to attach to requests. These
-                credentials identify this application to the service. If
-                none are specified, the client will attempt to ascertain
-                the credentials from the environment.
-            credentials_file (Optional[str]): A file with credentials that can
-                be loaded with :func:`google.auth.load_credentials_from_file`.
-            scopes (Optional[Sequence[str]]): An optional list of scopes needed for this
-                service. These are only used when credentials are not specified and
-                are passed to :func:`google.auth.default`.
-            quota_project_id (Optional[str]): An optional project to use for billing
-                and quota.
-            kwargs (Optional[dict]): Keyword arguments, which are passed to the
-                channel creation.
-        Returns:
-            aio.Channel: A gRPC AsyncIO channel object.
- """ - - return grpc_helpers_async.create_channel( - host, - credentials=credentials, - credentials_file=credentials_file, - quota_project_id=quota_project_id, - default_scopes=cls.AUTH_SCOPES, - scopes=scopes, - default_host=cls.DEFAULT_HOST, - **kwargs - ) - - def __init__(self, *, - host: str = 'dataplex.googleapis.com', - credentials: Optional[ga_credentials.Credentials] = None, - credentials_file: Optional[str] = None, - scopes: Optional[Sequence[str]] = None, - channel: Optional[Union[aio.Channel, Callable[..., aio.Channel]]] = None, - api_mtls_endpoint: Optional[str] = None, - client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None, - ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None, - client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, - quota_project_id: Optional[str] = None, - client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, - always_use_jwt_access: Optional[bool] = False, - api_audience: Optional[str] = None, - ) -> None: - """Instantiate the transport. - - Args: - host (Optional[str]): - The hostname to connect to (default: 'dataplex.googleapis.com'). - credentials (Optional[google.auth.credentials.Credentials]): The - authorization credentials to attach to requests. These - credentials identify the application to the service; if none - are specified, the client will attempt to ascertain the - credentials from the environment. - This argument is ignored if a ``channel`` instance is provided. - credentials_file (Optional[str]): A file with credentials that can - be loaded with :func:`google.auth.load_credentials_from_file`. - This argument is ignored if a ``channel`` instance is provided. - scopes (Optional[Sequence[str]]): A optional list of scopes needed for this - service. These are only used when credentials are not specified and - are passed to :func:`google.auth.default`. - channel (Optional[Union[aio.Channel, Callable[..., aio.Channel]]]): - A ``Channel`` instance through which to make calls, or a Callable - that constructs and returns one. If set to None, ``self.create_channel`` - is used to create the channel. If a Callable is given, it will be called - with the same arguments as used in ``self.create_channel``. - api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint. - If provided, it overrides the ``host`` argument and tries to create - a mutual TLS channel with client SSL credentials from - ``client_cert_source`` or application default SSL credentials. - client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]): - Deprecated. A callback to provide client SSL certificate bytes and - private key bytes, both in PEM format. It is ignored if - ``api_mtls_endpoint`` is None. - ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials - for the grpc channel. It is ignored if a ``channel`` instance is provided. - client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]): - A callback to provide client certificate bytes and private key bytes, - both in PEM format. It is used to configure a mutual TLS channel. It is - ignored if a ``channel`` instance or ``ssl_channel_credentials`` is provided. - quota_project_id (Optional[str]): An optional project to use for billing - and quota. - client_info (google.api_core.gapic_v1.client_info.ClientInfo): - The client info used to send a user-agent string along with - API requests. If ``None``, then default info will be used. 
- Generally, you only need to set this if you're developing - your own client library. - always_use_jwt_access (Optional[bool]): Whether self signed JWT should - be used for service account credentials. - - Raises: - google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport - creation failed for any reason. - google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` - and ``credentials_file`` are passed. - """ - self._grpc_channel = None - self._ssl_channel_credentials = ssl_channel_credentials - self._stubs: Dict[str, Callable] = {} - self._operations_client: Optional[operations_v1.OperationsAsyncClient] = None - - if api_mtls_endpoint: - warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning) - if client_cert_source: - warnings.warn("client_cert_source is deprecated", DeprecationWarning) - - if isinstance(channel, aio.Channel): - # Ignore credentials if a channel was passed. - credentials = None - self._ignore_credentials = True - # If a channel was explicitly provided, set it. - self._grpc_channel = channel - self._ssl_channel_credentials = None - else: - if api_mtls_endpoint: - host = api_mtls_endpoint - - # Create SSL credentials with client_cert_source or application - # default SSL credentials. - if client_cert_source: - cert, key = client_cert_source() - self._ssl_channel_credentials = grpc.ssl_channel_credentials( - certificate_chain=cert, private_key=key - ) - else: - self._ssl_channel_credentials = SslCredentials().ssl_credentials - - else: - if client_cert_source_for_mtls and not ssl_channel_credentials: - cert, key = client_cert_source_for_mtls() - self._ssl_channel_credentials = grpc.ssl_channel_credentials( - certificate_chain=cert, private_key=key - ) - - # The base transport sets the host, credentials and scopes - super().__init__( - host=host, - credentials=credentials, - credentials_file=credentials_file, - scopes=scopes, - quota_project_id=quota_project_id, - client_info=client_info, - always_use_jwt_access=always_use_jwt_access, - api_audience=api_audience, - ) - - if not self._grpc_channel: - # initialize with the provided callable or the default channel - channel_init = channel or type(self).create_channel - self._grpc_channel = channel_init( - self._host, - # use the credentials which are saved - credentials=self._credentials, - # Set ``credentials_file`` to ``None`` here as - # the credentials that we saved earlier should be used. - credentials_file=None, - scopes=self._scopes, - ssl_credentials=self._ssl_channel_credentials, - quota_project_id=quota_project_id, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ], - ) - - # Wrap messages. This must be done after self._grpc_channel exists - self._wrap_with_kind = "kind" in inspect.signature(gapic_v1.method_async.wrap_method).parameters - self._prep_wrapped_messages(client_info) - - @property - def grpc_channel(self) -> aio.Channel: - """Create the channel designed to connect to this service. - - This property caches on the instance; repeated calls return - the same channel. - """ - # Return the channel from cache. - return self._grpc_channel - - @property - def operations_client(self) -> operations_v1.OperationsAsyncClient: - """Create the client designed to process long-running operations. - - This property caches on the instance; repeated calls return the same - client. - """ - # Quick check: Only create a new client if we do not already have one. 
- if self._operations_client is None: - self._operations_client = operations_v1.OperationsAsyncClient( - self.grpc_channel - ) - - # Return the client from cache. - return self._operations_client - - @property - def create_data_scan(self) -> Callable[ - [datascans.CreateDataScanRequest], - Awaitable[operations_pb2.Operation]]: - r"""Return a callable for the create data scan method over gRPC. - - Creates a DataScan resource. - - Returns: - Callable[[~.CreateDataScanRequest], - Awaitable[~.Operation]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'create_data_scan' not in self._stubs: - self._stubs['create_data_scan'] = self.grpc_channel.unary_unary( - '/google.cloud.dataplex.v1.DataScanService/CreateDataScan', - request_serializer=datascans.CreateDataScanRequest.serialize, - response_deserializer=operations_pb2.Operation.FromString, - ) - return self._stubs['create_data_scan'] - - @property - def update_data_scan(self) -> Callable[ - [datascans.UpdateDataScanRequest], - Awaitable[operations_pb2.Operation]]: - r"""Return a callable for the update data scan method over gRPC. - - Updates a DataScan resource. - - Returns: - Callable[[~.UpdateDataScanRequest], - Awaitable[~.Operation]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'update_data_scan' not in self._stubs: - self._stubs['update_data_scan'] = self.grpc_channel.unary_unary( - '/google.cloud.dataplex.v1.DataScanService/UpdateDataScan', - request_serializer=datascans.UpdateDataScanRequest.serialize, - response_deserializer=operations_pb2.Operation.FromString, - ) - return self._stubs['update_data_scan'] - - @property - def delete_data_scan(self) -> Callable[ - [datascans.DeleteDataScanRequest], - Awaitable[operations_pb2.Operation]]: - r"""Return a callable for the delete data scan method over gRPC. - - Deletes a DataScan resource. - - Returns: - Callable[[~.DeleteDataScanRequest], - Awaitable[~.Operation]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'delete_data_scan' not in self._stubs: - self._stubs['delete_data_scan'] = self.grpc_channel.unary_unary( - '/google.cloud.dataplex.v1.DataScanService/DeleteDataScan', - request_serializer=datascans.DeleteDataScanRequest.serialize, - response_deserializer=operations_pb2.Operation.FromString, - ) - return self._stubs['delete_data_scan'] - - @property - def get_data_scan(self) -> Callable[ - [datascans.GetDataScanRequest], - Awaitable[datascans.DataScan]]: - r"""Return a callable for the get data scan method over gRPC. - - Gets a DataScan resource. - - Returns: - Callable[[~.GetDataScanRequest], - Awaitable[~.DataScan]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. 
- if 'get_data_scan' not in self._stubs: - self._stubs['get_data_scan'] = self.grpc_channel.unary_unary( - '/google.cloud.dataplex.v1.DataScanService/GetDataScan', - request_serializer=datascans.GetDataScanRequest.serialize, - response_deserializer=datascans.DataScan.deserialize, - ) - return self._stubs['get_data_scan'] - - @property - def list_data_scans(self) -> Callable[ - [datascans.ListDataScansRequest], - Awaitable[datascans.ListDataScansResponse]]: - r"""Return a callable for the list data scans method over gRPC. - - Lists DataScans. - - Returns: - Callable[[~.ListDataScansRequest], - Awaitable[~.ListDataScansResponse]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'list_data_scans' not in self._stubs: - self._stubs['list_data_scans'] = self.grpc_channel.unary_unary( - '/google.cloud.dataplex.v1.DataScanService/ListDataScans', - request_serializer=datascans.ListDataScansRequest.serialize, - response_deserializer=datascans.ListDataScansResponse.deserialize, - ) - return self._stubs['list_data_scans'] - - @property - def run_data_scan(self) -> Callable[ - [datascans.RunDataScanRequest], - Awaitable[datascans.RunDataScanResponse]]: - r"""Return a callable for the run data scan method over gRPC. - - Runs an on-demand execution of a DataScan - - Returns: - Callable[[~.RunDataScanRequest], - Awaitable[~.RunDataScanResponse]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'run_data_scan' not in self._stubs: - self._stubs['run_data_scan'] = self.grpc_channel.unary_unary( - '/google.cloud.dataplex.v1.DataScanService/RunDataScan', - request_serializer=datascans.RunDataScanRequest.serialize, - response_deserializer=datascans.RunDataScanResponse.deserialize, - ) - return self._stubs['run_data_scan'] - - @property - def get_data_scan_job(self) -> Callable[ - [datascans.GetDataScanJobRequest], - Awaitable[datascans.DataScanJob]]: - r"""Return a callable for the get data scan job method over gRPC. - - Gets a DataScanJob resource. - - Returns: - Callable[[~.GetDataScanJobRequest], - Awaitable[~.DataScanJob]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'get_data_scan_job' not in self._stubs: - self._stubs['get_data_scan_job'] = self.grpc_channel.unary_unary( - '/google.cloud.dataplex.v1.DataScanService/GetDataScanJob', - request_serializer=datascans.GetDataScanJobRequest.serialize, - response_deserializer=datascans.DataScanJob.deserialize, - ) - return self._stubs['get_data_scan_job'] - - @property - def list_data_scan_jobs(self) -> Callable[ - [datascans.ListDataScanJobsRequest], - Awaitable[datascans.ListDataScanJobsResponse]]: - r"""Return a callable for the list data scan jobs method over gRPC. - - Lists DataScanJobs under the given DataScan. 
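For orientation, a hypothetical end-to-end sketch of the public async surface that these stubs back; the project, location, and scan IDs below are placeholders:

.. code-block:: python

    import asyncio

    from google.cloud import dataplex_v1

    async def main():
        # The async client routes this call through the get_data_scan stub above.
        client = dataplex_v1.DataScanServiceAsyncClient()
        scan = await client.get_data_scan(
            name="projects/my-project/locations/us-central1/dataScans/my-scan"
        )
        print(scan.name)

    asyncio.run(main())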
- - Returns: - Callable[[~.ListDataScanJobsRequest], - Awaitable[~.ListDataScanJobsResponse]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'list_data_scan_jobs' not in self._stubs: - self._stubs['list_data_scan_jobs'] = self.grpc_channel.unary_unary( - '/google.cloud.dataplex.v1.DataScanService/ListDataScanJobs', - request_serializer=datascans.ListDataScanJobsRequest.serialize, - response_deserializer=datascans.ListDataScanJobsResponse.deserialize, - ) - return self._stubs['list_data_scan_jobs'] - - @property - def generate_data_quality_rules(self) -> Callable[ - [datascans.GenerateDataQualityRulesRequest], - Awaitable[datascans.GenerateDataQualityRulesResponse]]: - r"""Return a callable for the generate data quality rules method over gRPC. - - Generates recommended data quality rules based on the - results of a data profiling scan. - - Use the recommendations to build rules for a data - quality scan. - - Returns: - Callable[[~.GenerateDataQualityRulesRequest], - Awaitable[~.GenerateDataQualityRulesResponse]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'generate_data_quality_rules' not in self._stubs: - self._stubs['generate_data_quality_rules'] = self.grpc_channel.unary_unary( - '/google.cloud.dataplex.v1.DataScanService/GenerateDataQualityRules', - request_serializer=datascans.GenerateDataQualityRulesRequest.serialize, - response_deserializer=datascans.GenerateDataQualityRulesResponse.deserialize, - ) - return self._stubs['generate_data_quality_rules'] - - def _prep_wrapped_messages(self, client_info): - """ Precompute the wrapped methods, overriding the base class method to use async wrappers.""" - self._wrapped_methods = { - self.create_data_scan: self._wrap_method( - self.create_data_scan, - default_timeout=None, - client_info=client_info, - ), - self.update_data_scan: self._wrap_method( - self.update_data_scan, - default_timeout=None, - client_info=client_info, - ), - self.delete_data_scan: self._wrap_method( - self.delete_data_scan, - default_timeout=None, - client_info=client_info, - ), - self.get_data_scan: self._wrap_method( - self.get_data_scan, - default_timeout=None, - client_info=client_info, - ), - self.list_data_scans: self._wrap_method( - self.list_data_scans, - default_timeout=None, - client_info=client_info, - ), - self.run_data_scan: self._wrap_method( - self.run_data_scan, - default_timeout=None, - client_info=client_info, - ), - self.get_data_scan_job: self._wrap_method( - self.get_data_scan_job, - default_timeout=None, - client_info=client_info, - ), - self.list_data_scan_jobs: self._wrap_method( - self.list_data_scan_jobs, - default_timeout=None, - client_info=client_info, - ), - self.generate_data_quality_rules: self._wrap_method( - self.generate_data_quality_rules, - default_timeout=None, - client_info=client_info, - ), - self.get_location: self._wrap_method( - self.get_location, - default_timeout=None, - client_info=client_info, - ), - self.list_locations: self._wrap_method( - self.list_locations, - default_timeout=None, - client_info=client_info, - ), - self.cancel_operation: self._wrap_method( - 
self.cancel_operation, - default_timeout=None, - client_info=client_info, - ), - self.delete_operation: self._wrap_method( - self.delete_operation, - default_timeout=None, - client_info=client_info, - ), - self.get_operation: self._wrap_method( - self.get_operation, - default_timeout=None, - client_info=client_info, - ), - self.list_operations: self._wrap_method( - self.list_operations, - default_timeout=None, - client_info=client_info, - ), - } - - def _wrap_method(self, func, *args, **kwargs): - if self._wrap_with_kind: # pragma: NO COVER - kwargs["kind"] = self.kind - return gapic_v1.method_async.wrap_method(func, *args, **kwargs) - - def close(self): - return self.grpc_channel.close() - - @property - def kind(self) -> str: - return "grpc_asyncio" - - @property - def delete_operation( - self, - ) -> Callable[[operations_pb2.DeleteOperationRequest], None]: - r"""Return a callable for the delete_operation method over gRPC. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if "delete_operation" not in self._stubs: - self._stubs["delete_operation"] = self.grpc_channel.unary_unary( - "/google.longrunning.Operations/DeleteOperation", - request_serializer=operations_pb2.DeleteOperationRequest.SerializeToString, - response_deserializer=None, - ) - return self._stubs["delete_operation"] - - @property - def cancel_operation( - self, - ) -> Callable[[operations_pb2.CancelOperationRequest], None]: - r"""Return a callable for the cancel_operation method over gRPC. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if "cancel_operation" not in self._stubs: - self._stubs["cancel_operation"] = self.grpc_channel.unary_unary( - "/google.longrunning.Operations/CancelOperation", - request_serializer=operations_pb2.CancelOperationRequest.SerializeToString, - response_deserializer=None, - ) - return self._stubs["cancel_operation"] - - @property - def get_operation( - self, - ) -> Callable[[operations_pb2.GetOperationRequest], operations_pb2.Operation]: - r"""Return a callable for the get_operation method over gRPC. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if "get_operation" not in self._stubs: - self._stubs["get_operation"] = self.grpc_channel.unary_unary( - "/google.longrunning.Operations/GetOperation", - request_serializer=operations_pb2.GetOperationRequest.SerializeToString, - response_deserializer=operations_pb2.Operation.FromString, - ) - return self._stubs["get_operation"] - - @property - def list_operations( - self, - ) -> Callable[[operations_pb2.ListOperationsRequest], operations_pb2.ListOperationsResponse]: - r"""Return a callable for the list_operations method over gRPC. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. 
- if "list_operations" not in self._stubs: - self._stubs["list_operations"] = self.grpc_channel.unary_unary( - "/google.longrunning.Operations/ListOperations", - request_serializer=operations_pb2.ListOperationsRequest.SerializeToString, - response_deserializer=operations_pb2.ListOperationsResponse.FromString, - ) - return self._stubs["list_operations"] - - @property - def list_locations( - self, - ) -> Callable[[locations_pb2.ListLocationsRequest], locations_pb2.ListLocationsResponse]: - r"""Return a callable for the list locations method over gRPC. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if "list_locations" not in self._stubs: - self._stubs["list_locations"] = self.grpc_channel.unary_unary( - "/google.cloud.location.Locations/ListLocations", - request_serializer=locations_pb2.ListLocationsRequest.SerializeToString, - response_deserializer=locations_pb2.ListLocationsResponse.FromString, - ) - return self._stubs["list_locations"] - - @property - def get_location( - self, - ) -> Callable[[locations_pb2.GetLocationRequest], locations_pb2.Location]: - r"""Return a callable for the list locations method over gRPC. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if "get_location" not in self._stubs: - self._stubs["get_location"] = self.grpc_channel.unary_unary( - "/google.cloud.location.Locations/GetLocation", - request_serializer=locations_pb2.GetLocationRequest.SerializeToString, - response_deserializer=locations_pb2.Location.FromString, - ) - return self._stubs["get_location"] - - -__all__ = ( - 'DataScanServiceGrpcAsyncIOTransport', -) diff --git a/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/data_taxonomy_service/__init__.py b/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/data_taxonomy_service/__init__.py deleted file mode 100644 index d53da31dabe7..000000000000 --- a/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/data_taxonomy_service/__init__.py +++ /dev/null @@ -1,22 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# -from .client import DataTaxonomyServiceClient -from .async_client import DataTaxonomyServiceAsyncClient - -__all__ = ( - 'DataTaxonomyServiceClient', - 'DataTaxonomyServiceAsyncClient', -) diff --git a/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/data_taxonomy_service/async_client.py b/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/data_taxonomy_service/async_client.py deleted file mode 100644 index 790d7cdf77dc..000000000000 --- a/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/data_taxonomy_service/async_client.py +++ /dev/null @@ -1,2420 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -from collections import OrderedDict -import re -from typing import Dict, Callable, Mapping, MutableMapping, MutableSequence, Optional, Sequence, Tuple, Type, Union - -from google.cloud.dataplex_v1 import gapic_version as package_version - -from google.api_core.client_options import ClientOptions -from google.api_core import exceptions as core_exceptions -from google.api_core import gapic_v1 -from google.api_core import retry_async as retries -from google.auth import credentials as ga_credentials # type: ignore -from google.oauth2 import service_account # type: ignore - - -try: - OptionalRetry = Union[retries.AsyncRetry, gapic_v1.method._MethodDefault, None] -except AttributeError: # pragma: NO COVER - OptionalRetry = Union[retries.AsyncRetry, object, None] # type: ignore - -from google.api_core import operation # type: ignore -from google.api_core import operation_async # type: ignore -from google.cloud.dataplex_v1.services.data_taxonomy_service import pagers -from google.cloud.dataplex_v1.types import data_taxonomy -from google.cloud.dataplex_v1.types import data_taxonomy as gcd_data_taxonomy -from google.cloud.dataplex_v1.types import security -from google.cloud.dataplex_v1.types import service -from google.cloud.location import locations_pb2 # type: ignore -from google.iam.v1 import iam_policy_pb2 # type: ignore -from google.iam.v1 import policy_pb2 # type: ignore -from google.longrunning import operations_pb2 # type: ignore -from google.protobuf import empty_pb2 # type: ignore -from google.protobuf import field_mask_pb2 # type: ignore -from google.protobuf import timestamp_pb2 # type: ignore -from .transports.base import DataTaxonomyServiceTransport, DEFAULT_CLIENT_INFO -from .transports.grpc_asyncio import DataTaxonomyServiceGrpcAsyncIOTransport -from .client import DataTaxonomyServiceClient - - -class DataTaxonomyServiceAsyncClient: - """DataTaxonomyService enables attribute-based governance. The - resources currently offered include DataTaxonomy and - DataAttribute. - """ - - _client: DataTaxonomyServiceClient - - # Copy defaults from the synchronous client for use here. - # Note: DEFAULT_ENDPOINT is deprecated. Use _DEFAULT_ENDPOINT_TEMPLATE instead. 
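A hypothetical import sketch: the service-level ``__init__`` above re-exports both client classes, and the top-level ``dataplex_v1`` package re-exports them again, so either spelling resolves to the same types:

.. code-block:: python

    from google.cloud import dataplex_v1
    from google.cloud.dataplex_v1.services.data_taxonomy_service import (
        DataTaxonomyServiceAsyncClient,
        DataTaxonomyServiceClient,
    )

    # Both import paths resolve to the same class objects.
    assert dataplex_v1.DataTaxonomyServiceClient is DataTaxonomyServiceClient
    assert dataplex_v1.DataTaxonomyServiceAsyncClient is DataTaxonomyServiceAsyncClient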
- DEFAULT_ENDPOINT = DataTaxonomyServiceClient.DEFAULT_ENDPOINT - DEFAULT_MTLS_ENDPOINT = DataTaxonomyServiceClient.DEFAULT_MTLS_ENDPOINT - _DEFAULT_ENDPOINT_TEMPLATE = DataTaxonomyServiceClient._DEFAULT_ENDPOINT_TEMPLATE - _DEFAULT_UNIVERSE = DataTaxonomyServiceClient._DEFAULT_UNIVERSE - - data_attribute_path = staticmethod(DataTaxonomyServiceClient.data_attribute_path) - parse_data_attribute_path = staticmethod(DataTaxonomyServiceClient.parse_data_attribute_path) - data_attribute_binding_path = staticmethod(DataTaxonomyServiceClient.data_attribute_binding_path) - parse_data_attribute_binding_path = staticmethod(DataTaxonomyServiceClient.parse_data_attribute_binding_path) - data_taxonomy_path = staticmethod(DataTaxonomyServiceClient.data_taxonomy_path) - parse_data_taxonomy_path = staticmethod(DataTaxonomyServiceClient.parse_data_taxonomy_path) - common_billing_account_path = staticmethod(DataTaxonomyServiceClient.common_billing_account_path) - parse_common_billing_account_path = staticmethod(DataTaxonomyServiceClient.parse_common_billing_account_path) - common_folder_path = staticmethod(DataTaxonomyServiceClient.common_folder_path) - parse_common_folder_path = staticmethod(DataTaxonomyServiceClient.parse_common_folder_path) - common_organization_path = staticmethod(DataTaxonomyServiceClient.common_organization_path) - parse_common_organization_path = staticmethod(DataTaxonomyServiceClient.parse_common_organization_path) - common_project_path = staticmethod(DataTaxonomyServiceClient.common_project_path) - parse_common_project_path = staticmethod(DataTaxonomyServiceClient.parse_common_project_path) - common_location_path = staticmethod(DataTaxonomyServiceClient.common_location_path) - parse_common_location_path = staticmethod(DataTaxonomyServiceClient.parse_common_location_path) - - @classmethod - def from_service_account_info(cls, info: dict, *args, **kwargs): - """Creates an instance of this client using the provided credentials - info. - - Args: - info (dict): The service account private key info. - args: Additional arguments to pass to the constructor. - kwargs: Additional arguments to pass to the constructor. - - Returns: - DataTaxonomyServiceAsyncClient: The constructed client. - """ - return DataTaxonomyServiceClient.from_service_account_info.__func__(DataTaxonomyServiceAsyncClient, info, *args, **kwargs) # type: ignore - - @classmethod - def from_service_account_file(cls, filename: str, *args, **kwargs): - """Creates an instance of this client using the provided credentials - file. - - Args: - filename (str): The path to the service account private key json - file. - args: Additional arguments to pass to the constructor. - kwargs: Additional arguments to pass to the constructor. - - Returns: - DataTaxonomyServiceAsyncClient: The constructed client. - """ - return DataTaxonomyServiceClient.from_service_account_file.__func__(DataTaxonomyServiceAsyncClient, filename, *args, **kwargs) # type: ignore - - from_service_account_json = from_service_account_file - - @classmethod - def get_mtls_endpoint_and_cert_source(cls, client_options: Optional[ClientOptions] = None): - """Return the API endpoint and client cert source for mutual TLS. - - The client cert source is determined in the following order: - (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the - client cert source is None. 
-        (2) if `client_options.client_cert_source` is provided, use the provided one; if the
-        default client cert source exists, use the default one; otherwise the client cert
-        source is None.
-
-        The API endpoint is determined in the following order:
-        (1) if `client_options.api_endpoint` is provided, use the provided one.
-        (2) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is "always", use the
-        default mTLS endpoint; if the environment variable is "never", use the default API
-        endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise
-        use the default API endpoint.
-
-        More details can be found at https://google.aip.dev/auth/4114.
-
-        Args:
-            client_options (google.api_core.client_options.ClientOptions): Custom options for the
-                client. Only the `api_endpoint` and `client_cert_source` properties may be used
-                in this method.
-
-        Returns:
-            Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the
-                client cert source to use.
-
-        Raises:
-            google.auth.exceptions.MutualTLSChannelError: If any errors happen.
-        """
-        return DataTaxonomyServiceClient.get_mtls_endpoint_and_cert_source(client_options)  # type: ignore
-
-    @property
-    def transport(self) -> DataTaxonomyServiceTransport:
-        """Returns the transport used by the client instance.
-
-        Returns:
-            DataTaxonomyServiceTransport: The transport used by the client instance.
-        """
-        return self._client.transport
-
-    @property
-    def api_endpoint(self):
-        """Return the API endpoint used by the client instance.
-
-        Returns:
-            str: The API endpoint used by the client instance.
-        """
-        return self._client._api_endpoint
-
-    @property
-    def universe_domain(self) -> str:
-        """Return the universe domain used by the client instance.
-
-        Returns:
-            str: The universe domain used
-                by the client instance.
-        """
-        return self._client._universe_domain
-
-    get_transport_class = DataTaxonomyServiceClient.get_transport_class
-
-    def __init__(self, *,
-            credentials: Optional[ga_credentials.Credentials] = None,
-            transport: Optional[Union[str, DataTaxonomyServiceTransport, Callable[..., DataTaxonomyServiceTransport]]] = "grpc_asyncio",
-            client_options: Optional[ClientOptions] = None,
-            client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO,
-            ) -> None:
-        """Instantiates the data taxonomy service async client.
-
-        Args:
-            credentials (Optional[google.auth.credentials.Credentials]): The
-                authorization credentials to attach to requests. These
-                credentials identify the application to the service; if none
-                are specified, the client will attempt to ascertain the
-                credentials from the environment.
-            transport (Optional[Union[str,DataTaxonomyServiceTransport,Callable[..., DataTaxonomyServiceTransport]]]):
-                The transport to use, or a Callable that constructs and returns a new transport to use.
-                If a Callable is given, it will be called with the same set of initialization
-                arguments as used in the DataTaxonomyServiceTransport constructor.
-                If set to None, a transport is chosen automatically.
-            client_options (Optional[Union[google.api_core.client_options.ClientOptions, dict]]):
-                Custom options for the client.
-
-                1. The ``api_endpoint`` property can be used to override the
-                default endpoint provided by the client when ``transport`` is
-                not explicitly provided. Only if this property is not set and
-                ``transport`` was not explicitly provided, the endpoint is
-                determined by the GOOGLE_API_USE_MTLS_ENDPOINT environment
-                variable, which has one of the following values:
-                "always" (always use the default mTLS endpoint), "never" (always
-                use the default regular endpoint) and "auto" (auto-switch to the
-                default mTLS endpoint if client certificate is present; this is
-                the default value).
-
-                2. If the GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable
-                is "true", then the ``client_cert_source`` property can be used
-                to provide a client certificate for mTLS transport. If
-                not provided, the default SSL client certificate will be used if
-                present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not
-                set, no client certificate will be used.
-
-                3. The ``universe_domain`` property can be used to override the
-                default "googleapis.com" universe. Note that ``api_endpoint``
-                property still takes precedence; and ``universe_domain`` is
-                currently not supported for mTLS.
-
-            client_info (google.api_core.gapic_v1.client_info.ClientInfo):
-                The client info used to send a user-agent string along with
-                API requests. If ``None``, then default info will be used.
-                Generally, you only need to set this if you're developing
-                your own client library.
-
-        Raises:
-            google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport
-                creation failed for any reason.
-        """
-        self._client = DataTaxonomyServiceClient(
-            credentials=credentials,
-            transport=transport,
-            client_options=client_options,
-            client_info=client_info,
-
-        )
-
-    async def create_data_taxonomy(self,
-            request: Optional[Union[gcd_data_taxonomy.CreateDataTaxonomyRequest, dict]] = None,
-            *,
-            parent: Optional[str] = None,
-            data_taxonomy: Optional[gcd_data_taxonomy.DataTaxonomy] = None,
-            data_taxonomy_id: Optional[str] = None,
-            retry: OptionalRetry = gapic_v1.method.DEFAULT,
-            timeout: Union[float, object] = gapic_v1.method.DEFAULT,
-            metadata: Sequence[Tuple[str, str]] = (),
-            ) -> operation_async.AsyncOperation:
-        r"""Create a DataTaxonomy resource.
-
-        .. code-block:: python
-
-            # This snippet has been automatically generated and should be regarded as a
-            # code template only.
-            # It will require modifications to work:
-            # - It may require correct/in-range values for request initialization.
-            # - It may require specifying regional endpoints when creating the service
-            #   client as shown in:
-            #   https://googleapis.dev/python/google-api-core/latest/client_options.html
-            from google.cloud import dataplex_v1
-
-            async def sample_create_data_taxonomy():
-                # Create a client
-                client = dataplex_v1.DataTaxonomyServiceAsyncClient()
-
-                # Initialize request argument(s)
-                request = dataplex_v1.CreateDataTaxonomyRequest(
-                    parent="parent_value",
-                    data_taxonomy_id="data_taxonomy_id_value",
-                )
-
-                # Make the request
-                operation = client.create_data_taxonomy(request=request)
-
-                print("Waiting for operation to complete...")
-
-                response = (await operation).result()
-
-                # Handle the response
-                print(response)
-
-        Args:
-            request (Optional[Union[google.cloud.dataplex_v1.types.CreateDataTaxonomyRequest, dict]]):
-                The request object. Create DataTaxonomy request.
-            parent (:class:`str`):
-                Required. The resource name of the data taxonomy
-                location, of the form:
-                projects/{project_number}/locations/{location_id} where
-                ``location_id`` refers to a GCP region.
-
-                This corresponds to the ``parent`` field
-                on the ``request`` instance; if ``request`` is provided, this
-                should not be set.
- data_taxonomy (:class:`google.cloud.dataplex_v1.types.DataTaxonomy`): - Required. DataTaxonomy resource. - This corresponds to the ``data_taxonomy`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - data_taxonomy_id (:class:`str`): - Required. DataTaxonomy identifier. - - - Must contain only lowercase letters, numbers and - hyphens. - - Must start with a letter. - - Must be between 1-63 characters. - - Must end with a number or a letter. - - Must be unique within the Project. - - This corresponds to the ``data_taxonomy_id`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.api_core.operation_async.AsyncOperation: - An object representing a long-running operation. - - The result type for the operation will be :class:`google.cloud.dataplex_v1.types.DataTaxonomy` DataTaxonomy represents a set of hierarchical DataAttributes resources, - grouped with a common theme Eg: - 'SensitiveDataTaxonomy' can have attributes to manage - PII data. It is defined at project level. - - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([parent, data_taxonomy, data_taxonomy_id]) - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, gcd_data_taxonomy.CreateDataTaxonomyRequest): - request = gcd_data_taxonomy.CreateDataTaxonomyRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if parent is not None: - request.parent = parent - if data_taxonomy is not None: - request.data_taxonomy = data_taxonomy - if data_taxonomy_id is not None: - request.data_taxonomy_id = data_taxonomy_id - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._client._transport._wrapped_methods[self._client._transport.create_data_taxonomy] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Wrap the response in an operation future. - response = operation_async.from_gapic( - response, - self._client._transport.operations_client, - gcd_data_taxonomy.DataTaxonomy, - metadata_type=service.OperationMetadata, - ) - - # Done; return the response. 
- return response - - async def update_data_taxonomy(self, - request: Optional[Union[gcd_data_taxonomy.UpdateDataTaxonomyRequest, dict]] = None, - *, - data_taxonomy: Optional[gcd_data_taxonomy.DataTaxonomy] = None, - update_mask: Optional[field_mask_pb2.FieldMask] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> operation_async.AsyncOperation: - r"""Updates a DataTaxonomy resource. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import dataplex_v1 - - async def sample_update_data_taxonomy(): - # Create a client - client = dataplex_v1.DataTaxonomyServiceAsyncClient() - - # Initialize request argument(s) - request = dataplex_v1.UpdateDataTaxonomyRequest( - ) - - # Make the request - operation = client.update_data_taxonomy(request=request) - - print("Waiting for operation to complete...") - - response = (await operation).result() - - # Handle the response - print(response) - - Args: - request (Optional[Union[google.cloud.dataplex_v1.types.UpdateDataTaxonomyRequest, dict]]): - The request object. Update DataTaxonomy request. - data_taxonomy (:class:`google.cloud.dataplex_v1.types.DataTaxonomy`): - Required. Only fields specified in ``update_mask`` are - updated. - - This corresponds to the ``data_taxonomy`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - update_mask (:class:`google.protobuf.field_mask_pb2.FieldMask`): - Required. Mask of fields to update. - This corresponds to the ``update_mask`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.api_core.operation_async.AsyncOperation: - An object representing a long-running operation. - - The result type for the operation will be :class:`google.cloud.dataplex_v1.types.DataTaxonomy` DataTaxonomy represents a set of hierarchical DataAttributes resources, - grouped with a common theme Eg: - 'SensitiveDataTaxonomy' can have attributes to manage - PII data. It is defined at project level. - - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([data_taxonomy, update_mask]) - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, gcd_data_taxonomy.UpdateDataTaxonomyRequest): - request = gcd_data_taxonomy.UpdateDataTaxonomyRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. 
- if data_taxonomy is not None: - request.data_taxonomy = data_taxonomy - if update_mask is not None: - request.update_mask = update_mask - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._client._transport._wrapped_methods[self._client._transport.update_data_taxonomy] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("data_taxonomy.name", request.data_taxonomy.name), - )), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Wrap the response in an operation future. - response = operation_async.from_gapic( - response, - self._client._transport.operations_client, - gcd_data_taxonomy.DataTaxonomy, - metadata_type=service.OperationMetadata, - ) - - # Done; return the response. - return response - - async def delete_data_taxonomy(self, - request: Optional[Union[data_taxonomy.DeleteDataTaxonomyRequest, dict]] = None, - *, - name: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> operation_async.AsyncOperation: - r"""Deletes a DataTaxonomy resource. All attributes - within the DataTaxonomy must be deleted before the - DataTaxonomy can be deleted. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import dataplex_v1 - - async def sample_delete_data_taxonomy(): - # Create a client - client = dataplex_v1.DataTaxonomyServiceAsyncClient() - - # Initialize request argument(s) - request = dataplex_v1.DeleteDataTaxonomyRequest( - name="name_value", - ) - - # Make the request - operation = client.delete_data_taxonomy(request=request) - - print("Waiting for operation to complete...") - - response = (await operation).result() - - # Handle the response - print(response) - - Args: - request (Optional[Union[google.cloud.dataplex_v1.types.DeleteDataTaxonomyRequest, dict]]): - The request object. Delete DataTaxonomy request. - name (:class:`str`): - Required. The resource name of the DataTaxonomy: - projects/{project_number}/locations/{location_id}/dataTaxonomies/{data_taxonomy_id} - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.api_core.operation_async.AsyncOperation: - An object representing a long-running operation. - - The result type for the operation will be :class:`google.protobuf.empty_pb2.Empty` A generic empty message that you can re-use to avoid defining duplicated - empty messages in your APIs. A typical example is to - use it as the request or the response type of an API - method. 
For instance: - - service Foo { - rpc Bar(google.protobuf.Empty) returns - (google.protobuf.Empty); - - } - - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([name]) - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, data_taxonomy.DeleteDataTaxonomyRequest): - request = data_taxonomy.DeleteDataTaxonomyRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._client._transport._wrapped_methods[self._client._transport.delete_data_taxonomy] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Wrap the response in an operation future. - response = operation_async.from_gapic( - response, - self._client._transport.operations_client, - empty_pb2.Empty, - metadata_type=service.OperationMetadata, - ) - - # Done; return the response. - return response - - async def list_data_taxonomies(self, - request: Optional[Union[data_taxonomy.ListDataTaxonomiesRequest, dict]] = None, - *, - parent: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> pagers.ListDataTaxonomiesAsyncPager: - r"""Lists DataTaxonomy resources in a project and - location. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import dataplex_v1 - - async def sample_list_data_taxonomies(): - # Create a client - client = dataplex_v1.DataTaxonomyServiceAsyncClient() - - # Initialize request argument(s) - request = dataplex_v1.ListDataTaxonomiesRequest( - parent="parent_value", - ) - - # Make the request - page_result = client.list_data_taxonomies(request=request) - - # Handle the response - async for response in page_result: - print(response) - - Args: - request (Optional[Union[google.cloud.dataplex_v1.types.ListDataTaxonomiesRequest, dict]]): - The request object. List DataTaxonomies request. - parent (:class:`str`): - Required. The resource name of the DataTaxonomy - location, of the form: - projects/{project_number}/locations/{location_id} where - ``location_id`` refers to a GCP region. - - This corresponds to the ``parent`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. 
- retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.cloud.dataplex_v1.services.data_taxonomy_service.pagers.ListDataTaxonomiesAsyncPager: - List DataTaxonomies response. - - Iterating over this object will yield - results and resolve additional pages - automatically. - - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([parent]) - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, data_taxonomy.ListDataTaxonomiesRequest): - request = data_taxonomy.ListDataTaxonomiesRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if parent is not None: - request.parent = parent - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._client._transport._wrapped_methods[self._client._transport.list_data_taxonomies] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # This method is paged; wrap the response in a pager, which provides - # an `__aiter__` convenience method. - response = pagers.ListDataTaxonomiesAsyncPager( - method=rpc, - request=request, - response=response, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - async def get_data_taxonomy(self, - request: Optional[Union[data_taxonomy.GetDataTaxonomyRequest, dict]] = None, - *, - name: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> data_taxonomy.DataTaxonomy: - r"""Retrieves a DataTaxonomy resource. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import dataplex_v1 - - async def sample_get_data_taxonomy(): - # Create a client - client = dataplex_v1.DataTaxonomyServiceAsyncClient() - - # Initialize request argument(s) - request = dataplex_v1.GetDataTaxonomyRequest( - name="name_value", - ) - - # Make the request - response = await client.get_data_taxonomy(request=request) - - # Handle the response - print(response) - - Args: - request (Optional[Union[google.cloud.dataplex_v1.types.GetDataTaxonomyRequest, dict]]): - The request object. 
Get DataTaxonomy request. - name (:class:`str`): - Required. The resource name of the DataTaxonomy: - projects/{project_number}/locations/{location_id}/dataTaxonomies/{data_taxonomy_id} - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.cloud.dataplex_v1.types.DataTaxonomy: - DataTaxonomy represents a set of - hierarchical DataAttributes resources, - grouped with a common theme Eg: - 'SensitiveDataTaxonomy' can have - attributes to manage PII data. It is - defined at project level. - - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([name]) - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, data_taxonomy.GetDataTaxonomyRequest): - request = data_taxonomy.GetDataTaxonomyRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._client._transport._wrapped_methods[self._client._transport.get_data_taxonomy] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - async def create_data_attribute_binding(self, - request: Optional[Union[data_taxonomy.CreateDataAttributeBindingRequest, dict]] = None, - *, - parent: Optional[str] = None, - data_attribute_binding: Optional[data_taxonomy.DataAttributeBinding] = None, - data_attribute_binding_id: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> operation_async.AsyncOperation: - r"""Create a DataAttributeBinding resource. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. 
- # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import dataplex_v1 - - async def sample_create_data_attribute_binding(): - # Create a client - client = dataplex_v1.DataTaxonomyServiceAsyncClient() - - # Initialize request argument(s) - data_attribute_binding = dataplex_v1.DataAttributeBinding() - data_attribute_binding.resource = "resource_value" - - request = dataplex_v1.CreateDataAttributeBindingRequest( - parent="parent_value", - data_attribute_binding_id="data_attribute_binding_id_value", - data_attribute_binding=data_attribute_binding, - ) - - # Make the request - operation = client.create_data_attribute_binding(request=request) - - print("Waiting for operation to complete...") - - response = (await operation).result() - - # Handle the response - print(response) - - Args: - request (Optional[Union[google.cloud.dataplex_v1.types.CreateDataAttributeBindingRequest, dict]]): - The request object. Create DataAttributeBinding request. - parent (:class:`str`): - Required. The resource name of the parent data taxonomy - projects/{project_number}/locations/{location_id} - - This corresponds to the ``parent`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - data_attribute_binding (:class:`google.cloud.dataplex_v1.types.DataAttributeBinding`): - Required. DataAttributeBinding - resource. - - This corresponds to the ``data_attribute_binding`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - data_attribute_binding_id (:class:`str`): - Required. DataAttributeBinding identifier. - - - Must contain only lowercase letters, numbers and - hyphens. - - Must start with a letter. - - Must be between 1-63 characters. - - Must end with a number or a letter. - - Must be unique within the Location. - - This corresponds to the ``data_attribute_binding_id`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.api_core.operation_async.AsyncOperation: - An object representing a long-running operation. - - The result type for the operation will be :class:`google.cloud.dataplex_v1.types.DataAttributeBinding` DataAttributeBinding represents binding of attributes to resources. Eg: Bind - 'CustomerInfo' entity with 'PII' attribute. - - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([parent, data_attribute_binding, data_attribute_binding_id]) - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, data_taxonomy.CreateDataAttributeBindingRequest): - request = data_taxonomy.CreateDataAttributeBindingRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. 
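- # The flattened arguments are applied onto the request below, so the two
- # call styles are interchangeable; a hedged sketch, assuming `binding` is
- # a previously built dataplex_v1.DataAttributeBinding and the resource
- # names are illustrative:
- #
- #   await client.create_data_attribute_binding(
- #       parent="projects/my-project/locations/us-central1",
- #       data_attribute_binding=binding,
- #       data_attribute_binding_id="my-binding",
- #   )
- #   # ...is equivalent to...
- #   await client.create_data_attribute_binding(
- #       request=dataplex_v1.CreateDataAttributeBindingRequest(
- #           parent="projects/my-project/locations/us-central1",
- #           data_attribute_binding=binding,
- #           data_attribute_binding_id="my-binding",
- #       )
- #   )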
- if parent is not None: - request.parent = parent - if data_attribute_binding is not None: - request.data_attribute_binding = data_attribute_binding - if data_attribute_binding_id is not None: - request.data_attribute_binding_id = data_attribute_binding_id - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._client._transport._wrapped_methods[self._client._transport.create_data_attribute_binding] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Wrap the response in an operation future. - response = operation_async.from_gapic( - response, - self._client._transport.operations_client, - data_taxonomy.DataAttributeBinding, - metadata_type=service.OperationMetadata, - ) - - # Done; return the response. - return response - - async def update_data_attribute_binding(self, - request: Optional[Union[data_taxonomy.UpdateDataAttributeBindingRequest, dict]] = None, - *, - data_attribute_binding: Optional[data_taxonomy.DataAttributeBinding] = None, - update_mask: Optional[field_mask_pb2.FieldMask] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> operation_async.AsyncOperation: - r"""Updates a DataAttributeBinding resource. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import dataplex_v1 - - async def sample_update_data_attribute_binding(): - # Create a client - client = dataplex_v1.DataTaxonomyServiceAsyncClient() - - # Initialize request argument(s) - data_attribute_binding = dataplex_v1.DataAttributeBinding() - data_attribute_binding.resource = "resource_value" - - request = dataplex_v1.UpdateDataAttributeBindingRequest( - data_attribute_binding=data_attribute_binding, - ) - - # Make the request - operation = client.update_data_attribute_binding(request=request) - - print("Waiting for operation to complete...") - - response = (await operation).result() - - # Handle the response - print(response) - - Args: - request (Optional[Union[google.cloud.dataplex_v1.types.UpdateDataAttributeBindingRequest, dict]]): - The request object. Update DataAttributeBinding request. - data_attribute_binding (:class:`google.cloud.dataplex_v1.types.DataAttributeBinding`): - Required. Only fields specified in ``update_mask`` are - updated. - - This corresponds to the ``data_attribute_binding`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - update_mask (:class:`google.protobuf.field_mask_pb2.FieldMask`): - Required. Mask of fields to update. - This corresponds to the ``update_mask`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. 
- retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.api_core.operation_async.AsyncOperation: - An object representing a long-running operation. - - The result type for the operation will be :class:`google.cloud.dataplex_v1.types.DataAttributeBinding` DataAttributeBinding represents binding of attributes to resources. Eg: Bind - 'CustomerInfo' entity with 'PII' attribute. - - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([data_attribute_binding, update_mask]) - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, data_taxonomy.UpdateDataAttributeBindingRequest): - request = data_taxonomy.UpdateDataAttributeBindingRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if data_attribute_binding is not None: - request.data_attribute_binding = data_attribute_binding - if update_mask is not None: - request.update_mask = update_mask - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._client._transport._wrapped_methods[self._client._transport.update_data_attribute_binding] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("data_attribute_binding.name", request.data_attribute_binding.name), - )), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Wrap the response in an operation future. - response = operation_async.from_gapic( - response, - self._client._transport.operations_client, - data_taxonomy.DataAttributeBinding, - metadata_type=service.OperationMetadata, - ) - - # Done; return the response. - return response - - async def delete_data_attribute_binding(self, - request: Optional[Union[data_taxonomy.DeleteDataAttributeBindingRequest, dict]] = None, - *, - name: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> operation_async.AsyncOperation: - r"""Deletes a DataAttributeBinding resource. All - attributes within the DataAttributeBinding must be - deleted before the DataAttributeBinding can be deleted. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. 
- # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import dataplex_v1 - - async def sample_delete_data_attribute_binding(): - # Create a client - client = dataplex_v1.DataTaxonomyServiceAsyncClient() - - # Initialize request argument(s) - request = dataplex_v1.DeleteDataAttributeBindingRequest( - name="name_value", - etag="etag_value", - ) - - # Make the request - operation = client.delete_data_attribute_binding(request=request) - - print("Waiting for operation to complete...") - - response = (await operation).result() - - # Handle the response - print(response) - - Args: - request (Optional[Union[google.cloud.dataplex_v1.types.DeleteDataAttributeBindingRequest, dict]]): - The request object. Delete DataAttributeBinding request. - name (:class:`str`): - Required. The resource name of the DataAttributeBinding: - projects/{project_number}/locations/{location_id}/dataAttributeBindings/{data_attribute_binding_id} - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.api_core.operation_async.AsyncOperation: - An object representing a long-running operation. - - The result type for the operation will be :class:`google.protobuf.empty_pb2.Empty` A generic empty message that you can re-use to avoid defining duplicated - empty messages in your APIs. A typical example is to - use it as the request or the response type of an API - method. For instance: - - service Foo { - rpc Bar(google.protobuf.Empty) returns - (google.protobuf.Empty); - - } - - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([name]) - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, data_taxonomy.DeleteDataAttributeBindingRequest): - request = data_taxonomy.DeleteDataAttributeBindingRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._client._transport._wrapped_methods[self._client._transport.delete_data_attribute_binding] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Wrap the response in an operation future. 
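- # `operation_async.from_gapic` wraps the raw longrunning Operation proto in
- # an AsyncOperation future that polls the transport's operations client and,
- # once the server reports completion, unpacks the payload as the type named
- # below (Empty here, since a delete returns no body). A hedged caller-side
- # sketch (the resource name is an illustrative assumption; the server may
- # also expect the resource's current etag on the request):
- #
- #   operation = await client.delete_data_attribute_binding(
- #       name="projects/my-project/locations/us-central1/dataAttributeBindings/my-binding",
- #   )
- #   await operation.result()  # resolves to Empty once the delete finishes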
- response = operation_async.from_gapic( - response, - self._client._transport.operations_client, - empty_pb2.Empty, - metadata_type=service.OperationMetadata, - ) - - # Done; return the response. - return response - - async def list_data_attribute_bindings(self, - request: Optional[Union[data_taxonomy.ListDataAttributeBindingsRequest, dict]] = None, - *, - parent: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> pagers.ListDataAttributeBindingsAsyncPager: - r"""Lists DataAttributeBinding resources in a project and - location. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import dataplex_v1 - - async def sample_list_data_attribute_bindings(): - # Create a client - client = dataplex_v1.DataTaxonomyServiceAsyncClient() - - # Initialize request argument(s) - request = dataplex_v1.ListDataAttributeBindingsRequest( - parent="parent_value", - ) - - # Make the request - page_result = client.list_data_attribute_bindings(request=request) - - # Handle the response - async for response in page_result: - print(response) - - Args: - request (Optional[Union[google.cloud.dataplex_v1.types.ListDataAttributeBindingsRequest, dict]]): - The request object. List DataAttributeBindings request. - parent (:class:`str`): - Required. The resource name of the Location: - projects/{project_number}/locations/{location_id} - - This corresponds to the ``parent`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.cloud.dataplex_v1.services.data_taxonomy_service.pagers.ListDataAttributeBindingsAsyncPager: - List DataAttributeBindings response. - - Iterating over this object will yield - results and resolve additional pages - automatically. - - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([parent]) - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, data_taxonomy.ListDataAttributeBindingsRequest): - request = data_taxonomy.ListDataAttributeBindingsRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if parent is not None: - request.parent = parent - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. 
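- # The per-method defaults can be overridden for a single call; a sketch
- # using google.api_core's async retry helpers (the retry values and the
- # parent are illustrative assumptions):
- #
- #   from google.api_core import exceptions, retry_async
- #
- #   pager = await client.list_data_attribute_bindings(
- #       parent="projects/my-project/locations/us-central1",
- #       retry=retry_async.AsyncRetry(
- #           initial=1.0,
- #           maximum=10.0,
- #           multiplier=2.0,
- #           predicate=retry_async.if_exception_type(
- #               exceptions.ServiceUnavailable),
- #       ),
- #       timeout=60.0,
- #   )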
- rpc = self._client._transport._wrapped_methods[self._client._transport.list_data_attribute_bindings] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # This method is paged; wrap the response in a pager, which provides - # an `__aiter__` convenience method. - response = pagers.ListDataAttributeBindingsAsyncPager( - method=rpc, - request=request, - response=response, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - async def get_data_attribute_binding(self, - request: Optional[Union[data_taxonomy.GetDataAttributeBindingRequest, dict]] = None, - *, - name: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> data_taxonomy.DataAttributeBinding: - r"""Retrieves a DataAttributeBinding resource. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import dataplex_v1 - - async def sample_get_data_attribute_binding(): - # Create a client - client = dataplex_v1.DataTaxonomyServiceAsyncClient() - - # Initialize request argument(s) - request = dataplex_v1.GetDataAttributeBindingRequest( - name="name_value", - ) - - # Make the request - response = await client.get_data_attribute_binding(request=request) - - # Handle the response - print(response) - - Args: - request (Optional[Union[google.cloud.dataplex_v1.types.GetDataAttributeBindingRequest, dict]]): - The request object. Get DataAttributeBinding request. - name (:class:`str`): - Required. The resource name of the DataAttributeBinding: - projects/{project_number}/locations/{location_id}/dataAttributeBindings/{data_attribute_binding_id} - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.cloud.dataplex_v1.types.DataAttributeBinding: - DataAttributeBinding represents - binding of attributes to resources. Eg: - Bind 'CustomerInfo' entity with 'PII' - attribute. - - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. 
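- # `request` and the flattened `name` argument are mutually exclusive: the
- # check below raises ValueError when both are supplied, e.g. (the name is
- # an illustrative assumption):
- #
- #   await client.get_data_attribute_binding(
- #       request=dataplex_v1.GetDataAttributeBindingRequest(name="..."),
- #       name="projects/my-project/locations/us-central1/dataAttributeBindings/b",
- #   )  # raises ValueError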
- has_flattened_params = any([name]) - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, data_taxonomy.GetDataAttributeBindingRequest): - request = data_taxonomy.GetDataAttributeBindingRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._client._transport._wrapped_methods[self._client._transport.get_data_attribute_binding] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - async def create_data_attribute(self, - request: Optional[Union[data_taxonomy.CreateDataAttributeRequest, dict]] = None, - *, - parent: Optional[str] = None, - data_attribute: Optional[data_taxonomy.DataAttribute] = None, - data_attribute_id: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> operation_async.AsyncOperation: - r"""Create a DataAttribute resource. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import dataplex_v1 - - async def sample_create_data_attribute(): - # Create a client - client = dataplex_v1.DataTaxonomyServiceAsyncClient() - - # Initialize request argument(s) - request = dataplex_v1.CreateDataAttributeRequest( - parent="parent_value", - data_attribute_id="data_attribute_id_value", - ) - - # Make the request - operation = client.create_data_attribute(request=request) - - print("Waiting for operation to complete...") - - response = (await operation).result() - - # Handle the response - print(response) - - Args: - request (Optional[Union[google.cloud.dataplex_v1.types.CreateDataAttributeRequest, dict]]): - The request object. Create DataAttribute request. - parent (:class:`str`): - Required. The resource name of the parent data taxonomy - projects/{project_number}/locations/{location_id}/dataTaxonomies/{data_taxonomy_id} - - This corresponds to the ``parent`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - data_attribute (:class:`google.cloud.dataplex_v1.types.DataAttribute`): - Required. DataAttribute resource. - This corresponds to the ``data_attribute`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - data_attribute_id (:class:`str`): - Required. DataAttribute identifier. 
-
- - Must contain only lowercase letters, numbers and
- hyphens.
- - Must start with a letter.
- - Must be between 1-63 characters.
- - Must end with a number or a letter.
- - Must be unique within the DataTaxonomy.
-
- This corresponds to the ``data_attribute_id`` field
- on the ``request`` instance; if ``request`` is provided, this
- should not be set.
- retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any,
- should be retried.
- timeout (float): The timeout for this request.
- metadata (Sequence[Tuple[str, str]]): Strings which should be
- sent along with the request as metadata.
-
- Returns:
- google.api_core.operation_async.AsyncOperation:
- An object representing a long-running operation.
-
- The result type for the operation will be :class:`google.cloud.dataplex_v1.types.DataAttribute` Denotes one dataAttribute in a dataTaxonomy, for example, PII.
- DataAttribute resources can be defined in a
- hierarchy. A single dataAttribute resource can
- contain specs of multiple types::
-
-     PII
-       - ResourceAccessSpec:
-           - readers: foo@bar.com
-       - DataAccessSpec:
-           - readers: bar@foo.com
-
- """
- # Create or coerce a protobuf request object.
- # - Quick check: If we got a request object, we should *not* have
- # gotten any keyword arguments that map to the request.
- has_flattened_params = any([parent, data_attribute, data_attribute_id])
- if request is not None and has_flattened_params:
- raise ValueError("If the `request` argument is set, then none of "
- "the individual field arguments should be set.")
-
- # - Use the request object if provided (there's no risk of modifying the input as
- # there are no flattened fields), or create one.
- if not isinstance(request, data_taxonomy.CreateDataAttributeRequest):
- request = data_taxonomy.CreateDataAttributeRequest(request)
-
- # If we have keyword arguments corresponding to fields on the
- # request, apply these.
- if parent is not None:
- request.parent = parent
- if data_attribute is not None:
- request.data_attribute = data_attribute
- if data_attribute_id is not None:
- request.data_attribute_id = data_attribute_id
-
- # Wrap the RPC method; this adds retry and timeout information,
- # and friendly error handling.
- rpc = self._client._transport._wrapped_methods[self._client._transport.create_data_attribute]
-
- # Certain fields should be provided within the metadata header;
- # add these here.
- metadata = tuple(metadata) + (
- gapic_v1.routing_header.to_grpc_metadata((
- ("parent", request.parent),
- )),
- )
-
- # Validate the universe domain.
- self._client._validate_universe_domain()
-
- # Send the request.
- response = await rpc(
- request,
- retry=retry,
- timeout=timeout,
- metadata=metadata,
- )
-
- # Wrap the response in an operation future.
- response = operation_async.from_gapic(
- response,
- self._client._transport.operations_client,
- data_taxonomy.DataAttribute,
- metadata_type=service.OperationMetadata,
- )
-
- # Done; return the response.
- return response
-
- async def update_data_attribute(self,
- request: Optional[Union[data_taxonomy.UpdateDataAttributeRequest, dict]] = None,
- *,
- data_attribute: Optional[data_taxonomy.DataAttribute] = None,
- update_mask: Optional[field_mask_pb2.FieldMask] = None,
- retry: OptionalRetry = gapic_v1.method.DEFAULT,
- timeout: Union[float, object] = gapic_v1.method.DEFAULT,
- metadata: Sequence[Tuple[str, str]] = (),
- ) -> operation_async.AsyncOperation:
- r"""Updates a DataAttribute resource.
-
- .. code-block:: python
-
- # This snippet has been automatically generated and should be regarded as a
- # code template only.
- # It will require modifications to work:
- # - It may require correct/in-range values for request initialization.
- # - It may require specifying regional endpoints when creating the service
- # client as shown in:
- # https://googleapis.dev/python/google-api-core/latest/client_options.html
- from google.cloud import dataplex_v1
-
- async def sample_update_data_attribute():
- # Create a client
- client = dataplex_v1.DataTaxonomyServiceAsyncClient()
-
- # Initialize request argument(s)
- request = dataplex_v1.UpdateDataAttributeRequest(
- )
-
- # Make the request
- operation = client.update_data_attribute(request=request)
-
- print("Waiting for operation to complete...")
-
- response = (await operation).result()
-
- # Handle the response
- print(response)
-
- Args:
- request (Optional[Union[google.cloud.dataplex_v1.types.UpdateDataAttributeRequest, dict]]):
- The request object. Update DataAttribute request.
- data_attribute (:class:`google.cloud.dataplex_v1.types.DataAttribute`):
- Required. Only fields specified in ``update_mask`` are
- updated.
-
- This corresponds to the ``data_attribute`` field
- on the ``request`` instance; if ``request`` is provided, this
- should not be set.
- update_mask (:class:`google.protobuf.field_mask_pb2.FieldMask`):
- Required. Mask of fields to update.
- This corresponds to the ``update_mask`` field
- on the ``request`` instance; if ``request`` is provided, this
- should not be set.
- retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any,
- should be retried.
- timeout (float): The timeout for this request.
- metadata (Sequence[Tuple[str, str]]): Strings which should be
- sent along with the request as metadata.
-
- Returns:
- google.api_core.operation_async.AsyncOperation:
- An object representing a long-running operation.
-
- The result type for the operation will be :class:`google.cloud.dataplex_v1.types.DataAttribute` Denotes one dataAttribute in a dataTaxonomy, for example, PII.
- DataAttribute resources can be defined in a
- hierarchy. A single dataAttribute resource can
- contain specs of multiple types::
-
-     PII
-       - ResourceAccessSpec:
-           - readers: foo@bar.com
-       - DataAccessSpec:
-           - readers: bar@foo.com
-
- """
- # Create or coerce a protobuf request object.
- # - Quick check: If we got a request object, we should *not* have
- # gotten any keyword arguments that map to the request.
- has_flattened_params = any([data_attribute, update_mask])
- if request is not None and has_flattened_params:
- raise ValueError("If the `request` argument is set, then none of "
- "the individual field arguments should be set.")
-
- # - Use the request object if provided (there's no risk of modifying the input as
- # there are no flattened fields), or create one.
- if not isinstance(request, data_taxonomy.UpdateDataAttributeRequest):
- request = data_taxonomy.UpdateDataAttributeRequest(request)
-
- # If we have keyword arguments corresponding to fields on the
- # request, apply these.
- if data_attribute is not None:
- request.data_attribute = data_attribute
- if update_mask is not None:
- request.update_mask = update_mask
-
- # Wrap the RPC method; this adds retry and timeout information,
- # and friendly error handling.
- rpc = self._client._transport._wrapped_methods[self._client._transport.update_data_attribute]
-
- # Certain fields should be provided within the metadata header;
- # add these here.
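- # The routing header built below is sent as the `x-goog-request-params`
- # gRPC metadata entry so the backend can route on the resource, roughly
- # (the name is an illustrative assumption; the value is URL-encoded on
- # the wire):
- #
- #   x-goog-request-params:
- #       data_attribute.name=projects/my-project/locations/us-central1/dataTaxonomies/t/attributes/a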
- metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("data_attribute.name", request.data_attribute.name), - )), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Wrap the response in an operation future. - response = operation_async.from_gapic( - response, - self._client._transport.operations_client, - data_taxonomy.DataAttribute, - metadata_type=service.OperationMetadata, - ) - - # Done; return the response. - return response - - async def delete_data_attribute(self, - request: Optional[Union[data_taxonomy.DeleteDataAttributeRequest, dict]] = None, - *, - name: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> operation_async.AsyncOperation: - r"""Deletes a Data Attribute resource. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import dataplex_v1 - - async def sample_delete_data_attribute(): - # Create a client - client = dataplex_v1.DataTaxonomyServiceAsyncClient() - - # Initialize request argument(s) - request = dataplex_v1.DeleteDataAttributeRequest( - name="name_value", - ) - - # Make the request - operation = client.delete_data_attribute(request=request) - - print("Waiting for operation to complete...") - - response = (await operation).result() - - # Handle the response - print(response) - - Args: - request (Optional[Union[google.cloud.dataplex_v1.types.DeleteDataAttributeRequest, dict]]): - The request object. Delete DataAttribute request. - name (:class:`str`): - Required. The resource name of the DataAttribute: - projects/{project_number}/locations/{location_id}/dataTaxonomies/{dataTaxonomy}/attributes/{data_attribute_id} - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.api_core.operation_async.AsyncOperation: - An object representing a long-running operation. - - The result type for the operation will be :class:`google.protobuf.empty_pb2.Empty` A generic empty message that you can re-use to avoid defining duplicated - empty messages in your APIs. A typical example is to - use it as the request or the response type of an API - method. For instance: - - service Foo { - rpc Bar(google.protobuf.Empty) returns - (google.protobuf.Empty); - - } - - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. 
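- # For the update methods above, `update_mask` names the fields to change;
- # a minimal sketch, assuming `attribute` already carries the new values
- # and that only `description` should be written:
- #
- #   from google.protobuf import field_mask_pb2
- #
- #   operation = await client.update_data_attribute(
- #       data_attribute=attribute,
- #       update_mask=field_mask_pb2.FieldMask(paths=["description"]),
- #   )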
- has_flattened_params = any([name]) - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, data_taxonomy.DeleteDataAttributeRequest): - request = data_taxonomy.DeleteDataAttributeRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._client._transport._wrapped_methods[self._client._transport.delete_data_attribute] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Wrap the response in an operation future. - response = operation_async.from_gapic( - response, - self._client._transport.operations_client, - empty_pb2.Empty, - metadata_type=service.OperationMetadata, - ) - - # Done; return the response. - return response - - async def list_data_attributes(self, - request: Optional[Union[data_taxonomy.ListDataAttributesRequest, dict]] = None, - *, - parent: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> pagers.ListDataAttributesAsyncPager: - r"""Lists Data Attribute resources in a DataTaxonomy. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import dataplex_v1 - - async def sample_list_data_attributes(): - # Create a client - client = dataplex_v1.DataTaxonomyServiceAsyncClient() - - # Initialize request argument(s) - request = dataplex_v1.ListDataAttributesRequest( - parent="parent_value", - ) - - # Make the request - page_result = client.list_data_attributes(request=request) - - # Handle the response - async for response in page_result: - print(response) - - Args: - request (Optional[Union[google.cloud.dataplex_v1.types.ListDataAttributesRequest, dict]]): - The request object. List DataAttributes request. - parent (:class:`str`): - Required. The resource name of the DataTaxonomy: - projects/{project_number}/locations/{location_id}/dataTaxonomies/{data_taxonomy_id} - - This corresponds to the ``parent`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. 
- - Returns: - google.cloud.dataplex_v1.services.data_taxonomy_service.pagers.ListDataAttributesAsyncPager: - List DataAttributes response. - - Iterating over this object will yield - results and resolve additional pages - automatically. - - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([parent]) - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, data_taxonomy.ListDataAttributesRequest): - request = data_taxonomy.ListDataAttributesRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if parent is not None: - request.parent = parent - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._client._transport._wrapped_methods[self._client._transport.list_data_attributes] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # This method is paged; wrap the response in a pager, which provides - # an `__aiter__` convenience method. - response = pagers.ListDataAttributesAsyncPager( - method=rpc, - request=request, - response=response, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - async def get_data_attribute(self, - request: Optional[Union[data_taxonomy.GetDataAttributeRequest, dict]] = None, - *, - name: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> data_taxonomy.DataAttribute: - r"""Retrieves a Data Attribute resource. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import dataplex_v1 - - async def sample_get_data_attribute(): - # Create a client - client = dataplex_v1.DataTaxonomyServiceAsyncClient() - - # Initialize request argument(s) - request = dataplex_v1.GetDataAttributeRequest( - name="name_value", - ) - - # Make the request - response = await client.get_data_attribute(request=request) - - # Handle the response - print(response) - - Args: - request (Optional[Union[google.cloud.dataplex_v1.types.GetDataAttributeRequest, dict]]): - The request object. Get DataAttribute request. - name (:class:`str`): - Required. 
The resource name of the dataAttribute:
- projects/{project_number}/locations/{location_id}/dataTaxonomies/{dataTaxonomy}/attributes/{data_attribute_id}
-
- This corresponds to the ``name`` field
- on the ``request`` instance; if ``request`` is provided, this
- should not be set.
- retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any,
- should be retried.
- timeout (float): The timeout for this request.
- metadata (Sequence[Tuple[str, str]]): Strings which should be
- sent along with the request as metadata.
-
- Returns:
- google.cloud.dataplex_v1.types.DataAttribute:
- Denotes one dataAttribute in a dataTaxonomy, for example, PII.
- DataAttribute resources can be defined in a
- hierarchy. A single dataAttribute resource can
- contain specs of multiple types::
-
-     PII
-       - ResourceAccessSpec:
-           - readers: foo@bar.com
-       - DataAccessSpec:
-           - readers: bar@foo.com
-
- """
- # Create or coerce a protobuf request object.
- # - Quick check: If we got a request object, we should *not* have
- # gotten any keyword arguments that map to the request.
- has_flattened_params = any([name])
- if request is not None and has_flattened_params:
- raise ValueError("If the `request` argument is set, then none of "
- "the individual field arguments should be set.")
-
- # - Use the request object if provided (there's no risk of modifying the input as
- # there are no flattened fields), or create one.
- if not isinstance(request, data_taxonomy.GetDataAttributeRequest):
- request = data_taxonomy.GetDataAttributeRequest(request)
-
- # If we have keyword arguments corresponding to fields on the
- # request, apply these.
- if name is not None:
- request.name = name
-
- # Wrap the RPC method; this adds retry and timeout information,
- # and friendly error handling.
- rpc = self._client._transport._wrapped_methods[self._client._transport.get_data_attribute]
-
- # Certain fields should be provided within the metadata header;
- # add these here.
- metadata = tuple(metadata) + (
- gapic_v1.routing_header.to_grpc_metadata((
- ("name", request.name),
- )),
- )
-
- # Validate the universe domain.
- self._client._validate_universe_domain()
-
- # Send the request.
- response = await rpc(
- request,
- retry=retry,
- timeout=timeout,
- metadata=metadata,
- )
-
- # Done; return the response.
- return response
-
- async def list_operations(
- self,
- request: Optional[operations_pb2.ListOperationsRequest] = None,
- *,
- retry: OptionalRetry = gapic_v1.method.DEFAULT,
- timeout: Union[float, object] = gapic_v1.method.DEFAULT,
- metadata: Sequence[Tuple[str, str]] = (),
- ) -> operations_pb2.ListOperationsResponse:
- r"""Lists operations that match the specified filter in the request.
-
- Args:
- request (:class:`~.operations_pb2.ListOperationsRequest`):
- The request object. Request message for
- `ListOperations` method.
- retry (google.api_core.retry_async.AsyncRetry): Designation of what errors,
- if any, should be retried.
- timeout (float): The timeout for this request.
- metadata (Sequence[Tuple[str, str]]): Strings which should be
- sent along with the request as metadata.
- Returns:
- ~.operations_pb2.ListOperationsResponse:
- Response message for ``ListOperations`` method.
- """
- # Create or coerce a protobuf request object.
- # The request isn't a proto-plus wrapped type,
- # so it must be constructed via keyword expansion.
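- # Unlike the proto-plus Dataplex types above, `operations_pb2` messages are
- # raw protobuf, so a dict is expanded into keyword arguments instead of
- # being passed to a copy constructor, e.g. (the name is an illustrative
- # assumption):
- #
- #   operations_pb2.ListOperationsRequest(
- #       **{"name": "projects/my-project/locations/us-central1", "filter": ""}
- #   )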
- if isinstance(request, dict): - request = operations_pb2.ListOperationsRequest(**request) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self.transport._wrapped_methods[self._client._transport.list_operations] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata( - (("name", request.name),)), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. - response = await rpc( - request, retry=retry, timeout=timeout, metadata=metadata,) - - # Done; return the response. - return response - - async def get_operation( - self, - request: Optional[operations_pb2.GetOperationRequest] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> operations_pb2.Operation: - r"""Gets the latest state of a long-running operation. - - Args: - request (:class:`~.operations_pb2.GetOperationRequest`): - The request object. Request message for - `GetOperation` method. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, - if any, should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - Returns: - ~.operations_pb2.Operation: - An ``Operation`` object. - """ - # Create or coerce a protobuf request object. - # The request isn't a proto-plus wrapped type, - # so it must be constructed via keyword expansion. - if isinstance(request, dict): - request = operations_pb2.GetOperationRequest(**request) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self.transport._wrapped_methods[self._client._transport.get_operation] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata( - (("name", request.name),)), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. - response = await rpc( - request, retry=retry, timeout=timeout, metadata=metadata,) - - # Done; return the response. - return response - - async def delete_operation( - self, - request: Optional[operations_pb2.DeleteOperationRequest] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> None: - r"""Deletes a long-running operation. - - This method indicates that the client is no longer interested - in the operation result. It does not cancel the operation. - If the server doesn't support this method, it returns - `google.rpc.Code.UNIMPLEMENTED`. - - Args: - request (:class:`~.operations_pb2.DeleteOperationRequest`): - The request object. Request message for - `DeleteOperation` method. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, - if any, should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - Returns: - None - """ - # Create or coerce a protobuf request object. - # The request isn't a proto-plus wrapped type, - # so it must be constructed via keyword expansion. 
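-        # Note the distinction between the two operation mixins here,
-        # sketched with a hypothetical operation name `op_name`:
-        #     await client.cancel_operation({"name": op_name})  # asks the server to stop the work
-        #     await client.delete_operation({"name": op_name})  # only discards the operation record
-        # Deleting never cancels; call cancel_operation first if the work should stop.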
- if isinstance(request, dict): - request = operations_pb2.DeleteOperationRequest(**request) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self.transport._wrapped_methods[self._client._transport.delete_operation] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata( - (("name", request.name),)), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. - await rpc(request, retry=retry, timeout=timeout, metadata=metadata,) - - async def cancel_operation( - self, - request: Optional[operations_pb2.CancelOperationRequest] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> None: - r"""Starts asynchronous cancellation on a long-running operation. - - The server makes a best effort to cancel the operation, but success - is not guaranteed. If the server doesn't support this method, it returns - `google.rpc.Code.UNIMPLEMENTED`. - - Args: - request (:class:`~.operations_pb2.CancelOperationRequest`): - The request object. Request message for - `CancelOperation` method. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, - if any, should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - Returns: - None - """ - # Create or coerce a protobuf request object. - # The request isn't a proto-plus wrapped type, - # so it must be constructed via keyword expansion. - if isinstance(request, dict): - request = operations_pb2.CancelOperationRequest(**request) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self.transport._wrapped_methods[self._client._transport.cancel_operation] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata( - (("name", request.name),)), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. - await rpc(request, retry=retry, timeout=timeout, metadata=metadata,) - - async def get_location( - self, - request: Optional[locations_pb2.GetLocationRequest] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> locations_pb2.Location: - r"""Gets information about a location. - - Args: - request (:class:`~.location_pb2.GetLocationRequest`): - The request object. Request message for - `GetLocation` method. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, - if any, should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - Returns: - ~.location_pb2.Location: - Location object. - """ - # Create or coerce a protobuf request object. - # The request isn't a proto-plus wrapped type, - # so it must be constructed via keyword expansion. - if isinstance(request, dict): - request = locations_pb2.GetLocationRequest(**request) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. 
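-        # The wrapped method carries the service's default retry/timeout policy;
-        # a caller can still override both per call, e.g. (values are illustrative):
-        #     from google.api_core import retry_async
-        #     await client.get_location(request, retry=retry_async.AsyncRetry(timeout=60.0), timeout=60.0)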
- rpc = self.transport._wrapped_methods[self._client._transport.get_location] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata( - (("name", request.name),)), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. - response = await rpc( - request, retry=retry, timeout=timeout, metadata=metadata,) - - # Done; return the response. - return response - - async def list_locations( - self, - request: Optional[locations_pb2.ListLocationsRequest] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> locations_pb2.ListLocationsResponse: - r"""Lists information about the supported locations for this service. - - Args: - request (:class:`~.location_pb2.ListLocationsRequest`): - The request object. Request message for - `ListLocations` method. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, - if any, should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - Returns: - ~.location_pb2.ListLocationsResponse: - Response message for ``ListLocations`` method. - """ - # Create or coerce a protobuf request object. - # The request isn't a proto-plus wrapped type, - # so it must be constructed via keyword expansion. - if isinstance(request, dict): - request = locations_pb2.ListLocationsRequest(**request) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self.transport._wrapped_methods[self._client._transport.list_locations] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata( - (("name", request.name),)), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. - response = await rpc( - request, retry=retry, timeout=timeout, metadata=metadata,) - - # Done; return the response. - return response - - async def __aenter__(self) -> "DataTaxonomyServiceAsyncClient": - return self - - async def __aexit__(self, exc_type, exc, tb): - await self.transport.close() - -DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(gapic_version=package_version.__version__) - - -__all__ = ( - "DataTaxonomyServiceAsyncClient", -) diff --git a/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/data_taxonomy_service/client.py b/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/data_taxonomy_service/client.py deleted file mode 100644 index 599bb5d7fd8f..000000000000 --- a/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/data_taxonomy_service/client.py +++ /dev/null @@ -1,2746 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
-# See the License for the specific language governing permissions and -# limitations under the License. -# -from collections import OrderedDict -import os -import re -from typing import Dict, Callable, Mapping, MutableMapping, MutableSequence, Optional, Sequence, Tuple, Type, Union, cast -import warnings - -from google.cloud.dataplex_v1 import gapic_version as package_version - -from google.api_core import client_options as client_options_lib -from google.api_core import exceptions as core_exceptions -from google.api_core import gapic_v1 -from google.api_core import retry as retries -from google.auth import credentials as ga_credentials # type: ignore -from google.auth.transport import mtls # type: ignore -from google.auth.transport.grpc import SslCredentials # type: ignore -from google.auth.exceptions import MutualTLSChannelError # type: ignore -from google.oauth2 import service_account # type: ignore - -try: - OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault, None] -except AttributeError: # pragma: NO COVER - OptionalRetry = Union[retries.Retry, object, None] # type: ignore - -from google.api_core import operation # type: ignore -from google.api_core import operation_async # type: ignore -from google.cloud.dataplex_v1.services.data_taxonomy_service import pagers -from google.cloud.dataplex_v1.types import data_taxonomy -from google.cloud.dataplex_v1.types import data_taxonomy as gcd_data_taxonomy -from google.cloud.dataplex_v1.types import security -from google.cloud.dataplex_v1.types import service -from google.cloud.location import locations_pb2 # type: ignore -from google.iam.v1 import iam_policy_pb2 # type: ignore -from google.iam.v1 import policy_pb2 # type: ignore -from google.longrunning import operations_pb2 # type: ignore -from google.protobuf import empty_pb2 # type: ignore -from google.protobuf import field_mask_pb2 # type: ignore -from google.protobuf import timestamp_pb2 # type: ignore -from .transports.base import DataTaxonomyServiceTransport, DEFAULT_CLIENT_INFO -from .transports.grpc import DataTaxonomyServiceGrpcTransport -from .transports.grpc_asyncio import DataTaxonomyServiceGrpcAsyncIOTransport - - -class DataTaxonomyServiceClientMeta(type): - """Metaclass for the DataTaxonomyService client. - - This provides class-level methods for building and retrieving - support objects (e.g. transport) without polluting the client instance - objects. - """ - _transport_registry = OrderedDict() # type: Dict[str, Type[DataTaxonomyServiceTransport]] - _transport_registry["grpc"] = DataTaxonomyServiceGrpcTransport - _transport_registry["grpc_asyncio"] = DataTaxonomyServiceGrpcAsyncIOTransport - - def get_transport_class(cls, - label: Optional[str] = None, - ) -> Type[DataTaxonomyServiceTransport]: - """Returns an appropriate transport class. - - Args: - label: The name of the desired transport. If none is - provided, then the first transport in the registry is used. - - Returns: - The transport class to use. - """ - # If a specific transport is requested, return that one. - if label: - return cls._transport_registry[label] - - # No transport is requested; return the default (that is, the first one - # in the dictionary). - return next(iter(cls._transport_registry.values())) - - -class DataTaxonomyServiceClient(metaclass=DataTaxonomyServiceClientMeta): - """DataTaxonomyService enables attribute-based governance. The - resources currently offered include DataTaxonomy and - DataAttribute. 
- """ - - @staticmethod - def _get_default_mtls_endpoint(api_endpoint): - """Converts api endpoint to mTLS endpoint. - - Convert "*.sandbox.googleapis.com" and "*.googleapis.com" to - "*.mtls.sandbox.googleapis.com" and "*.mtls.googleapis.com" respectively. - Args: - api_endpoint (Optional[str]): the api endpoint to convert. - Returns: - str: converted mTLS api endpoint. - """ - if not api_endpoint: - return api_endpoint - - mtls_endpoint_re = re.compile( - r"(?P[^.]+)(?P\.mtls)?(?P\.sandbox)?(?P\.googleapis\.com)?" - ) - - m = mtls_endpoint_re.match(api_endpoint) - name, mtls, sandbox, googledomain = m.groups() - if mtls or not googledomain: - return api_endpoint - - if sandbox: - return api_endpoint.replace( - "sandbox.googleapis.com", "mtls.sandbox.googleapis.com" - ) - - return api_endpoint.replace(".googleapis.com", ".mtls.googleapis.com") - - # Note: DEFAULT_ENDPOINT is deprecated. Use _DEFAULT_ENDPOINT_TEMPLATE instead. - DEFAULT_ENDPOINT = "dataplex.googleapis.com" - DEFAULT_MTLS_ENDPOINT = _get_default_mtls_endpoint.__func__( # type: ignore - DEFAULT_ENDPOINT - ) - - _DEFAULT_ENDPOINT_TEMPLATE = "dataplex.{UNIVERSE_DOMAIN}" - _DEFAULT_UNIVERSE = "googleapis.com" - - @classmethod - def from_service_account_info(cls, info: dict, *args, **kwargs): - """Creates an instance of this client using the provided credentials - info. - - Args: - info (dict): The service account private key info. - args: Additional arguments to pass to the constructor. - kwargs: Additional arguments to pass to the constructor. - - Returns: - DataTaxonomyServiceClient: The constructed client. - """ - credentials = service_account.Credentials.from_service_account_info(info) - kwargs["credentials"] = credentials - return cls(*args, **kwargs) - - @classmethod - def from_service_account_file(cls, filename: str, *args, **kwargs): - """Creates an instance of this client using the provided credentials - file. - - Args: - filename (str): The path to the service account private key json - file. - args: Additional arguments to pass to the constructor. - kwargs: Additional arguments to pass to the constructor. - - Returns: - DataTaxonomyServiceClient: The constructed client. - """ - credentials = service_account.Credentials.from_service_account_file( - filename) - kwargs["credentials"] = credentials - return cls(*args, **kwargs) - - from_service_account_json = from_service_account_file - - @property - def transport(self) -> DataTaxonomyServiceTransport: - """Returns the transport used by the client instance. - - Returns: - DataTaxonomyServiceTransport: The transport used by the client - instance. 
- """ - return self._transport - - @staticmethod - def data_attribute_path(project: str,location: str,dataTaxonomy: str,data_attribute_id: str,) -> str: - """Returns a fully-qualified data_attribute string.""" - return "projects/{project}/locations/{location}/dataTaxonomies/{dataTaxonomy}/attributes/{data_attribute_id}".format(project=project, location=location, dataTaxonomy=dataTaxonomy, data_attribute_id=data_attribute_id, ) - - @staticmethod - def parse_data_attribute_path(path: str) -> Dict[str,str]: - """Parses a data_attribute path into its component segments.""" - m = re.match(r"^projects/(?P.+?)/locations/(?P.+?)/dataTaxonomies/(?P.+?)/attributes/(?P.+?)$", path) - return m.groupdict() if m else {} - - @staticmethod - def data_attribute_binding_path(project: str,location: str,data_attribute_binding_id: str,) -> str: - """Returns a fully-qualified data_attribute_binding string.""" - return "projects/{project}/locations/{location}/dataAttributeBindings/{data_attribute_binding_id}".format(project=project, location=location, data_attribute_binding_id=data_attribute_binding_id, ) - - @staticmethod - def parse_data_attribute_binding_path(path: str) -> Dict[str,str]: - """Parses a data_attribute_binding path into its component segments.""" - m = re.match(r"^projects/(?P.+?)/locations/(?P.+?)/dataAttributeBindings/(?P.+?)$", path) - return m.groupdict() if m else {} - - @staticmethod - def data_taxonomy_path(project: str,location: str,data_taxonomy_id: str,) -> str: - """Returns a fully-qualified data_taxonomy string.""" - return "projects/{project}/locations/{location}/dataTaxonomies/{data_taxonomy_id}".format(project=project, location=location, data_taxonomy_id=data_taxonomy_id, ) - - @staticmethod - def parse_data_taxonomy_path(path: str) -> Dict[str,str]: - """Parses a data_taxonomy path into its component segments.""" - m = re.match(r"^projects/(?P.+?)/locations/(?P.+?)/dataTaxonomies/(?P.+?)$", path) - return m.groupdict() if m else {} - - @staticmethod - def common_billing_account_path(billing_account: str, ) -> str: - """Returns a fully-qualified billing_account string.""" - return "billingAccounts/{billing_account}".format(billing_account=billing_account, ) - - @staticmethod - def parse_common_billing_account_path(path: str) -> Dict[str,str]: - """Parse a billing_account path into its component segments.""" - m = re.match(r"^billingAccounts/(?P.+?)$", path) - return m.groupdict() if m else {} - - @staticmethod - def common_folder_path(folder: str, ) -> str: - """Returns a fully-qualified folder string.""" - return "folders/{folder}".format(folder=folder, ) - - @staticmethod - def parse_common_folder_path(path: str) -> Dict[str,str]: - """Parse a folder path into its component segments.""" - m = re.match(r"^folders/(?P.+?)$", path) - return m.groupdict() if m else {} - - @staticmethod - def common_organization_path(organization: str, ) -> str: - """Returns a fully-qualified organization string.""" - return "organizations/{organization}".format(organization=organization, ) - - @staticmethod - def parse_common_organization_path(path: str) -> Dict[str,str]: - """Parse a organization path into its component segments.""" - m = re.match(r"^organizations/(?P.+?)$", path) - return m.groupdict() if m else {} - - @staticmethod - def common_project_path(project: str, ) -> str: - """Returns a fully-qualified project string.""" - return "projects/{project}".format(project=project, ) - - @staticmethod - def parse_common_project_path(path: str) -> Dict[str,str]: - """Parse a project path into its 
component segments.""" - m = re.match(r"^projects/(?P.+?)$", path) - return m.groupdict() if m else {} - - @staticmethod - def common_location_path(project: str, location: str, ) -> str: - """Returns a fully-qualified location string.""" - return "projects/{project}/locations/{location}".format(project=project, location=location, ) - - @staticmethod - def parse_common_location_path(path: str) -> Dict[str,str]: - """Parse a location path into its component segments.""" - m = re.match(r"^projects/(?P.+?)/locations/(?P.+?)$", path) - return m.groupdict() if m else {} - - @classmethod - def get_mtls_endpoint_and_cert_source(cls, client_options: Optional[client_options_lib.ClientOptions] = None): - """Deprecated. Return the API endpoint and client cert source for mutual TLS. - - The client cert source is determined in the following order: - (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the - client cert source is None. - (2) if `client_options.client_cert_source` is provided, use the provided one; if the - default client cert source exists, use the default one; otherwise the client cert - source is None. - - The API endpoint is determined in the following order: - (1) if `client_options.api_endpoint` if provided, use the provided one. - (2) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is "always", use the - default mTLS endpoint; if the environment variable is "never", use the default API - endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise - use the default API endpoint. - - More details can be found at https://google.aip.dev/auth/4114. - - Args: - client_options (google.api_core.client_options.ClientOptions): Custom options for the - client. Only the `api_endpoint` and `client_cert_source` properties may be used - in this method. - - Returns: - Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the - client cert source to use. - - Raises: - google.auth.exceptions.MutualTLSChannelError: If any errors happen. - """ - - warnings.warn("get_mtls_endpoint_and_cert_source is deprecated. Use the api_endpoint property instead.", - DeprecationWarning) - if client_options is None: - client_options = client_options_lib.ClientOptions() - use_client_cert = os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false") - use_mtls_endpoint = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto") - if use_client_cert not in ("true", "false"): - raise ValueError("Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`") - if use_mtls_endpoint not in ("auto", "never", "always"): - raise MutualTLSChannelError("Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`") - - # Figure out the client cert source to use. - client_cert_source = None - if use_client_cert == "true": - if client_options.client_cert_source: - client_cert_source = client_options.client_cert_source - elif mtls.has_default_client_cert_source(): - client_cert_source = mtls.default_client_cert_source() - - # Figure out which api endpoint to use. - if client_options.api_endpoint is not None: - api_endpoint = client_options.api_endpoint - elif use_mtls_endpoint == "always" or (use_mtls_endpoint == "auto" and client_cert_source): - api_endpoint = cls.DEFAULT_MTLS_ENDPOINT - else: - api_endpoint = cls.DEFAULT_ENDPOINT - - return api_endpoint, client_cert_source - - @staticmethod - def _read_environment_variables(): - """Returns the environment variables used by the client. 
- - Returns: - Tuple[bool, str, str]: returns the GOOGLE_API_USE_CLIENT_CERTIFICATE, - GOOGLE_API_USE_MTLS_ENDPOINT, and GOOGLE_CLOUD_UNIVERSE_DOMAIN environment variables. - - Raises: - ValueError: If GOOGLE_API_USE_CLIENT_CERTIFICATE is not - any of ["true", "false"]. - google.auth.exceptions.MutualTLSChannelError: If GOOGLE_API_USE_MTLS_ENDPOINT - is not any of ["auto", "never", "always"]. - """ - use_client_cert = os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false").lower() - use_mtls_endpoint = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto").lower() - universe_domain_env = os.getenv("GOOGLE_CLOUD_UNIVERSE_DOMAIN") - if use_client_cert not in ("true", "false"): - raise ValueError("Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`") - if use_mtls_endpoint not in ("auto", "never", "always"): - raise MutualTLSChannelError("Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`") - return use_client_cert == "true", use_mtls_endpoint, universe_domain_env - - @staticmethod - def _get_client_cert_source(provided_cert_source, use_cert_flag): - """Return the client cert source to be used by the client. - - Args: - provided_cert_source (bytes): The client certificate source provided. - use_cert_flag (bool): A flag indicating whether to use the client certificate. - - Returns: - bytes or None: The client cert source to be used by the client. - """ - client_cert_source = None - if use_cert_flag: - if provided_cert_source: - client_cert_source = provided_cert_source - elif mtls.has_default_client_cert_source(): - client_cert_source = mtls.default_client_cert_source() - return client_cert_source - - @staticmethod - def _get_api_endpoint(api_override, client_cert_source, universe_domain, use_mtls_endpoint): - """Return the API endpoint used by the client. - - Args: - api_override (str): The API endpoint override. If specified, this is always - the return value of this function and the other arguments are not used. - client_cert_source (bytes): The client certificate source used by the client. - universe_domain (str): The universe domain used by the client. - use_mtls_endpoint (str): How to use the mTLS endpoint, which depends also on the other parameters. - Possible values are "always", "auto", or "never". - - Returns: - str: The API endpoint to be used by the client. - """ - if api_override is not None: - api_endpoint = api_override - elif use_mtls_endpoint == "always" or (use_mtls_endpoint == "auto" and client_cert_source): - _default_universe = DataTaxonomyServiceClient._DEFAULT_UNIVERSE - if universe_domain != _default_universe: - raise MutualTLSChannelError(f"mTLS is not supported in any universe other than {_default_universe}.") - api_endpoint = DataTaxonomyServiceClient.DEFAULT_MTLS_ENDPOINT - else: - api_endpoint = DataTaxonomyServiceClient._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=universe_domain) - return api_endpoint - - @staticmethod - def _get_universe_domain(client_universe_domain: Optional[str], universe_domain_env: Optional[str]) -> str: - """Return the universe domain used by the client. - - Args: - client_universe_domain (Optional[str]): The universe domain configured via the client options. - universe_domain_env (Optional[str]): The universe domain configured via the "GOOGLE_CLOUD_UNIVERSE_DOMAIN" environment variable. - - Returns: - str: The universe domain to be used by the client. - - Raises: - ValueError: If the universe domain is an empty string. 
- """ - universe_domain = DataTaxonomyServiceClient._DEFAULT_UNIVERSE - if client_universe_domain is not None: - universe_domain = client_universe_domain - elif universe_domain_env is not None: - universe_domain = universe_domain_env - if len(universe_domain.strip()) == 0: - raise ValueError("Universe Domain cannot be an empty string.") - return universe_domain - - def _validate_universe_domain(self): - """Validates client's and credentials' universe domains are consistent. - - Returns: - bool: True iff the configured universe domain is valid. - - Raises: - ValueError: If the configured universe domain is not valid. - """ - - # NOTE (b/349488459): universe validation is disabled until further notice. - return True - - @property - def api_endpoint(self): - """Return the API endpoint used by the client instance. - - Returns: - str: The API endpoint used by the client instance. - """ - return self._api_endpoint - - @property - def universe_domain(self) -> str: - """Return the universe domain used by the client instance. - - Returns: - str: The universe domain used by the client instance. - """ - return self._universe_domain - - def __init__(self, *, - credentials: Optional[ga_credentials.Credentials] = None, - transport: Optional[Union[str, DataTaxonomyServiceTransport, Callable[..., DataTaxonomyServiceTransport]]] = None, - client_options: Optional[Union[client_options_lib.ClientOptions, dict]] = None, - client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, - ) -> None: - """Instantiates the data taxonomy service client. - - Args: - credentials (Optional[google.auth.credentials.Credentials]): The - authorization credentials to attach to requests. These - credentials identify the application to the service; if none - are specified, the client will attempt to ascertain the - credentials from the environment. - transport (Optional[Union[str,DataTaxonomyServiceTransport,Callable[..., DataTaxonomyServiceTransport]]]): - The transport to use, or a Callable that constructs and returns a new transport. - If a Callable is given, it will be called with the same set of initialization - arguments as used in the DataTaxonomyServiceTransport constructor. - If set to None, a transport is chosen automatically. - client_options (Optional[Union[google.api_core.client_options.ClientOptions, dict]]): - Custom options for the client. - - 1. The ``api_endpoint`` property can be used to override the - default endpoint provided by the client when ``transport`` is - not explicitly provided. Only if this property is not set and - ``transport`` was not explicitly provided, the endpoint is - determined by the GOOGLE_API_USE_MTLS_ENDPOINT environment - variable, which have one of the following values: - "always" (always use the default mTLS endpoint), "never" (always - use the default regular endpoint) and "auto" (auto-switch to the - default mTLS endpoint if client certificate is present; this is - the default value). - - 2. If the GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable - is "true", then the ``client_cert_source`` property can be used - to provide a client certificate for mTLS transport. If - not provided, the default SSL client certificate will be used if - present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not - set, no client certificate will be used. - - 3. The ``universe_domain`` property can be used to override the - default "googleapis.com" universe. 
Note that the ``api_endpoint`` - property still takes precedence; and ``universe_domain`` is - currently not supported for mTLS. - - client_info (google.api_core.gapic_v1.client_info.ClientInfo): - The client info used to send a user-agent string along with - API requests. If ``None``, then default info will be used. - Generally, you only need to set this if you're developing - your own client library. - - Raises: - google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport - creation failed for any reason. - """ - self._client_options = client_options - if isinstance(self._client_options, dict): - self._client_options = client_options_lib.from_dict(self._client_options) - if self._client_options is None: - self._client_options = client_options_lib.ClientOptions() - self._client_options = cast(client_options_lib.ClientOptions, self._client_options) - - universe_domain_opt = getattr(self._client_options, 'universe_domain', None) - - self._use_client_cert, self._use_mtls_endpoint, self._universe_domain_env = DataTaxonomyServiceClient._read_environment_variables() - self._client_cert_source = DataTaxonomyServiceClient._get_client_cert_source(self._client_options.client_cert_source, self._use_client_cert) - self._universe_domain = DataTaxonomyServiceClient._get_universe_domain(universe_domain_opt, self._universe_domain_env) - self._api_endpoint = None # updated below, depending on `transport` - - # Initialize the universe domain validation. - self._is_universe_domain_valid = False - - api_key_value = getattr(self._client_options, "api_key", None) - if api_key_value and credentials: - raise ValueError("client_options.api_key and credentials are mutually exclusive") - - # Save or instantiate the transport. - # Ordinarily, we provide the transport, but allowing a custom transport - # instance provides an extensibility point for unusual situations. - transport_provided = isinstance(transport, DataTaxonomyServiceTransport) - if transport_provided: - # transport is a DataTaxonomyServiceTransport instance. - if credentials or self._client_options.credentials_file or api_key_value: - raise ValueError("When providing a transport instance, " - "provide its credentials directly.") - if self._client_options.scopes: - raise ValueError( - "When providing a transport instance, provide its scopes " - "directly." 
- ) - self._transport = cast(DataTaxonomyServiceTransport, transport) - self._api_endpoint = self._transport.host - - self._api_endpoint = (self._api_endpoint or - DataTaxonomyServiceClient._get_api_endpoint( - self._client_options.api_endpoint, - self._client_cert_source, - self._universe_domain, - self._use_mtls_endpoint)) - - if not transport_provided: - import google.auth._default # type: ignore - - if api_key_value and hasattr(google.auth._default, "get_api_key_credentials"): - credentials = google.auth._default.get_api_key_credentials(api_key_value) - - transport_init: Union[Type[DataTaxonomyServiceTransport], Callable[..., DataTaxonomyServiceTransport]] = ( - DataTaxonomyServiceClient.get_transport_class(transport) - if isinstance(transport, str) or transport is None - else cast(Callable[..., DataTaxonomyServiceTransport], transport) - ) - # initialize with the provided callable or the passed in class - self._transport = transport_init( - credentials=credentials, - credentials_file=self._client_options.credentials_file, - host=self._api_endpoint, - scopes=self._client_options.scopes, - client_cert_source_for_mtls=self._client_cert_source, - quota_project_id=self._client_options.quota_project_id, - client_info=client_info, - always_use_jwt_access=True, - api_audience=self._client_options.api_audience, - ) - - def create_data_taxonomy(self, - request: Optional[Union[gcd_data_taxonomy.CreateDataTaxonomyRequest, dict]] = None, - *, - parent: Optional[str] = None, - data_taxonomy: Optional[gcd_data_taxonomy.DataTaxonomy] = None, - data_taxonomy_id: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> operation.Operation: - r"""Create a DataTaxonomy resource. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import dataplex_v1 - - def sample_create_data_taxonomy(): - # Create a client - client = dataplex_v1.DataTaxonomyServiceClient() - - # Initialize request argument(s) - request = dataplex_v1.CreateDataTaxonomyRequest( - parent="parent_value", - data_taxonomy_id="data_taxonomy_id_value", - ) - - # Make the request - operation = client.create_data_taxonomy(request=request) - - print("Waiting for operation to complete...") - - response = operation.result() - - # Handle the response - print(response) - - Args: - request (Union[google.cloud.dataplex_v1.types.CreateDataTaxonomyRequest, dict]): - The request object. Create DataTaxonomy request. - parent (str): - Required. The resource name of the data taxonomy - location, of the form: - projects/{project_number}/locations/{location_id} where - ``location_id`` refers to a GCP region. - - This corresponds to the ``parent`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - data_taxonomy (google.cloud.dataplex_v1.types.DataTaxonomy): - Required. DataTaxonomy resource. - This corresponds to the ``data_taxonomy`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - data_taxonomy_id (str): - Required. DataTaxonomy identifier. 
- - - Must contain only lowercase letters, numbers and - hyphens. - - Must start with a letter. - - Must be between 1-63 characters. - - Must end with a number or a letter. - - Must be unique within the Project. - - This corresponds to the ``data_taxonomy_id`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.api_core.operation.Operation: - An object representing a long-running operation. - - The result type for the operation will be :class:`google.cloud.dataplex_v1.types.DataTaxonomy` DataTaxonomy represents a set of hierarchical DataAttributes resources, - grouped with a common theme Eg: - 'SensitiveDataTaxonomy' can have attributes to manage - PII data. It is defined at project level. - - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([parent, data_taxonomy, data_taxonomy_id]) - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, gcd_data_taxonomy.CreateDataTaxonomyRequest): - request = gcd_data_taxonomy.CreateDataTaxonomyRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if parent is not None: - request.parent = parent - if data_taxonomy is not None: - request.data_taxonomy = data_taxonomy - if data_taxonomy_id is not None: - request.data_taxonomy_id = data_taxonomy_id - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.create_data_taxonomy] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), - ) - - # Validate the universe domain. - self._validate_universe_domain() - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Wrap the response in an operation future. - response = operation.from_gapic( - response, - self._transport.operations_client, - gcd_data_taxonomy.DataTaxonomy, - metadata_type=service.OperationMetadata, - ) - - # Done; return the response. - return response - - def update_data_taxonomy(self, - request: Optional[Union[gcd_data_taxonomy.UpdateDataTaxonomyRequest, dict]] = None, - *, - data_taxonomy: Optional[gcd_data_taxonomy.DataTaxonomy] = None, - update_mask: Optional[field_mask_pb2.FieldMask] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> operation.Operation: - r"""Updates a DataTaxonomy resource. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. 
- # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import dataplex_v1 - - def sample_update_data_taxonomy(): - # Create a client - client = dataplex_v1.DataTaxonomyServiceClient() - - # Initialize request argument(s) - request = dataplex_v1.UpdateDataTaxonomyRequest( - ) - - # Make the request - operation = client.update_data_taxonomy(request=request) - - print("Waiting for operation to complete...") - - response = operation.result() - - # Handle the response - print(response) - - Args: - request (Union[google.cloud.dataplex_v1.types.UpdateDataTaxonomyRequest, dict]): - The request object. Update DataTaxonomy request. - data_taxonomy (google.cloud.dataplex_v1.types.DataTaxonomy): - Required. Only fields specified in ``update_mask`` are - updated. - - This corresponds to the ``data_taxonomy`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - update_mask (google.protobuf.field_mask_pb2.FieldMask): - Required. Mask of fields to update. - This corresponds to the ``update_mask`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.api_core.operation.Operation: - An object representing a long-running operation. - - The result type for the operation will be :class:`google.cloud.dataplex_v1.types.DataTaxonomy` DataTaxonomy represents a set of hierarchical DataAttributes resources, - grouped with a common theme Eg: - 'SensitiveDataTaxonomy' can have attributes to manage - PII data. It is defined at project level. - - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([data_taxonomy, update_mask]) - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, gcd_data_taxonomy.UpdateDataTaxonomyRequest): - request = gcd_data_taxonomy.UpdateDataTaxonomyRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if data_taxonomy is not None: - request.data_taxonomy = data_taxonomy - if update_mask is not None: - request.update_mask = update_mask - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.update_data_taxonomy] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("data_taxonomy.name", request.data_taxonomy.name), - )), - ) - - # Validate the universe domain. - self._validate_universe_domain() - - # Send the request. 
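-        # The server applies only the paths listed in `update_mask`; a minimal
-        # flattened call (with hypothetical objects) looks like:
-        #     from google.protobuf import field_mask_pb2
-        #     mask = field_mask_pb2.FieldMask(paths=["description"])
-        #     client.update_data_taxonomy(data_taxonomy=taxonomy, update_mask=mask)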
- response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Wrap the response in an operation future. - response = operation.from_gapic( - response, - self._transport.operations_client, - gcd_data_taxonomy.DataTaxonomy, - metadata_type=service.OperationMetadata, - ) - - # Done; return the response. - return response - - def delete_data_taxonomy(self, - request: Optional[Union[data_taxonomy.DeleteDataTaxonomyRequest, dict]] = None, - *, - name: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> operation.Operation: - r"""Deletes a DataTaxonomy resource. All attributes - within the DataTaxonomy must be deleted before the - DataTaxonomy can be deleted. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import dataplex_v1 - - def sample_delete_data_taxonomy(): - # Create a client - client = dataplex_v1.DataTaxonomyServiceClient() - - # Initialize request argument(s) - request = dataplex_v1.DeleteDataTaxonomyRequest( - name="name_value", - ) - - # Make the request - operation = client.delete_data_taxonomy(request=request) - - print("Waiting for operation to complete...") - - response = operation.result() - - # Handle the response - print(response) - - Args: - request (Union[google.cloud.dataplex_v1.types.DeleteDataTaxonomyRequest, dict]): - The request object. Delete DataTaxonomy request. - name (str): - Required. The resource name of the DataTaxonomy: - projects/{project_number}/locations/{location_id}/dataTaxonomies/{data_taxonomy_id} - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.api_core.operation.Operation: - An object representing a long-running operation. - - The result type for the operation will be :class:`google.protobuf.empty_pb2.Empty` A generic empty message that you can re-use to avoid defining duplicated - empty messages in your APIs. A typical example is to - use it as the request or the response type of an API - method. For instance: - - service Foo { - rpc Bar(google.protobuf.Empty) returns - (google.protobuf.Empty); - - } - - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([name]) - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. 
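-        # Once sent, the delete runs as a long-running operation whose result
-        # is Empty; a minimal blocking call, with a hypothetical resource name:
-        #     op = client.delete_data_taxonomy(name="projects/p/locations/l/dataTaxonomies/t")
-        #     op.result()  # raises on failure, returns Empty on success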
- if not isinstance(request, data_taxonomy.DeleteDataTaxonomyRequest): - request = data_taxonomy.DeleteDataTaxonomyRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.delete_data_taxonomy] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Validate the universe domain. - self._validate_universe_domain() - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Wrap the response in an operation future. - response = operation.from_gapic( - response, - self._transport.operations_client, - empty_pb2.Empty, - metadata_type=service.OperationMetadata, - ) - - # Done; return the response. - return response - - def list_data_taxonomies(self, - request: Optional[Union[data_taxonomy.ListDataTaxonomiesRequest, dict]] = None, - *, - parent: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> pagers.ListDataTaxonomiesPager: - r"""Lists DataTaxonomy resources in a project and - location. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import dataplex_v1 - - def sample_list_data_taxonomies(): - # Create a client - client = dataplex_v1.DataTaxonomyServiceClient() - - # Initialize request argument(s) - request = dataplex_v1.ListDataTaxonomiesRequest( - parent="parent_value", - ) - - # Make the request - page_result = client.list_data_taxonomies(request=request) - - # Handle the response - for response in page_result: - print(response) - - Args: - request (Union[google.cloud.dataplex_v1.types.ListDataTaxonomiesRequest, dict]): - The request object. List DataTaxonomies request. - parent (str): - Required. The resource name of the DataTaxonomy - location, of the form: - projects/{project_number}/locations/{location_id} where - ``location_id`` refers to a GCP region. - - This corresponds to the ``parent`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.cloud.dataplex_v1.services.data_taxonomy_service.pagers.ListDataTaxonomiesPager: - List DataTaxonomies response. - - Iterating over this object will yield - results and resolve additional pages - automatically. - - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. 
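-        # Exactly one call style is accepted: either a request object,
-        #     client.list_data_taxonomies(request=dataplex_v1.ListDataTaxonomiesRequest(parent=parent))
-        # or the flattened field,
-        #     client.list_data_taxonomies(parent=parent)
-        # where `parent` is a hypothetical "projects/{project}/locations/{location}"
-        # string; mixing the two styles raises the ValueError below.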
- has_flattened_params = any([parent]) - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, data_taxonomy.ListDataTaxonomiesRequest): - request = data_taxonomy.ListDataTaxonomiesRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if parent is not None: - request.parent = parent - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.list_data_taxonomies] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), - ) - - # Validate the universe domain. - self._validate_universe_domain() - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # This method is paged; wrap the response in a pager, which provides - # an `__iter__` convenience method. - response = pagers.ListDataTaxonomiesPager( - method=rpc, - request=request, - response=response, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - def get_data_taxonomy(self, - request: Optional[Union[data_taxonomy.GetDataTaxonomyRequest, dict]] = None, - *, - name: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> data_taxonomy.DataTaxonomy: - r"""Retrieves a DataTaxonomy resource. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import dataplex_v1 - - def sample_get_data_taxonomy(): - # Create a client - client = dataplex_v1.DataTaxonomyServiceClient() - - # Initialize request argument(s) - request = dataplex_v1.GetDataTaxonomyRequest( - name="name_value", - ) - - # Make the request - response = client.get_data_taxonomy(request=request) - - # Handle the response - print(response) - - Args: - request (Union[google.cloud.dataplex_v1.types.GetDataTaxonomyRequest, dict]): - The request object. Get DataTaxonomy request. - name (str): - Required. The resource name of the DataTaxonomy: - projects/{project_number}/locations/{location_id}/dataTaxonomies/{data_taxonomy_id} - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. 
- - Returns: - google.cloud.dataplex_v1.types.DataTaxonomy: - DataTaxonomy represents a set of - hierarchical DataAttributes resources, - grouped with a common theme Eg: - 'SensitiveDataTaxonomy' can have - attributes to manage PII data. It is - defined at project level. - - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([name]) - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, data_taxonomy.GetDataTaxonomyRequest): - request = data_taxonomy.GetDataTaxonomyRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.get_data_taxonomy] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Validate the universe domain. - self._validate_universe_domain() - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - def create_data_attribute_binding(self, - request: Optional[Union[data_taxonomy.CreateDataAttributeBindingRequest, dict]] = None, - *, - parent: Optional[str] = None, - data_attribute_binding: Optional[data_taxonomy.DataAttributeBinding] = None, - data_attribute_binding_id: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> operation.Operation: - r"""Create a DataAttributeBinding resource. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import dataplex_v1 - - def sample_create_data_attribute_binding(): - # Create a client - client = dataplex_v1.DataTaxonomyServiceClient() - - # Initialize request argument(s) - data_attribute_binding = dataplex_v1.DataAttributeBinding() - data_attribute_binding.resource = "resource_value" - - request = dataplex_v1.CreateDataAttributeBindingRequest( - parent="parent_value", - data_attribute_binding_id="data_attribute_binding_id_value", - data_attribute_binding=data_attribute_binding, - ) - - # Make the request - operation = client.create_data_attribute_binding(request=request) - - print("Waiting for operation to complete...") - - response = operation.result() - - # Handle the response - print(response) - - Args: - request (Union[google.cloud.dataplex_v1.types.CreateDataAttributeBindingRequest, dict]): - The request object. Create DataAttributeBinding request. - parent (str): - Required. 
The resource name of the parent Location:
- projects/{project_number}/locations/{location_id}
-
- This corresponds to the ``parent`` field
- on the ``request`` instance; if ``request`` is provided, this
- should not be set.
- data_attribute_binding (google.cloud.dataplex_v1.types.DataAttributeBinding):
- Required. DataAttributeBinding
- resource.
-
- This corresponds to the ``data_attribute_binding`` field
- on the ``request`` instance; if ``request`` is provided, this
- should not be set.
- data_attribute_binding_id (str):
- Required. DataAttributeBinding identifier.
-
- - Must contain only lowercase letters, numbers and
- hyphens.
- - Must start with a letter.
- - Must be between 1 and 63 characters.
- - Must end with a number or a letter.
- - Must be unique within the Location.
-
- This corresponds to the ``data_attribute_binding_id`` field
- on the ``request`` instance; if ``request`` is provided, this
- should not be set.
- retry (google.api_core.retry.Retry): Designation of what errors, if any,
- should be retried.
- timeout (float): The timeout for this request.
- metadata (Sequence[Tuple[str, str]]): Strings which should be
- sent along with the request as metadata.
-
- Returns:
- google.api_core.operation.Operation:
- An object representing a long-running operation.
-
- The result type for the operation will be :class:`google.cloud.dataplex_v1.types.DataAttributeBinding` DataAttributeBinding represents binding of attributes to resources. Eg: Bind
- 'CustomerInfo' entity with 'PII' attribute.
-
- """
- # Create or coerce a protobuf request object.
- # - Quick check: If we got a request object, we should *not* have
- # gotten any keyword arguments that map to the request.
- has_flattened_params = any([parent, data_attribute_binding, data_attribute_binding_id])
- if request is not None and has_flattened_params:
- raise ValueError('If the `request` argument is set, then none of '
- 'the individual field arguments should be set.')
-
- # - Use the request object if provided (there's no risk of modifying the input as
- # there are no flattened fields), or create one.
- if not isinstance(request, data_taxonomy.CreateDataAttributeBindingRequest):
- request = data_taxonomy.CreateDataAttributeBindingRequest(request)
- # If we have keyword arguments corresponding to fields on the
- # request, apply these.
- if parent is not None:
- request.parent = parent
- if data_attribute_binding is not None:
- request.data_attribute_binding = data_attribute_binding
- if data_attribute_binding_id is not None:
- request.data_attribute_binding_id = data_attribute_binding_id
-
- # Wrap the RPC method; this adds retry and timeout information,
- # and friendly error handling.
- rpc = self._transport._wrapped_methods[self._transport.create_data_attribute_binding]
-
- # Certain fields should be provided within the metadata header;
- # add these here.
- metadata = tuple(metadata) + (
- gapic_v1.routing_header.to_grpc_metadata((
- ("parent", request.parent),
- )),
- )
-
- # Validate the universe domain.
- self._validate_universe_domain()
-
- # Send the request.
- response = rpc(
- request,
- retry=retry,
- timeout=timeout,
- metadata=metadata,
- )
-
- # Wrap the response in an operation future.
- response = operation.from_gapic(
- response,
- self._transport.operations_client,
- data_taxonomy.DataAttributeBinding,
- metadata_type=service.OperationMetadata,
- )
-
- # Done; return the response.
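-
- # Editorial sketch (not generated code): the returned future can be
- # inspected or cancelled before blocking on the result, e.g.:
- #
- #     op = client.create_data_attribute_binding(request=request)
- #     if not op.done():
- #         print(op.metadata)            # service.OperationMetadata
- #     binding = op.result(timeout=300)  # raises on operation failure
-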
- return response - - def update_data_attribute_binding(self, - request: Optional[Union[data_taxonomy.UpdateDataAttributeBindingRequest, dict]] = None, - *, - data_attribute_binding: Optional[data_taxonomy.DataAttributeBinding] = None, - update_mask: Optional[field_mask_pb2.FieldMask] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> operation.Operation: - r"""Updates a DataAttributeBinding resource. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import dataplex_v1 - - def sample_update_data_attribute_binding(): - # Create a client - client = dataplex_v1.DataTaxonomyServiceClient() - - # Initialize request argument(s) - data_attribute_binding = dataplex_v1.DataAttributeBinding() - data_attribute_binding.resource = "resource_value" - - request = dataplex_v1.UpdateDataAttributeBindingRequest( - data_attribute_binding=data_attribute_binding, - ) - - # Make the request - operation = client.update_data_attribute_binding(request=request) - - print("Waiting for operation to complete...") - - response = operation.result() - - # Handle the response - print(response) - - Args: - request (Union[google.cloud.dataplex_v1.types.UpdateDataAttributeBindingRequest, dict]): - The request object. Update DataAttributeBinding request. - data_attribute_binding (google.cloud.dataplex_v1.types.DataAttributeBinding): - Required. Only fields specified in ``update_mask`` are - updated. - - This corresponds to the ``data_attribute_binding`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - update_mask (google.protobuf.field_mask_pb2.FieldMask): - Required. Mask of fields to update. - This corresponds to the ``update_mask`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.api_core.operation.Operation: - An object representing a long-running operation. - - The result type for the operation will be :class:`google.cloud.dataplex_v1.types.DataAttributeBinding` DataAttributeBinding represents binding of attributes to resources. Eg: Bind - 'CustomerInfo' entity with 'PII' attribute. - - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([data_attribute_binding, update_mask]) - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. 
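-
- # Editorial sketch (not generated code): update calls pair the resource
- # with an explicit FieldMask so only the named fields change; the
- # "attributes" path below is an assumed example:
- #
- #     from google.protobuf import field_mask_pb2
- #
- #     mask = field_mask_pb2.FieldMask(paths=["attributes"])
- #     op = client.update_data_attribute_binding(
- #         data_attribute_binding=binding, update_mask=mask)
- #     binding = op.result()
-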
- if not isinstance(request, data_taxonomy.UpdateDataAttributeBindingRequest): - request = data_taxonomy.UpdateDataAttributeBindingRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if data_attribute_binding is not None: - request.data_attribute_binding = data_attribute_binding - if update_mask is not None: - request.update_mask = update_mask - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.update_data_attribute_binding] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("data_attribute_binding.name", request.data_attribute_binding.name), - )), - ) - - # Validate the universe domain. - self._validate_universe_domain() - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Wrap the response in an operation future. - response = operation.from_gapic( - response, - self._transport.operations_client, - data_taxonomy.DataAttributeBinding, - metadata_type=service.OperationMetadata, - ) - - # Done; return the response. - return response - - def delete_data_attribute_binding(self, - request: Optional[Union[data_taxonomy.DeleteDataAttributeBindingRequest, dict]] = None, - *, - name: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> operation.Operation: - r"""Deletes a DataAttributeBinding resource. All - attributes within the DataAttributeBinding must be - deleted before the DataAttributeBinding can be deleted. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import dataplex_v1 - - def sample_delete_data_attribute_binding(): - # Create a client - client = dataplex_v1.DataTaxonomyServiceClient() - - # Initialize request argument(s) - request = dataplex_v1.DeleteDataAttributeBindingRequest( - name="name_value", - etag="etag_value", - ) - - # Make the request - operation = client.delete_data_attribute_binding(request=request) - - print("Waiting for operation to complete...") - - response = operation.result() - - # Handle the response - print(response) - - Args: - request (Union[google.cloud.dataplex_v1.types.DeleteDataAttributeBindingRequest, dict]): - The request object. Delete DataAttributeBinding request. - name (str): - Required. The resource name of the DataAttributeBinding: - projects/{project_number}/locations/{location_id}/dataAttributeBindings/{data_attribute_binding_id} - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. 
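-
-                A common read-then-delete sketch (editorial illustration; it
-                assumes the binding's current ``etag`` guards against
-                concurrent edits):
-
-        .. code-block:: python
-
-            binding = client.get_data_attribute_binding(name=name)
-            request = dataplex_v1.DeleteDataAttributeBindingRequest(
-                name=binding.name,
-                etag=binding.etag,
-            )
-            client.delete_data_attribute_binding(request=request).result()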
- - Returns: - google.api_core.operation.Operation: - An object representing a long-running operation. - - The result type for the operation will be :class:`google.protobuf.empty_pb2.Empty` A generic empty message that you can re-use to avoid defining duplicated - empty messages in your APIs. A typical example is to - use it as the request or the response type of an API - method. For instance: - - service Foo { - rpc Bar(google.protobuf.Empty) returns - (google.protobuf.Empty); - - } - - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([name]) - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, data_taxonomy.DeleteDataAttributeBindingRequest): - request = data_taxonomy.DeleteDataAttributeBindingRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.delete_data_attribute_binding] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Validate the universe domain. - self._validate_universe_domain() - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Wrap the response in an operation future. - response = operation.from_gapic( - response, - self._transport.operations_client, - empty_pb2.Empty, - metadata_type=service.OperationMetadata, - ) - - # Done; return the response. - return response - - def list_data_attribute_bindings(self, - request: Optional[Union[data_taxonomy.ListDataAttributeBindingsRequest, dict]] = None, - *, - parent: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> pagers.ListDataAttributeBindingsPager: - r"""Lists DataAttributeBinding resources in a project and - location. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. 
- # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import dataplex_v1 - - def sample_list_data_attribute_bindings(): - # Create a client - client = dataplex_v1.DataTaxonomyServiceClient() - - # Initialize request argument(s) - request = dataplex_v1.ListDataAttributeBindingsRequest( - parent="parent_value", - ) - - # Make the request - page_result = client.list_data_attribute_bindings(request=request) - - # Handle the response - for response in page_result: - print(response) - - Args: - request (Union[google.cloud.dataplex_v1.types.ListDataAttributeBindingsRequest, dict]): - The request object. List DataAttributeBindings request. - parent (str): - Required. The resource name of the Location: - projects/{project_number}/locations/{location_id} - - This corresponds to the ``parent`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.cloud.dataplex_v1.services.data_taxonomy_service.pagers.ListDataAttributeBindingsPager: - List DataAttributeBindings response. - - Iterating over this object will yield - results and resolve additional pages - automatically. - - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([parent]) - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, data_taxonomy.ListDataAttributeBindingsRequest): - request = data_taxonomy.ListDataAttributeBindingsRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if parent is not None: - request.parent = parent - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.list_data_attribute_bindings] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), - ) - - # Validate the universe domain. - self._validate_universe_domain() - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # This method is paged; wrap the response in a pager, which provides - # an `__iter__` convenience method. - response = pagers.ListDataAttributeBindingsPager( - method=rpc, - request=request, - response=response, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. 
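-
- # Editorial sketch (not generated code): the pager can also be walked
- # page by page rather than item by item, e.g.:
- #
- #     pager = client.list_data_attribute_bindings(parent=parent)
- #     for page in pager.pages:
- #         print(len(page.data_attribute_bindings), page.next_page_token)
-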
- return response - - def get_data_attribute_binding(self, - request: Optional[Union[data_taxonomy.GetDataAttributeBindingRequest, dict]] = None, - *, - name: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> data_taxonomy.DataAttributeBinding: - r"""Retrieves a DataAttributeBinding resource. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import dataplex_v1 - - def sample_get_data_attribute_binding(): - # Create a client - client = dataplex_v1.DataTaxonomyServiceClient() - - # Initialize request argument(s) - request = dataplex_v1.GetDataAttributeBindingRequest( - name="name_value", - ) - - # Make the request - response = client.get_data_attribute_binding(request=request) - - # Handle the response - print(response) - - Args: - request (Union[google.cloud.dataplex_v1.types.GetDataAttributeBindingRequest, dict]): - The request object. Get DataAttributeBinding request. - name (str): - Required. The resource name of the DataAttributeBinding: - projects/{project_number}/locations/{location_id}/dataAttributeBindings/{data_attribute_binding_id} - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.cloud.dataplex_v1.types.DataAttributeBinding: - DataAttributeBinding represents - binding of attributes to resources. Eg: - Bind 'CustomerInfo' entity with 'PII' - attribute. - - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([name]) - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, data_taxonomy.GetDataAttributeBindingRequest): - request = data_taxonomy.GetDataAttributeBindingRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.get_data_attribute_binding] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Validate the universe domain. - self._validate_universe_domain() - - # Send the request. 
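-
- # Editorial sketch (not generated code): per-call retry and timeout can
- # be overridden by the caller; parameter values below are illustrative:
- #
- #     from google.api_core import retry as retries
- #
- #     binding = client.get_data_attribute_binding(
- #         name=name,
- #         retry=retries.Retry(initial=1.0, maximum=10.0, timeout=60.0),
- #         timeout=30.0,
- #     )
-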
- response = rpc(
- request,
- retry=retry,
- timeout=timeout,
- metadata=metadata,
- )
-
- # Done; return the response.
- return response
-
- def create_data_attribute(self,
- request: Optional[Union[data_taxonomy.CreateDataAttributeRequest, dict]] = None,
- *,
- parent: Optional[str] = None,
- data_attribute: Optional[data_taxonomy.DataAttribute] = None,
- data_attribute_id: Optional[str] = None,
- retry: OptionalRetry = gapic_v1.method.DEFAULT,
- timeout: Union[float, object] = gapic_v1.method.DEFAULT,
- metadata: Sequence[Tuple[str, str]] = (),
- ) -> operation.Operation:
- r"""Creates a DataAttribute resource.
-
- .. code-block:: python
-
- # This snippet has been automatically generated and should be regarded as a
- # code template only.
- # It will require modifications to work:
- # - It may require correct/in-range values for request initialization.
- # - It may require specifying regional endpoints when creating the service
- # client as shown in:
- # https://googleapis.dev/python/google-api-core/latest/client_options.html
- from google.cloud import dataplex_v1
-
- def sample_create_data_attribute():
- # Create a client
- client = dataplex_v1.DataTaxonomyServiceClient()
-
- # Initialize request argument(s)
- request = dataplex_v1.CreateDataAttributeRequest(
- parent="parent_value",
- data_attribute_id="data_attribute_id_value",
- )
-
- # Make the request
- operation = client.create_data_attribute(request=request)
-
- print("Waiting for operation to complete...")
-
- response = operation.result()
-
- # Handle the response
- print(response)
-
- Args:
- request (Union[google.cloud.dataplex_v1.types.CreateDataAttributeRequest, dict]):
- The request object. Create DataAttribute request.
- parent (str):
- Required. The resource name of the parent data taxonomy
- projects/{project_number}/locations/{location_id}/dataTaxonomies/{data_taxonomy_id}
-
- This corresponds to the ``parent`` field
- on the ``request`` instance; if ``request`` is provided, this
- should not be set.
- data_attribute (google.cloud.dataplex_v1.types.DataAttribute):
- Required. DataAttribute resource.
- This corresponds to the ``data_attribute`` field
- on the ``request`` instance; if ``request`` is provided, this
- should not be set.
- data_attribute_id (str):
- Required. DataAttribute identifier.
-
- - Must contain only lowercase letters, numbers and
- hyphens.
- - Must start with a letter.
- - Must be between 1 and 63 characters.
- - Must end with a number or a letter.
- - Must be unique within the DataTaxonomy.
-
- This corresponds to the ``data_attribute_id`` field
- on the ``request`` instance; if ``request`` is provided, this
- should not be set.
- retry (google.api_core.retry.Retry): Designation of what errors, if any,
- should be retried.
- timeout (float): The timeout for this request.
- metadata (Sequence[Tuple[str, str]]): Strings which should be
- sent along with the request as metadata.
-
- Returns:
- google.api_core.operation.Operation:
- An object representing a long-running operation.
-
- The result type for the operation will be :class:`google.cloud.dataplex_v1.types.DataAttribute` Denotes one dataAttribute in a dataTaxonomy, for example, PII.
- DataAttribute resources can be defined in a
- hierarchy. A single dataAttribute resource can
- contain specs of multiple types::
-
-     PII
-       - ResourceAccessSpec:
-           - readers: foo@bar.com
-       - DataAccessSpec:
-           - readers: bar@foo.com
-
- """
- # Create or coerce a protobuf request object.
- # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([parent, data_attribute, data_attribute_id]) - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, data_taxonomy.CreateDataAttributeRequest): - request = data_taxonomy.CreateDataAttributeRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if parent is not None: - request.parent = parent - if data_attribute is not None: - request.data_attribute = data_attribute - if data_attribute_id is not None: - request.data_attribute_id = data_attribute_id - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.create_data_attribute] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), - ) - - # Validate the universe domain. - self._validate_universe_domain() - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Wrap the response in an operation future. - response = operation.from_gapic( - response, - self._transport.operations_client, - data_taxonomy.DataAttribute, - metadata_type=service.OperationMetadata, - ) - - # Done; return the response. - return response - - def update_data_attribute(self, - request: Optional[Union[data_taxonomy.UpdateDataAttributeRequest, dict]] = None, - *, - data_attribute: Optional[data_taxonomy.DataAttribute] = None, - update_mask: Optional[field_mask_pb2.FieldMask] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> operation.Operation: - r"""Updates a DataAttribute resource. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import dataplex_v1 - - def sample_update_data_attribute(): - # Create a client - client = dataplex_v1.DataTaxonomyServiceClient() - - # Initialize request argument(s) - request = dataplex_v1.UpdateDataAttributeRequest( - ) - - # Make the request - operation = client.update_data_attribute(request=request) - - print("Waiting for operation to complete...") - - response = operation.result() - - # Handle the response - print(response) - - Args: - request (Union[google.cloud.dataplex_v1.types.UpdateDataAttributeRequest, dict]): - The request object. Update DataAttribute request. - data_attribute (google.cloud.dataplex_v1.types.DataAttribute): - Required. Only fields specified in ``update_mask`` are - updated. - - This corresponds to the ``data_attribute`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. 
- update_mask (google.protobuf.field_mask_pb2.FieldMask):
- Required. Mask of fields to update.
- This corresponds to the ``update_mask`` field
- on the ``request`` instance; if ``request`` is provided, this
- should not be set.
- retry (google.api_core.retry.Retry): Designation of what errors, if any,
- should be retried.
- timeout (float): The timeout for this request.
- metadata (Sequence[Tuple[str, str]]): Strings which should be
- sent along with the request as metadata.
-
- Returns:
- google.api_core.operation.Operation:
- An object representing a long-running operation.
-
- The result type for the operation will be :class:`google.cloud.dataplex_v1.types.DataAttribute` Denotes one dataAttribute in a dataTaxonomy, for example, PII.
- DataAttribute resources can be defined in a
- hierarchy. A single dataAttribute resource can
- contain specs of multiple types::
-
-     PII
-       - ResourceAccessSpec:
-           - readers: foo@bar.com
-       - DataAccessSpec:
-           - readers: bar@foo.com
-
- """
- # Create or coerce a protobuf request object.
- # - Quick check: If we got a request object, we should *not* have
- # gotten any keyword arguments that map to the request.
- has_flattened_params = any([data_attribute, update_mask])
- if request is not None and has_flattened_params:
- raise ValueError('If the `request` argument is set, then none of '
- 'the individual field arguments should be set.')
-
- # - Use the request object if provided (there's no risk of modifying the input as
- # there are no flattened fields), or create one.
- if not isinstance(request, data_taxonomy.UpdateDataAttributeRequest):
- request = data_taxonomy.UpdateDataAttributeRequest(request)
- # If we have keyword arguments corresponding to fields on the
- # request, apply these.
- if data_attribute is not None:
- request.data_attribute = data_attribute
- if update_mask is not None:
- request.update_mask = update_mask
-
- # Wrap the RPC method; this adds retry and timeout information,
- # and friendly error handling.
- rpc = self._transport._wrapped_methods[self._transport.update_data_attribute]
-
- # Certain fields should be provided within the metadata header;
- # add these here.
- metadata = tuple(metadata) + (
- gapic_v1.routing_header.to_grpc_metadata((
- ("data_attribute.name", request.data_attribute.name),
- )),
- )
-
- # Validate the universe domain.
- self._validate_universe_domain()
-
- # Send the request.
- response = rpc(
- request,
- retry=retry,
- timeout=timeout,
- metadata=metadata,
- )
-
- # Wrap the response in an operation future.
- response = operation.from_gapic(
- response,
- self._transport.operations_client,
- data_taxonomy.DataAttribute,
- metadata_type=service.OperationMetadata,
- )
-
- # Done; return the response.
- return response
-
- def delete_data_attribute(self,
- request: Optional[Union[data_taxonomy.DeleteDataAttributeRequest, dict]] = None,
- *,
- name: Optional[str] = None,
- retry: OptionalRetry = gapic_v1.method.DEFAULT,
- timeout: Union[float, object] = gapic_v1.method.DEFAULT,
- metadata: Sequence[Tuple[str, str]] = (),
- ) -> operation.Operation:
- r"""Deletes a Data Attribute resource.
-
- .. code-block:: python
-
- # This snippet has been automatically generated and should be regarded as a
- # code template only.
- # It will require modifications to work:
- # - It may require correct/in-range values for request initialization.
- # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import dataplex_v1 - - def sample_delete_data_attribute(): - # Create a client - client = dataplex_v1.DataTaxonomyServiceClient() - - # Initialize request argument(s) - request = dataplex_v1.DeleteDataAttributeRequest( - name="name_value", - ) - - # Make the request - operation = client.delete_data_attribute(request=request) - - print("Waiting for operation to complete...") - - response = operation.result() - - # Handle the response - print(response) - - Args: - request (Union[google.cloud.dataplex_v1.types.DeleteDataAttributeRequest, dict]): - The request object. Delete DataAttribute request. - name (str): - Required. The resource name of the DataAttribute: - projects/{project_number}/locations/{location_id}/dataTaxonomies/{dataTaxonomy}/attributes/{data_attribute_id} - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.api_core.operation.Operation: - An object representing a long-running operation. - - The result type for the operation will be :class:`google.protobuf.empty_pb2.Empty` A generic empty message that you can re-use to avoid defining duplicated - empty messages in your APIs. A typical example is to - use it as the request or the response type of an API - method. For instance: - - service Foo { - rpc Bar(google.protobuf.Empty) returns - (google.protobuf.Empty); - - } - - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([name]) - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, data_taxonomy.DeleteDataAttributeRequest): - request = data_taxonomy.DeleteDataAttributeRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.delete_data_attribute] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Validate the universe domain. - self._validate_universe_domain() - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Wrap the response in an operation future. - response = operation.from_gapic( - response, - self._transport.operations_client, - empty_pb2.Empty, - metadata_type=service.OperationMetadata, - ) - - # Done; return the response. 
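-
- # Editorial sketch (not generated code): delete operations resolve to
- # google.protobuf.empty_pb2.Empty, so callers usually just block:
- #
- #     op = client.delete_data_attribute(name=attribute_name)
- #     op.result()  # Empty on success; raises GoogleAPICallError on failure
-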
- return response - - def list_data_attributes(self, - request: Optional[Union[data_taxonomy.ListDataAttributesRequest, dict]] = None, - *, - parent: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> pagers.ListDataAttributesPager: - r"""Lists Data Attribute resources in a DataTaxonomy. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import dataplex_v1 - - def sample_list_data_attributes(): - # Create a client - client = dataplex_v1.DataTaxonomyServiceClient() - - # Initialize request argument(s) - request = dataplex_v1.ListDataAttributesRequest( - parent="parent_value", - ) - - # Make the request - page_result = client.list_data_attributes(request=request) - - # Handle the response - for response in page_result: - print(response) - - Args: - request (Union[google.cloud.dataplex_v1.types.ListDataAttributesRequest, dict]): - The request object. List DataAttributes request. - parent (str): - Required. The resource name of the DataTaxonomy: - projects/{project_number}/locations/{location_id}/dataTaxonomies/{data_taxonomy_id} - - This corresponds to the ``parent`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.cloud.dataplex_v1.services.data_taxonomy_service.pagers.ListDataAttributesPager: - List DataAttributes response. - - Iterating over this object will yield - results and resolve additional pages - automatically. - - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([parent]) - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, data_taxonomy.ListDataAttributesRequest): - request = data_taxonomy.ListDataAttributesRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if parent is not None: - request.parent = parent - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.list_data_attributes] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), - ) - - # Validate the universe domain. - self._validate_universe_domain() - - # Send the request. 
- response = rpc(
- request,
- retry=retry,
- timeout=timeout,
- metadata=metadata,
- )
-
- # This method is paged; wrap the response in a pager, which provides
- # an `__iter__` convenience method.
- response = pagers.ListDataAttributesPager(
- method=rpc,
- request=request,
- response=response,
- retry=retry,
- timeout=timeout,
- metadata=metadata,
- )
-
- # Done; return the response.
- return response
-
- def get_data_attribute(self,
- request: Optional[Union[data_taxonomy.GetDataAttributeRequest, dict]] = None,
- *,
- name: Optional[str] = None,
- retry: OptionalRetry = gapic_v1.method.DEFAULT,
- timeout: Union[float, object] = gapic_v1.method.DEFAULT,
- metadata: Sequence[Tuple[str, str]] = (),
- ) -> data_taxonomy.DataAttribute:
- r"""Retrieves a Data Attribute resource.
-
- .. code-block:: python
-
- # This snippet has been automatically generated and should be regarded as a
- # code template only.
- # It will require modifications to work:
- # - It may require correct/in-range values for request initialization.
- # - It may require specifying regional endpoints when creating the service
- # client as shown in:
- # https://googleapis.dev/python/google-api-core/latest/client_options.html
- from google.cloud import dataplex_v1
-
- def sample_get_data_attribute():
- # Create a client
- client = dataplex_v1.DataTaxonomyServiceClient()
-
- # Initialize request argument(s)
- request = dataplex_v1.GetDataAttributeRequest(
- name="name_value",
- )
-
- # Make the request
- response = client.get_data_attribute(request=request)
-
- # Handle the response
- print(response)
-
- Args:
- request (Union[google.cloud.dataplex_v1.types.GetDataAttributeRequest, dict]):
- The request object. Get DataAttribute request.
- name (str):
- Required. The resource name of the dataAttribute:
- projects/{project_number}/locations/{location_id}/dataTaxonomies/{dataTaxonomy}/attributes/{data_attribute_id}
-
- This corresponds to the ``name`` field
- on the ``request`` instance; if ``request`` is provided, this
- should not be set.
- retry (google.api_core.retry.Retry): Designation of what errors, if any,
- should be retried.
- timeout (float): The timeout for this request.
- metadata (Sequence[Tuple[str, str]]): Strings which should be
- sent along with the request as metadata.
-
- Returns:
- google.cloud.dataplex_v1.types.DataAttribute:
- Denotes one dataAttribute in a dataTaxonomy, for example, PII.
- DataAttribute resources can be defined in a
- hierarchy. A single dataAttribute resource can
- contain specs of multiple types::
-
-     PII
-       - ResourceAccessSpec:
-           - readers: foo@bar.com
-       - DataAccessSpec:
-           - readers: bar@foo.com
-
- """
- # Create or coerce a protobuf request object.
- # - Quick check: If we got a request object, we should *not* have
- # gotten any keyword arguments that map to the request.
- has_flattened_params = any([name])
- if request is not None and has_flattened_params:
- raise ValueError('If the `request` argument is set, then none of '
- 'the individual field arguments should be set.')
-
- # - Use the request object if provided (there's no risk of modifying the input as
- # there are no flattened fields), or create one.
- if not isinstance(request, data_taxonomy.GetDataAttributeRequest):
- request = data_taxonomy.GetDataAttributeRequest(request)
- # If we have keyword arguments corresponding to fields on the
- # request, apply these.
- if name is not None:
- request.name = name
-
- # Wrap the RPC method; this adds retry and timeout information,
- # and friendly error handling.
- rpc = self._transport._wrapped_methods[self._transport.get_data_attribute] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Validate the universe domain. - self._validate_universe_domain() - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - def __enter__(self) -> "DataTaxonomyServiceClient": - return self - - def __exit__(self, type, value, traceback): - """Releases underlying transport's resources. - - .. warning:: - ONLY use as a context manager if the transport is NOT shared - with other clients! Exiting the with block will CLOSE the transport - and may cause errors in other clients! - """ - self.transport.close() - - def list_operations( - self, - request: Optional[operations_pb2.ListOperationsRequest] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> operations_pb2.ListOperationsResponse: - r"""Lists operations that match the specified filter in the request. - - Args: - request (:class:`~.operations_pb2.ListOperationsRequest`): - The request object. Request message for - `ListOperations` method. - retry (google.api_core.retry.Retry): Designation of what errors, - if any, should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - Returns: - ~.operations_pb2.ListOperationsResponse: - Response message for ``ListOperations`` method. - """ - # Create or coerce a protobuf request object. - # The request isn't a proto-plus wrapped type, - # so it must be constructed via keyword expansion. - if isinstance(request, dict): - request = operations_pb2.ListOperationsRequest(**request) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.list_operations] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata( - (("name", request.name),)), - ) - - # Validate the universe domain. - self._validate_universe_domain() - - # Send the request. - response = rpc( - request, retry=retry, timeout=timeout, metadata=metadata,) - - # Done; return the response. - return response - - def get_operation( - self, - request: Optional[operations_pb2.GetOperationRequest] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> operations_pb2.Operation: - r"""Gets the latest state of a long-running operation. - - Args: - request (:class:`~.operations_pb2.GetOperationRequest`): - The request object. Request message for - `GetOperation` method. - retry (google.api_core.retry.Retry): Designation of what errors, - if any, should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - Returns: - ~.operations_pb2.Operation: - An ``Operation`` object. - """ - # Create or coerce a protobuf request object. 
- # The request isn't a proto-plus wrapped type, - # so it must be constructed via keyword expansion. - if isinstance(request, dict): - request = operations_pb2.GetOperationRequest(**request) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.get_operation] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata( - (("name", request.name),)), - ) - - # Validate the universe domain. - self._validate_universe_domain() - - # Send the request. - response = rpc( - request, retry=retry, timeout=timeout, metadata=metadata,) - - # Done; return the response. - return response - - def delete_operation( - self, - request: Optional[operations_pb2.DeleteOperationRequest] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> None: - r"""Deletes a long-running operation. - - This method indicates that the client is no longer interested - in the operation result. It does not cancel the operation. - If the server doesn't support this method, it returns - `google.rpc.Code.UNIMPLEMENTED`. - - Args: - request (:class:`~.operations_pb2.DeleteOperationRequest`): - The request object. Request message for - `DeleteOperation` method. - retry (google.api_core.retry.Retry): Designation of what errors, - if any, should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - Returns: - None - """ - # Create or coerce a protobuf request object. - # The request isn't a proto-plus wrapped type, - # so it must be constructed via keyword expansion. - if isinstance(request, dict): - request = operations_pb2.DeleteOperationRequest(**request) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.delete_operation] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata( - (("name", request.name),)), - ) - - # Validate the universe domain. - self._validate_universe_domain() - - # Send the request. - rpc(request, retry=retry, timeout=timeout, metadata=metadata,) - - def cancel_operation( - self, - request: Optional[operations_pb2.CancelOperationRequest] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> None: - r"""Starts asynchronous cancellation on a long-running operation. - - The server makes a best effort to cancel the operation, but success - is not guaranteed. If the server doesn't support this method, it returns - `google.rpc.Code.UNIMPLEMENTED`. - - Args: - request (:class:`~.operations_pb2.CancelOperationRequest`): - The request object. Request message for - `CancelOperation` method. - retry (google.api_core.retry.Retry): Designation of what errors, - if any, should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - Returns: - None - """ - # Create or coerce a protobuf request object. 
- # The request isn't a proto-plus wrapped type, - # so it must be constructed via keyword expansion. - if isinstance(request, dict): - request = operations_pb2.CancelOperationRequest(**request) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.cancel_operation] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata( - (("name", request.name),)), - ) - - # Validate the universe domain. - self._validate_universe_domain() - - # Send the request. - rpc(request, retry=retry, timeout=timeout, metadata=metadata,) - - def get_location( - self, - request: Optional[locations_pb2.GetLocationRequest] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> locations_pb2.Location: - r"""Gets information about a location. - - Args: - request (:class:`~.location_pb2.GetLocationRequest`): - The request object. Request message for - `GetLocation` method. - retry (google.api_core.retry.Retry): Designation of what errors, - if any, should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - Returns: - ~.location_pb2.Location: - Location object. - """ - # Create or coerce a protobuf request object. - # The request isn't a proto-plus wrapped type, - # so it must be constructed via keyword expansion. - if isinstance(request, dict): - request = locations_pb2.GetLocationRequest(**request) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.get_location] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata( - (("name", request.name),)), - ) - - # Validate the universe domain. - self._validate_universe_domain() - - # Send the request. - response = rpc( - request, retry=retry, timeout=timeout, metadata=metadata,) - - # Done; return the response. - return response - - def list_locations( - self, - request: Optional[locations_pb2.ListLocationsRequest] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> locations_pb2.ListLocationsResponse: - r"""Lists information about the supported locations for this service. - - Args: - request (:class:`~.location_pb2.ListLocationsRequest`): - The request object. Request message for - `ListLocations` method. - retry (google.api_core.retry.Retry): Designation of what errors, - if any, should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - Returns: - ~.location_pb2.ListLocationsResponse: - Response message for ``ListLocations`` method. - """ - # Create or coerce a protobuf request object. - # The request isn't a proto-plus wrapped type, - # so it must be constructed via keyword expansion. - if isinstance(request, dict): - request = locations_pb2.ListLocationsRequest(**request) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. 
- rpc = self._transport._wrapped_methods[self._transport.list_locations] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata( - (("name", request.name),)), - ) - - # Validate the universe domain. - self._validate_universe_domain() - - # Send the request. - response = rpc( - request, retry=retry, timeout=timeout, metadata=metadata,) - - # Done; return the response. - return response - - -DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(gapic_version=package_version.__version__) - - -__all__ = ( - "DataTaxonomyServiceClient", -) diff --git a/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/data_taxonomy_service/pagers.py b/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/data_taxonomy_service/pagers.py deleted file mode 100644 index 787b71f82ce8..000000000000 --- a/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/data_taxonomy_service/pagers.py +++ /dev/null @@ -1,432 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -from google.api_core import gapic_v1 -from google.api_core import retry as retries -from google.api_core import retry_async as retries_async -from typing import Any, AsyncIterator, Awaitable, Callable, Sequence, Tuple, Optional, Iterator, Union -try: - OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault, None] - OptionalAsyncRetry = Union[retries_async.AsyncRetry, gapic_v1.method._MethodDefault, None] -except AttributeError: # pragma: NO COVER - OptionalRetry = Union[retries.Retry, object, None] # type: ignore - OptionalAsyncRetry = Union[retries_async.AsyncRetry, object, None] # type: ignore - -from google.cloud.dataplex_v1.types import data_taxonomy - - -class ListDataTaxonomiesPager: - """A pager for iterating through ``list_data_taxonomies`` requests. - - This class thinly wraps an initial - :class:`google.cloud.dataplex_v1.types.ListDataTaxonomiesResponse` object, and - provides an ``__iter__`` method to iterate through its - ``data_taxonomies`` field. - - If there are more pages, the ``__iter__`` method will make additional - ``ListDataTaxonomies`` requests and continue to iterate - through the ``data_taxonomies`` field on the - corresponding responses. - - All the usual :class:`google.cloud.dataplex_v1.types.ListDataTaxonomiesResponse` - attributes are available on the pager. If multiple requests are made, only - the most recent response is retained, and thus used for attribute lookup. - """ - def __init__(self, - method: Callable[..., data_taxonomy.ListDataTaxonomiesResponse], - request: data_taxonomy.ListDataTaxonomiesRequest, - response: data_taxonomy.ListDataTaxonomiesResponse, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = ()): - """Instantiate the pager. 
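-
-        In normal use this pager is returned by
-        ``DataTaxonomyServiceClient.list_data_taxonomies`` rather than
-        constructed directly; an illustrative sketch (resource names assumed):
-
-        .. code-block:: python
-
-            pager = client.list_data_taxonomies(parent="projects/p/locations/us-central1")
-            for taxonomy in pager:  # __iter__ resolves further pages lazily
-                print(taxonomy.name)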
- - Args: - method (Callable): The method that was originally called, and - which instantiated this pager. - request (google.cloud.dataplex_v1.types.ListDataTaxonomiesRequest): - The initial request object. - response (google.cloud.dataplex_v1.types.ListDataTaxonomiesResponse): - The initial response object. - retry (google.api_core.retry.Retry): Designation of what errors, - if any, should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - """ - self._method = method - self._request = data_taxonomy.ListDataTaxonomiesRequest(request) - self._response = response - self._retry = retry - self._timeout = timeout - self._metadata = metadata - - def __getattr__(self, name: str) -> Any: - return getattr(self._response, name) - - @property - def pages(self) -> Iterator[data_taxonomy.ListDataTaxonomiesResponse]: - yield self._response - while self._response.next_page_token: - self._request.page_token = self._response.next_page_token - self._response = self._method(self._request, retry=self._retry, timeout=self._timeout, metadata=self._metadata) - yield self._response - - def __iter__(self) -> Iterator[data_taxonomy.DataTaxonomy]: - for page in self.pages: - yield from page.data_taxonomies - - def __repr__(self) -> str: - return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) - - -class ListDataTaxonomiesAsyncPager: - """A pager for iterating through ``list_data_taxonomies`` requests. - - This class thinly wraps an initial - :class:`google.cloud.dataplex_v1.types.ListDataTaxonomiesResponse` object, and - provides an ``__aiter__`` method to iterate through its - ``data_taxonomies`` field. - - If there are more pages, the ``__aiter__`` method will make additional - ``ListDataTaxonomies`` requests and continue to iterate - through the ``data_taxonomies`` field on the - corresponding responses. - - All the usual :class:`google.cloud.dataplex_v1.types.ListDataTaxonomiesResponse` - attributes are available on the pager. If multiple requests are made, only - the most recent response is retained, and thus used for attribute lookup. - """ - def __init__(self, - method: Callable[..., Awaitable[data_taxonomy.ListDataTaxonomiesResponse]], - request: data_taxonomy.ListDataTaxonomiesRequest, - response: data_taxonomy.ListDataTaxonomiesResponse, - *, - retry: OptionalAsyncRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = ()): - """Instantiates the pager. - - Args: - method (Callable): The method that was originally called, and - which instantiated this pager. - request (google.cloud.dataplex_v1.types.ListDataTaxonomiesRequest): - The initial request object. - response (google.cloud.dataplex_v1.types.ListDataTaxonomiesResponse): - The initial response object. - retry (google.api_core.retry.AsyncRetry): Designation of what errors, - if any, should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. 
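-
-        An illustrative async counterpart (editorial; assumes the generated
-        ``DataTaxonomyServiceAsyncClient`` from this package):
-
-        .. code-block:: python
-
-            pager = await async_client.list_data_taxonomies(parent=parent)
-            async for taxonomy in pager:
-                print(taxonomy.name)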
- """ - self._method = method - self._request = data_taxonomy.ListDataTaxonomiesRequest(request) - self._response = response - self._retry = retry - self._timeout = timeout - self._metadata = metadata - - def __getattr__(self, name: str) -> Any: - return getattr(self._response, name) - - @property - async def pages(self) -> AsyncIterator[data_taxonomy.ListDataTaxonomiesResponse]: - yield self._response - while self._response.next_page_token: - self._request.page_token = self._response.next_page_token - self._response = await self._method(self._request, retry=self._retry, timeout=self._timeout, metadata=self._metadata) - yield self._response - def __aiter__(self) -> AsyncIterator[data_taxonomy.DataTaxonomy]: - async def async_generator(): - async for page in self.pages: - for response in page.data_taxonomies: - yield response - - return async_generator() - - def __repr__(self) -> str: - return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) - - -class ListDataAttributeBindingsPager: - """A pager for iterating through ``list_data_attribute_bindings`` requests. - - This class thinly wraps an initial - :class:`google.cloud.dataplex_v1.types.ListDataAttributeBindingsResponse` object, and - provides an ``__iter__`` method to iterate through its - ``data_attribute_bindings`` field. - - If there are more pages, the ``__iter__`` method will make additional - ``ListDataAttributeBindings`` requests and continue to iterate - through the ``data_attribute_bindings`` field on the - corresponding responses. - - All the usual :class:`google.cloud.dataplex_v1.types.ListDataAttributeBindingsResponse` - attributes are available on the pager. If multiple requests are made, only - the most recent response is retained, and thus used for attribute lookup. - """ - def __init__(self, - method: Callable[..., data_taxonomy.ListDataAttributeBindingsResponse], - request: data_taxonomy.ListDataAttributeBindingsRequest, - response: data_taxonomy.ListDataAttributeBindingsResponse, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = ()): - """Instantiate the pager. - - Args: - method (Callable): The method that was originally called, and - which instantiated this pager. - request (google.cloud.dataplex_v1.types.ListDataAttributeBindingsRequest): - The initial request object. - response (google.cloud.dataplex_v1.types.ListDataAttributeBindingsResponse): - The initial response object. - retry (google.api_core.retry.Retry): Designation of what errors, - if any, should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. 
- """ - self._method = method - self._request = data_taxonomy.ListDataAttributeBindingsRequest(request) - self._response = response - self._retry = retry - self._timeout = timeout - self._metadata = metadata - - def __getattr__(self, name: str) -> Any: - return getattr(self._response, name) - - @property - def pages(self) -> Iterator[data_taxonomy.ListDataAttributeBindingsResponse]: - yield self._response - while self._response.next_page_token: - self._request.page_token = self._response.next_page_token - self._response = self._method(self._request, retry=self._retry, timeout=self._timeout, metadata=self._metadata) - yield self._response - - def __iter__(self) -> Iterator[data_taxonomy.DataAttributeBinding]: - for page in self.pages: - yield from page.data_attribute_bindings - - def __repr__(self) -> str: - return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) - - -class ListDataAttributeBindingsAsyncPager: - """A pager for iterating through ``list_data_attribute_bindings`` requests. - - This class thinly wraps an initial - :class:`google.cloud.dataplex_v1.types.ListDataAttributeBindingsResponse` object, and - provides an ``__aiter__`` method to iterate through its - ``data_attribute_bindings`` field. - - If there are more pages, the ``__aiter__`` method will make additional - ``ListDataAttributeBindings`` requests and continue to iterate - through the ``data_attribute_bindings`` field on the - corresponding responses. - - All the usual :class:`google.cloud.dataplex_v1.types.ListDataAttributeBindingsResponse` - attributes are available on the pager. If multiple requests are made, only - the most recent response is retained, and thus used for attribute lookup. - """ - def __init__(self, - method: Callable[..., Awaitable[data_taxonomy.ListDataAttributeBindingsResponse]], - request: data_taxonomy.ListDataAttributeBindingsRequest, - response: data_taxonomy.ListDataAttributeBindingsResponse, - *, - retry: OptionalAsyncRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = ()): - """Instantiates the pager. - - Args: - method (Callable): The method that was originally called, and - which instantiated this pager. - request (google.cloud.dataplex_v1.types.ListDataAttributeBindingsRequest): - The initial request object. - response (google.cloud.dataplex_v1.types.ListDataAttributeBindingsResponse): - The initial response object. - retry (google.api_core.retry.AsyncRetry): Designation of what errors, - if any, should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. 
- """ - self._method = method - self._request = data_taxonomy.ListDataAttributeBindingsRequest(request) - self._response = response - self._retry = retry - self._timeout = timeout - self._metadata = metadata - - def __getattr__(self, name: str) -> Any: - return getattr(self._response, name) - - @property - async def pages(self) -> AsyncIterator[data_taxonomy.ListDataAttributeBindingsResponse]: - yield self._response - while self._response.next_page_token: - self._request.page_token = self._response.next_page_token - self._response = await self._method(self._request, retry=self._retry, timeout=self._timeout, metadata=self._metadata) - yield self._response - def __aiter__(self) -> AsyncIterator[data_taxonomy.DataAttributeBinding]: - async def async_generator(): - async for page in self.pages: - for response in page.data_attribute_bindings: - yield response - - return async_generator() - - def __repr__(self) -> str: - return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) - - -class ListDataAttributesPager: - """A pager for iterating through ``list_data_attributes`` requests. - - This class thinly wraps an initial - :class:`google.cloud.dataplex_v1.types.ListDataAttributesResponse` object, and - provides an ``__iter__`` method to iterate through its - ``data_attributes`` field. - - If there are more pages, the ``__iter__`` method will make additional - ``ListDataAttributes`` requests and continue to iterate - through the ``data_attributes`` field on the - corresponding responses. - - All the usual :class:`google.cloud.dataplex_v1.types.ListDataAttributesResponse` - attributes are available on the pager. If multiple requests are made, only - the most recent response is retained, and thus used for attribute lookup. - """ - def __init__(self, - method: Callable[..., data_taxonomy.ListDataAttributesResponse], - request: data_taxonomy.ListDataAttributesRequest, - response: data_taxonomy.ListDataAttributesResponse, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = ()): - """Instantiate the pager. - - Args: - method (Callable): The method that was originally called, and - which instantiated this pager. - request (google.cloud.dataplex_v1.types.ListDataAttributesRequest): - The initial request object. - response (google.cloud.dataplex_v1.types.ListDataAttributesResponse): - The initial response object. - retry (google.api_core.retry.Retry): Designation of what errors, - if any, should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. 
- """ - self._method = method - self._request = data_taxonomy.ListDataAttributesRequest(request) - self._response = response - self._retry = retry - self._timeout = timeout - self._metadata = metadata - - def __getattr__(self, name: str) -> Any: - return getattr(self._response, name) - - @property - def pages(self) -> Iterator[data_taxonomy.ListDataAttributesResponse]: - yield self._response - while self._response.next_page_token: - self._request.page_token = self._response.next_page_token - self._response = self._method(self._request, retry=self._retry, timeout=self._timeout, metadata=self._metadata) - yield self._response - - def __iter__(self) -> Iterator[data_taxonomy.DataAttribute]: - for page in self.pages: - yield from page.data_attributes - - def __repr__(self) -> str: - return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) - - -class ListDataAttributesAsyncPager: - """A pager for iterating through ``list_data_attributes`` requests. - - This class thinly wraps an initial - :class:`google.cloud.dataplex_v1.types.ListDataAttributesResponse` object, and - provides an ``__aiter__`` method to iterate through its - ``data_attributes`` field. - - If there are more pages, the ``__aiter__`` method will make additional - ``ListDataAttributes`` requests and continue to iterate - through the ``data_attributes`` field on the - corresponding responses. - - All the usual :class:`google.cloud.dataplex_v1.types.ListDataAttributesResponse` - attributes are available on the pager. If multiple requests are made, only - the most recent response is retained, and thus used for attribute lookup. - """ - def __init__(self, - method: Callable[..., Awaitable[data_taxonomy.ListDataAttributesResponse]], - request: data_taxonomy.ListDataAttributesRequest, - response: data_taxonomy.ListDataAttributesResponse, - *, - retry: OptionalAsyncRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = ()): - """Instantiates the pager. - - Args: - method (Callable): The method that was originally called, and - which instantiated this pager. - request (google.cloud.dataplex_v1.types.ListDataAttributesRequest): - The initial request object. - response (google.cloud.dataplex_v1.types.ListDataAttributesResponse): - The initial response object. - retry (google.api_core.retry.AsyncRetry): Designation of what errors, - if any, should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. 
- """ - self._method = method - self._request = data_taxonomy.ListDataAttributesRequest(request) - self._response = response - self._retry = retry - self._timeout = timeout - self._metadata = metadata - - def __getattr__(self, name: str) -> Any: - return getattr(self._response, name) - - @property - async def pages(self) -> AsyncIterator[data_taxonomy.ListDataAttributesResponse]: - yield self._response - while self._response.next_page_token: - self._request.page_token = self._response.next_page_token - self._response = await self._method(self._request, retry=self._retry, timeout=self._timeout, metadata=self._metadata) - yield self._response - def __aiter__(self) -> AsyncIterator[data_taxonomy.DataAttribute]: - async def async_generator(): - async for page in self.pages: - for response in page.data_attributes: - yield response - - return async_generator() - - def __repr__(self) -> str: - return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) diff --git a/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/data_taxonomy_service/transports/README.rst b/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/data_taxonomy_service/transports/README.rst deleted file mode 100644 index 5c194fc01362..000000000000 --- a/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/data_taxonomy_service/transports/README.rst +++ /dev/null @@ -1,9 +0,0 @@ - -transport inheritance structure -_______________________________ - -`DataTaxonomyServiceTransport` is the ABC for all transports. -- public child `DataTaxonomyServiceGrpcTransport` for sync gRPC transport (defined in `grpc.py`). -- public child `DataTaxonomyServiceGrpcAsyncIOTransport` for async gRPC transport (defined in `grpc_asyncio.py`). -- private child `_BaseDataTaxonomyServiceRestTransport` for base REST transport with inner classes `_BaseMETHOD` (defined in `rest_base.py`). -- public child `DataTaxonomyServiceRestTransport` for sync REST transport with inner classes `METHOD` derived from the parent's corresponding `_BaseMETHOD` classes (defined in `rest.py`). diff --git a/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/data_taxonomy_service/transports/__init__.py b/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/data_taxonomy_service/transports/__init__.py deleted file mode 100644 index aeab503c70d4..000000000000 --- a/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/data_taxonomy_service/transports/__init__.py +++ /dev/null @@ -1,33 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -from collections import OrderedDict -from typing import Dict, Type - -from .base import DataTaxonomyServiceTransport -from .grpc import DataTaxonomyServiceGrpcTransport -from .grpc_asyncio import DataTaxonomyServiceGrpcAsyncIOTransport - - -# Compile a registry of transports. 
-_transport_registry = OrderedDict() # type: Dict[str, Type[DataTaxonomyServiceTransport]] -_transport_registry['grpc'] = DataTaxonomyServiceGrpcTransport -_transport_registry['grpc_asyncio'] = DataTaxonomyServiceGrpcAsyncIOTransport - -__all__ = ( - 'DataTaxonomyServiceTransport', - 'DataTaxonomyServiceGrpcTransport', - 'DataTaxonomyServiceGrpcAsyncIOTransport', -) diff --git a/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/data_taxonomy_service/transports/base.py b/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/data_taxonomy_service/transports/base.py deleted file mode 100644 index 526e35b387b1..000000000000 --- a/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/data_taxonomy_service/transports/base.py +++ /dev/null @@ -1,443 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -import abc -from typing import Awaitable, Callable, Dict, Optional, Sequence, Union - -from google.cloud.dataplex_v1 import gapic_version as package_version - -import google.auth # type: ignore -import google.api_core -from google.api_core import exceptions as core_exceptions -from google.api_core import gapic_v1 -from google.api_core import retry as retries -from google.api_core import operations_v1 -from google.auth import credentials as ga_credentials # type: ignore -from google.oauth2 import service_account # type: ignore - -from google.cloud.dataplex_v1.types import data_taxonomy -from google.cloud.dataplex_v1.types import data_taxonomy as gcd_data_taxonomy -from google.cloud.location import locations_pb2 # type: ignore -from google.iam.v1 import iam_policy_pb2 # type: ignore -from google.iam.v1 import policy_pb2 # type: ignore -from google.longrunning import operations_pb2 # type: ignore - -DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(gapic_version=package_version.__version__) - - -class DataTaxonomyServiceTransport(abc.ABC): - """Abstract transport class for DataTaxonomyService.""" - - AUTH_SCOPES = ( - 'https://www.googleapis.com/auth/cloud-platform', - ) - - DEFAULT_HOST: str = 'dataplex.googleapis.com' - def __init__( - self, *, - host: str = DEFAULT_HOST, - credentials: Optional[ga_credentials.Credentials] = None, - credentials_file: Optional[str] = None, - scopes: Optional[Sequence[str]] = None, - quota_project_id: Optional[str] = None, - client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, - always_use_jwt_access: Optional[bool] = False, - api_audience: Optional[str] = None, - **kwargs, - ) -> None: - """Instantiate the transport. - - Args: - host (Optional[str]): - The hostname to connect to (default: 'dataplex.googleapis.com'). - credentials (Optional[google.auth.credentials.Credentials]): The - authorization credentials to attach to requests. These - credentials identify the application to the service; if none - are specified, the client will attempt to ascertain the - credentials from the environment. 
- credentials_file (Optional[str]): A file with credentials that can
- be loaded with :func:`google.auth.load_credentials_from_file`.
- This argument is mutually exclusive with credentials.
- scopes (Optional[Sequence[str]]): A list of scopes.
- quota_project_id (Optional[str]): An optional project to use for billing
- and quota.
- client_info (google.api_core.gapic_v1.client_info.ClientInfo):
- The client info used to send a user-agent string along with
- API requests. If ``None``, then default info will be used.
- Generally, you only need to set this if you're developing
- your own client library.
- always_use_jwt_access (Optional[bool]): Whether self signed JWT should
- be used for service account credentials.
- """
-
- scopes_kwargs = {"scopes": scopes, "default_scopes": self.AUTH_SCOPES}
-
- # Save the scopes.
- self._scopes = scopes
- if not hasattr(self, "_ignore_credentials"):
- self._ignore_credentials: bool = False
-
- # If no credentials are provided, then determine the appropriate
- # defaults.
- if credentials and credentials_file:
- raise core_exceptions.DuplicateCredentialArgs("'credentials_file' and 'credentials' are mutually exclusive")
-
- if credentials_file is not None:
- credentials, _ = google.auth.load_credentials_from_file(
- credentials_file,
- **scopes_kwargs,
- quota_project_id=quota_project_id
- )
- elif credentials is None and not self._ignore_credentials:
- credentials, _ = google.auth.default(**scopes_kwargs, quota_project_id=quota_project_id)
- # Don't apply the audience if a credentials file was passed by the user.
- if hasattr(credentials, "with_gdch_audience"):
- credentials = credentials.with_gdch_audience(api_audience if api_audience else host)
-
- # If the credentials are service account credentials, then always try to use self signed JWT.
- if always_use_jwt_access and isinstance(credentials, service_account.Credentials) and hasattr(service_account.Credentials, "with_always_use_jwt_access"):
- credentials = credentials.with_always_use_jwt_access(True)
-
- # Save the credentials.
- self._credentials = credentials
-
- # Save the hostname. Default to port 443 (HTTPS) if none is specified.
- if ':' not in host:
- host += ':443'
- self._host = host
-
- @property
- def host(self):
- return self._host
-
- def _prep_wrapped_messages(self, client_info):
- # Precompute the wrapped methods.
- self._wrapped_methods = { - self.create_data_taxonomy: gapic_v1.method.wrap_method( - self.create_data_taxonomy, - default_timeout=None, - client_info=client_info, - ), - self.update_data_taxonomy: gapic_v1.method.wrap_method( - self.update_data_taxonomy, - default_timeout=None, - client_info=client_info, - ), - self.delete_data_taxonomy: gapic_v1.method.wrap_method( - self.delete_data_taxonomy, - default_timeout=None, - client_info=client_info, - ), - self.list_data_taxonomies: gapic_v1.method.wrap_method( - self.list_data_taxonomies, - default_timeout=None, - client_info=client_info, - ), - self.get_data_taxonomy: gapic_v1.method.wrap_method( - self.get_data_taxonomy, - default_timeout=None, - client_info=client_info, - ), - self.create_data_attribute_binding: gapic_v1.method.wrap_method( - self.create_data_attribute_binding, - default_timeout=None, - client_info=client_info, - ), - self.update_data_attribute_binding: gapic_v1.method.wrap_method( - self.update_data_attribute_binding, - default_timeout=None, - client_info=client_info, - ), - self.delete_data_attribute_binding: gapic_v1.method.wrap_method( - self.delete_data_attribute_binding, - default_timeout=None, - client_info=client_info, - ), - self.list_data_attribute_bindings: gapic_v1.method.wrap_method( - self.list_data_attribute_bindings, - default_timeout=None, - client_info=client_info, - ), - self.get_data_attribute_binding: gapic_v1.method.wrap_method( - self.get_data_attribute_binding, - default_timeout=None, - client_info=client_info, - ), - self.create_data_attribute: gapic_v1.method.wrap_method( - self.create_data_attribute, - default_timeout=None, - client_info=client_info, - ), - self.update_data_attribute: gapic_v1.method.wrap_method( - self.update_data_attribute, - default_timeout=None, - client_info=client_info, - ), - self.delete_data_attribute: gapic_v1.method.wrap_method( - self.delete_data_attribute, - default_timeout=None, - client_info=client_info, - ), - self.list_data_attributes: gapic_v1.method.wrap_method( - self.list_data_attributes, - default_timeout=None, - client_info=client_info, - ), - self.get_data_attribute: gapic_v1.method.wrap_method( - self.get_data_attribute, - default_timeout=None, - client_info=client_info, - ), - self.get_location: gapic_v1.method.wrap_method( - self.get_location, - default_timeout=None, - client_info=client_info, - ), - self.list_locations: gapic_v1.method.wrap_method( - self.list_locations, - default_timeout=None, - client_info=client_info, - ), - self.cancel_operation: gapic_v1.method.wrap_method( - self.cancel_operation, - default_timeout=None, - client_info=client_info, - ), - self.delete_operation: gapic_v1.method.wrap_method( - self.delete_operation, - default_timeout=None, - client_info=client_info, - ), - self.get_operation: gapic_v1.method.wrap_method( - self.get_operation, - default_timeout=None, - client_info=client_info, - ), - self.list_operations: gapic_v1.method.wrap_method( - self.list_operations, - default_timeout=None, - client_info=client_info, - ), - } - - def close(self): - """Closes resources associated with the transport. - - .. warning:: - Only call this method if the transport is NOT shared - with other clients - this may cause errors in other clients! 
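The `_prep_wrapped_methods` table above is what the client indexes with `self._transport._wrapped_methods[...]` before dispatch. A self-contained sketch of what `gapic_v1.method.wrap_method` does with these arguments; the `echo` function is an illustrative stand-in, not part of this package.

from google.api_core import gapic_v1

def echo(request, *, timeout=None, metadata=()):
    # Stand-in for a transport stub; real stubs send the request over gRPC.
    return request

wrapped = gapic_v1.method.wrap_method(
    echo,
    default_timeout=None,  # matches the entries above: no default deadline
    client_info=gapic_v1.client_info.ClientInfo(),
)

# The wrapped callable injects the x-goog-api-client user-agent metadata
# and applies default retry/timeout policies when the caller passes none.
print(wrapped("hello"))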
- """ - raise NotImplementedError() - - @property - def operations_client(self): - """Return the client designed to process long-running operations.""" - raise NotImplementedError() - - @property - def create_data_taxonomy(self) -> Callable[ - [gcd_data_taxonomy.CreateDataTaxonomyRequest], - Union[ - operations_pb2.Operation, - Awaitable[operations_pb2.Operation] - ]]: - raise NotImplementedError() - - @property - def update_data_taxonomy(self) -> Callable[ - [gcd_data_taxonomy.UpdateDataTaxonomyRequest], - Union[ - operations_pb2.Operation, - Awaitable[operations_pb2.Operation] - ]]: - raise NotImplementedError() - - @property - def delete_data_taxonomy(self) -> Callable[ - [data_taxonomy.DeleteDataTaxonomyRequest], - Union[ - operations_pb2.Operation, - Awaitable[operations_pb2.Operation] - ]]: - raise NotImplementedError() - - @property - def list_data_taxonomies(self) -> Callable[ - [data_taxonomy.ListDataTaxonomiesRequest], - Union[ - data_taxonomy.ListDataTaxonomiesResponse, - Awaitable[data_taxonomy.ListDataTaxonomiesResponse] - ]]: - raise NotImplementedError() - - @property - def get_data_taxonomy(self) -> Callable[ - [data_taxonomy.GetDataTaxonomyRequest], - Union[ - data_taxonomy.DataTaxonomy, - Awaitable[data_taxonomy.DataTaxonomy] - ]]: - raise NotImplementedError() - - @property - def create_data_attribute_binding(self) -> Callable[ - [data_taxonomy.CreateDataAttributeBindingRequest], - Union[ - operations_pb2.Operation, - Awaitable[operations_pb2.Operation] - ]]: - raise NotImplementedError() - - @property - def update_data_attribute_binding(self) -> Callable[ - [data_taxonomy.UpdateDataAttributeBindingRequest], - Union[ - operations_pb2.Operation, - Awaitable[operations_pb2.Operation] - ]]: - raise NotImplementedError() - - @property - def delete_data_attribute_binding(self) -> Callable[ - [data_taxonomy.DeleteDataAttributeBindingRequest], - Union[ - operations_pb2.Operation, - Awaitable[operations_pb2.Operation] - ]]: - raise NotImplementedError() - - @property - def list_data_attribute_bindings(self) -> Callable[ - [data_taxonomy.ListDataAttributeBindingsRequest], - Union[ - data_taxonomy.ListDataAttributeBindingsResponse, - Awaitable[data_taxonomy.ListDataAttributeBindingsResponse] - ]]: - raise NotImplementedError() - - @property - def get_data_attribute_binding(self) -> Callable[ - [data_taxonomy.GetDataAttributeBindingRequest], - Union[ - data_taxonomy.DataAttributeBinding, - Awaitable[data_taxonomy.DataAttributeBinding] - ]]: - raise NotImplementedError() - - @property - def create_data_attribute(self) -> Callable[ - [data_taxonomy.CreateDataAttributeRequest], - Union[ - operations_pb2.Operation, - Awaitable[operations_pb2.Operation] - ]]: - raise NotImplementedError() - - @property - def update_data_attribute(self) -> Callable[ - [data_taxonomy.UpdateDataAttributeRequest], - Union[ - operations_pb2.Operation, - Awaitable[operations_pb2.Operation] - ]]: - raise NotImplementedError() - - @property - def delete_data_attribute(self) -> Callable[ - [data_taxonomy.DeleteDataAttributeRequest], - Union[ - operations_pb2.Operation, - Awaitable[operations_pb2.Operation] - ]]: - raise NotImplementedError() - - @property - def list_data_attributes(self) -> Callable[ - [data_taxonomy.ListDataAttributesRequest], - Union[ - data_taxonomy.ListDataAttributesResponse, - Awaitable[data_taxonomy.ListDataAttributesResponse] - ]]: - raise NotImplementedError() - - @property - def get_data_attribute(self) -> Callable[ - [data_taxonomy.GetDataAttributeRequest], - Union[ - 
data_taxonomy.DataAttribute, - Awaitable[data_taxonomy.DataAttribute] - ]]: - raise NotImplementedError() - - @property - def list_operations( - self, - ) -> Callable[ - [operations_pb2.ListOperationsRequest], - Union[operations_pb2.ListOperationsResponse, Awaitable[operations_pb2.ListOperationsResponse]], - ]: - raise NotImplementedError() - - @property - def get_operation( - self, - ) -> Callable[ - [operations_pb2.GetOperationRequest], - Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], - ]: - raise NotImplementedError() - - @property - def cancel_operation( - self, - ) -> Callable[ - [operations_pb2.CancelOperationRequest], - None, - ]: - raise NotImplementedError() - - @property - def delete_operation( - self, - ) -> Callable[ - [operations_pb2.DeleteOperationRequest], - None, - ]: - raise NotImplementedError() - - @property - def get_location(self, - ) -> Callable[ - [locations_pb2.GetLocationRequest], - Union[locations_pb2.Location, Awaitable[locations_pb2.Location]], - ]: - raise NotImplementedError() - - @property - def list_locations(self, - ) -> Callable[ - [locations_pb2.ListLocationsRequest], - Union[locations_pb2.ListLocationsResponse, Awaitable[locations_pb2.ListLocationsResponse]], - ]: - raise NotImplementedError() - - @property - def kind(self) -> str: - raise NotImplementedError() - - -__all__ = ( - 'DataTaxonomyServiceTransport', -) diff --git a/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/data_taxonomy_service/transports/grpc.py b/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/data_taxonomy_service/transports/grpc.py deleted file mode 100644 index d7703acdda72..000000000000 --- a/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/data_taxonomy_service/transports/grpc.py +++ /dev/null @@ -1,773 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -import warnings -from typing import Callable, Dict, Optional, Sequence, Tuple, Union - -from google.api_core import grpc_helpers -from google.api_core import operations_v1 -from google.api_core import gapic_v1 -import google.auth # type: ignore -from google.auth import credentials as ga_credentials # type: ignore -from google.auth.transport.grpc import SslCredentials # type: ignore - -import grpc # type: ignore - -from google.cloud.dataplex_v1.types import data_taxonomy -from google.cloud.dataplex_v1.types import data_taxonomy as gcd_data_taxonomy -from google.cloud.location import locations_pb2 # type: ignore -from google.iam.v1 import iam_policy_pb2 # type: ignore -from google.iam.v1 import policy_pb2 # type: ignore -from google.longrunning import operations_pb2 # type: ignore -from .base import DataTaxonomyServiceTransport, DEFAULT_CLIENT_INFO - - -class DataTaxonomyServiceGrpcTransport(DataTaxonomyServiceTransport): - """gRPC backend transport for DataTaxonomyService. - - DataTaxonomyService enables attribute-based governance. 
The
- resources currently offered include DataTaxonomy and
- DataAttribute.
-
- This class defines the same methods as the primary client, so the
- primary client can load the underlying transport implementation
- and call it.
-
- It sends protocol buffers over the wire using gRPC (which is built on
- top of HTTP/2); the ``grpcio`` package must be installed.
- """
- _stubs: Dict[str, Callable]
-
- def __init__(self, *,
- host: str = 'dataplex.googleapis.com',
- credentials: Optional[ga_credentials.Credentials] = None,
- credentials_file: Optional[str] = None,
- scopes: Optional[Sequence[str]] = None,
- channel: Optional[Union[grpc.Channel, Callable[..., grpc.Channel]]] = None,
- api_mtls_endpoint: Optional[str] = None,
- client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None,
- ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None,
- client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None,
- quota_project_id: Optional[str] = None,
- client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO,
- always_use_jwt_access: Optional[bool] = False,
- api_audience: Optional[str] = None,
- ) -> None:
- """Instantiate the transport.
-
- Args:
- host (Optional[str]):
- The hostname to connect to (default: 'dataplex.googleapis.com').
- credentials (Optional[google.auth.credentials.Credentials]): The
- authorization credentials to attach to requests. These
- credentials identify the application to the service; if none
- are specified, the client will attempt to ascertain the
- credentials from the environment.
- This argument is ignored if a ``channel`` instance is provided.
- credentials_file (Optional[str]): A file with credentials that can
- be loaded with :func:`google.auth.load_credentials_from_file`.
- This argument is ignored if a ``channel`` instance is provided.
- scopes (Optional[Sequence[str]]): A list of scopes. This argument is
- ignored if a ``channel`` instance is provided.
- channel (Optional[Union[grpc.Channel, Callable[..., grpc.Channel]]]):
- A ``Channel`` instance through which to make calls, or a Callable
- that constructs and returns one. If set to None, ``self.create_channel``
- is used to create the channel. If a Callable is given, it will be called
- with the same arguments as used in ``self.create_channel``.
- api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint.
- If provided, it overrides the ``host`` argument and tries to create
- a mutual TLS channel with client SSL credentials from
- ``client_cert_source`` or application default SSL credentials.
- client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]):
- Deprecated. A callback to provide client SSL certificate bytes and
- private key bytes, both in PEM format. It is ignored if
- ``api_mtls_endpoint`` is None.
- ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials
- for the grpc channel. It is ignored if a ``channel`` instance is provided.
- client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]):
- A callback to provide client certificate bytes and private key bytes,
- both in PEM format. It is used to configure a mutual TLS channel. It is
- ignored if a ``channel`` instance or ``ssl_channel_credentials`` is provided.
- quota_project_id (Optional[str]): An optional project to use for billing
- and quota.
- client_info (google.api_core.gapic_v1.client_info.ClientInfo):
- The client info used to send a user-agent string along with
- API requests. If ``None``, then default info will be used.
- Generally, you only need to set this if you're developing - your own client library. - always_use_jwt_access (Optional[bool]): Whether self signed JWT should - be used for service account credentials. - - Raises: - google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport - creation failed for any reason. - google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` - and ``credentials_file`` are passed. - """ - self._grpc_channel = None - self._ssl_channel_credentials = ssl_channel_credentials - self._stubs: Dict[str, Callable] = {} - self._operations_client: Optional[operations_v1.OperationsClient] = None - - if api_mtls_endpoint: - warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning) - if client_cert_source: - warnings.warn("client_cert_source is deprecated", DeprecationWarning) - - if isinstance(channel, grpc.Channel): - # Ignore credentials if a channel was passed. - credentials = None - self._ignore_credentials = True - # If a channel was explicitly provided, set it. - self._grpc_channel = channel - self._ssl_channel_credentials = None - - else: - if api_mtls_endpoint: - host = api_mtls_endpoint - - # Create SSL credentials with client_cert_source or application - # default SSL credentials. - if client_cert_source: - cert, key = client_cert_source() - self._ssl_channel_credentials = grpc.ssl_channel_credentials( - certificate_chain=cert, private_key=key - ) - else: - self._ssl_channel_credentials = SslCredentials().ssl_credentials - - else: - if client_cert_source_for_mtls and not ssl_channel_credentials: - cert, key = client_cert_source_for_mtls() - self._ssl_channel_credentials = grpc.ssl_channel_credentials( - certificate_chain=cert, private_key=key - ) - - # The base transport sets the host, credentials and scopes - super().__init__( - host=host, - credentials=credentials, - credentials_file=credentials_file, - scopes=scopes, - quota_project_id=quota_project_id, - client_info=client_info, - always_use_jwt_access=always_use_jwt_access, - api_audience=api_audience, - ) - - if not self._grpc_channel: - # initialize with the provided callable or the default channel - channel_init = channel or type(self).create_channel - self._grpc_channel = channel_init( - self._host, - # use the credentials which are saved - credentials=self._credentials, - # Set ``credentials_file`` to ``None`` here as - # the credentials that we saved earlier should be used. - credentials_file=None, - scopes=self._scopes, - ssl_credentials=self._ssl_channel_credentials, - quota_project_id=quota_project_id, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ], - ) - - # Wrap messages. This must be done after self._grpc_channel exists - self._prep_wrapped_messages(client_info) - - @classmethod - def create_channel(cls, - host: str = 'dataplex.googleapis.com', - credentials: Optional[ga_credentials.Credentials] = None, - credentials_file: Optional[str] = None, - scopes: Optional[Sequence[str]] = None, - quota_project_id: Optional[str] = None, - **kwargs) -> grpc.Channel: - """Create and return a gRPC channel object. - Args: - host (Optional[str]): The host for the channel to use. - credentials (Optional[~.Credentials]): The - authorization credentials to attach to requests. These - credentials identify this application to the service. If - none are specified, the client will attempt to ascertain - the credentials from the environment. 
- credentials_file (Optional[str]): A file with credentials that can
- be loaded with :func:`google.auth.load_credentials_from_file`.
- This argument is mutually exclusive with credentials.
- scopes (Optional[Sequence[str]]): An optional list of scopes needed for this
- service. These are only used when credentials are not specified and
- are passed to :func:`google.auth.default`.
- quota_project_id (Optional[str]): An optional project to use for billing
- and quota.
- kwargs (Optional[dict]): Keyword arguments, which are passed to the
- channel creation.
- Returns:
- grpc.Channel: A gRPC channel object.
-
- Raises:
- google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials``
- and ``credentials_file`` are passed.
- """
-
- return grpc_helpers.create_channel(
- host,
- credentials=credentials,
- credentials_file=credentials_file,
- quota_project_id=quota_project_id,
- default_scopes=cls.AUTH_SCOPES,
- scopes=scopes,
- default_host=cls.DEFAULT_HOST,
- **kwargs
- )
-
- @property
- def grpc_channel(self) -> grpc.Channel:
- """Return the channel designed to connect to this service.
- """
- return self._grpc_channel
-
- @property
- def operations_client(self) -> operations_v1.OperationsClient:
- """Create the client designed to process long-running operations.
-
- This property caches on the instance; repeated calls return the same
- client.
- """
- # Quick check: Only create a new client if we do not already have one.
- if self._operations_client is None:
- self._operations_client = operations_v1.OperationsClient(
- self.grpc_channel
- )
-
- # Return the client from cache.
- return self._operations_client
-
- @property
- def create_data_taxonomy(self) -> Callable[
- [gcd_data_taxonomy.CreateDataTaxonomyRequest],
- operations_pb2.Operation]:
- r"""Return a callable for the create data taxonomy method over gRPC.
-
- Create a DataTaxonomy resource.
-
- Returns:
- Callable[[~.CreateDataTaxonomyRequest],
- ~.Operation]:
- A function that, when called, will call the underlying RPC
- on the server.
- """
- # Generate a "stub function" on-the-fly which will actually make
- # the request.
- # gRPC handles serialization and deserialization, so we just need
- # to pass in the functions for each.
- if 'create_data_taxonomy' not in self._stubs:
- self._stubs['create_data_taxonomy'] = self.grpc_channel.unary_unary(
- '/google.cloud.dataplex.v1.DataTaxonomyService/CreateDataTaxonomy',
- request_serializer=gcd_data_taxonomy.CreateDataTaxonomyRequest.serialize,
- response_deserializer=operations_pb2.Operation.FromString,
- )
- return self._stubs['create_data_taxonomy']
-
- @property
- def update_data_taxonomy(self) -> Callable[
- [gcd_data_taxonomy.UpdateDataTaxonomyRequest],
- operations_pb2.Operation]:
- r"""Return a callable for the update data taxonomy method over gRPC.
-
- Updates a DataTaxonomy resource.
-
- Returns:
- Callable[[~.UpdateDataTaxonomyRequest],
- ~.Operation]:
- A function that, when called, will call the underlying RPC
- on the server.
- """
- # Generate a "stub function" on-the-fly which will actually make
- # the request.
- # gRPC handles serialization and deserialization, so we just need
- # to pass in the functions for each.
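The stub properties in this file all follow one build-once, cache-forever pattern. A dependency-free sketch of that pattern; the class name and channel argument here are illustrative stand-ins, not part of the generated code.

class StubCache:
    """Illustrative stand-in for the transport's lazy stub registry."""

    def __init__(self, channel):
        self._channel = channel  # a grpc.Channel in the real transport
        self._stubs = {}

    @property
    def get_data_taxonomy(self):
        # Build the unary-unary callable once, then serve the cached one;
        # serializer/deserializer arguments are omitted in this sketch.
        if 'get_data_taxonomy' not in self._stubs:
            self._stubs['get_data_taxonomy'] = self._channel.unary_unary(
                '/google.cloud.dataplex.v1.DataTaxonomyService/GetDataTaxonomy',
            )
        return self._stubs['get_data_taxonomy']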
- if 'update_data_taxonomy' not in self._stubs: - self._stubs['update_data_taxonomy'] = self.grpc_channel.unary_unary( - '/google.cloud.dataplex.v1.DataTaxonomyService/UpdateDataTaxonomy', - request_serializer=gcd_data_taxonomy.UpdateDataTaxonomyRequest.serialize, - response_deserializer=operations_pb2.Operation.FromString, - ) - return self._stubs['update_data_taxonomy'] - - @property - def delete_data_taxonomy(self) -> Callable[ - [data_taxonomy.DeleteDataTaxonomyRequest], - operations_pb2.Operation]: - r"""Return a callable for the delete data taxonomy method over gRPC. - - Deletes a DataTaxonomy resource. All attributes - within the DataTaxonomy must be deleted before the - DataTaxonomy can be deleted. - - Returns: - Callable[[~.DeleteDataTaxonomyRequest], - ~.Operation]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'delete_data_taxonomy' not in self._stubs: - self._stubs['delete_data_taxonomy'] = self.grpc_channel.unary_unary( - '/google.cloud.dataplex.v1.DataTaxonomyService/DeleteDataTaxonomy', - request_serializer=data_taxonomy.DeleteDataTaxonomyRequest.serialize, - response_deserializer=operations_pb2.Operation.FromString, - ) - return self._stubs['delete_data_taxonomy'] - - @property - def list_data_taxonomies(self) -> Callable[ - [data_taxonomy.ListDataTaxonomiesRequest], - data_taxonomy.ListDataTaxonomiesResponse]: - r"""Return a callable for the list data taxonomies method over gRPC. - - Lists DataTaxonomy resources in a project and - location. - - Returns: - Callable[[~.ListDataTaxonomiesRequest], - ~.ListDataTaxonomiesResponse]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'list_data_taxonomies' not in self._stubs: - self._stubs['list_data_taxonomies'] = self.grpc_channel.unary_unary( - '/google.cloud.dataplex.v1.DataTaxonomyService/ListDataTaxonomies', - request_serializer=data_taxonomy.ListDataTaxonomiesRequest.serialize, - response_deserializer=data_taxonomy.ListDataTaxonomiesResponse.deserialize, - ) - return self._stubs['list_data_taxonomies'] - - @property - def get_data_taxonomy(self) -> Callable[ - [data_taxonomy.GetDataTaxonomyRequest], - data_taxonomy.DataTaxonomy]: - r"""Return a callable for the get data taxonomy method over gRPC. - - Retrieves a DataTaxonomy resource. - - Returns: - Callable[[~.GetDataTaxonomyRequest], - ~.DataTaxonomy]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. 
- if 'get_data_taxonomy' not in self._stubs: - self._stubs['get_data_taxonomy'] = self.grpc_channel.unary_unary( - '/google.cloud.dataplex.v1.DataTaxonomyService/GetDataTaxonomy', - request_serializer=data_taxonomy.GetDataTaxonomyRequest.serialize, - response_deserializer=data_taxonomy.DataTaxonomy.deserialize, - ) - return self._stubs['get_data_taxonomy'] - - @property - def create_data_attribute_binding(self) -> Callable[ - [data_taxonomy.CreateDataAttributeBindingRequest], - operations_pb2.Operation]: - r"""Return a callable for the create data attribute binding method over gRPC. - - Create a DataAttributeBinding resource. - - Returns: - Callable[[~.CreateDataAttributeBindingRequest], - ~.Operation]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'create_data_attribute_binding' not in self._stubs: - self._stubs['create_data_attribute_binding'] = self.grpc_channel.unary_unary( - '/google.cloud.dataplex.v1.DataTaxonomyService/CreateDataAttributeBinding', - request_serializer=data_taxonomy.CreateDataAttributeBindingRequest.serialize, - response_deserializer=operations_pb2.Operation.FromString, - ) - return self._stubs['create_data_attribute_binding'] - - @property - def update_data_attribute_binding(self) -> Callable[ - [data_taxonomy.UpdateDataAttributeBindingRequest], - operations_pb2.Operation]: - r"""Return a callable for the update data attribute binding method over gRPC. - - Updates a DataAttributeBinding resource. - - Returns: - Callable[[~.UpdateDataAttributeBindingRequest], - ~.Operation]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'update_data_attribute_binding' not in self._stubs: - self._stubs['update_data_attribute_binding'] = self.grpc_channel.unary_unary( - '/google.cloud.dataplex.v1.DataTaxonomyService/UpdateDataAttributeBinding', - request_serializer=data_taxonomy.UpdateDataAttributeBindingRequest.serialize, - response_deserializer=operations_pb2.Operation.FromString, - ) - return self._stubs['update_data_attribute_binding'] - - @property - def delete_data_attribute_binding(self) -> Callable[ - [data_taxonomy.DeleteDataAttributeBindingRequest], - operations_pb2.Operation]: - r"""Return a callable for the delete data attribute binding method over gRPC. - - Deletes a DataAttributeBinding resource. All - attributes within the DataAttributeBinding must be - deleted before the DataAttributeBinding can be deleted. - - Returns: - Callable[[~.DeleteDataAttributeBindingRequest], - ~.Operation]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. 
- if 'delete_data_attribute_binding' not in self._stubs: - self._stubs['delete_data_attribute_binding'] = self.grpc_channel.unary_unary( - '/google.cloud.dataplex.v1.DataTaxonomyService/DeleteDataAttributeBinding', - request_serializer=data_taxonomy.DeleteDataAttributeBindingRequest.serialize, - response_deserializer=operations_pb2.Operation.FromString, - ) - return self._stubs['delete_data_attribute_binding'] - - @property - def list_data_attribute_bindings(self) -> Callable[ - [data_taxonomy.ListDataAttributeBindingsRequest], - data_taxonomy.ListDataAttributeBindingsResponse]: - r"""Return a callable for the list data attribute bindings method over gRPC. - - Lists DataAttributeBinding resources in a project and - location. - - Returns: - Callable[[~.ListDataAttributeBindingsRequest], - ~.ListDataAttributeBindingsResponse]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'list_data_attribute_bindings' not in self._stubs: - self._stubs['list_data_attribute_bindings'] = self.grpc_channel.unary_unary( - '/google.cloud.dataplex.v1.DataTaxonomyService/ListDataAttributeBindings', - request_serializer=data_taxonomy.ListDataAttributeBindingsRequest.serialize, - response_deserializer=data_taxonomy.ListDataAttributeBindingsResponse.deserialize, - ) - return self._stubs['list_data_attribute_bindings'] - - @property - def get_data_attribute_binding(self) -> Callable[ - [data_taxonomy.GetDataAttributeBindingRequest], - data_taxonomy.DataAttributeBinding]: - r"""Return a callable for the get data attribute binding method over gRPC. - - Retrieves a DataAttributeBinding resource. - - Returns: - Callable[[~.GetDataAttributeBindingRequest], - ~.DataAttributeBinding]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'get_data_attribute_binding' not in self._stubs: - self._stubs['get_data_attribute_binding'] = self.grpc_channel.unary_unary( - '/google.cloud.dataplex.v1.DataTaxonomyService/GetDataAttributeBinding', - request_serializer=data_taxonomy.GetDataAttributeBindingRequest.serialize, - response_deserializer=data_taxonomy.DataAttributeBinding.deserialize, - ) - return self._stubs['get_data_attribute_binding'] - - @property - def create_data_attribute(self) -> Callable[ - [data_taxonomy.CreateDataAttributeRequest], - operations_pb2.Operation]: - r"""Return a callable for the create data attribute method over gRPC. - - Create a DataAttribute resource. - - Returns: - Callable[[~.CreateDataAttributeRequest], - ~.Operation]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. 
- if 'create_data_attribute' not in self._stubs: - self._stubs['create_data_attribute'] = self.grpc_channel.unary_unary( - '/google.cloud.dataplex.v1.DataTaxonomyService/CreateDataAttribute', - request_serializer=data_taxonomy.CreateDataAttributeRequest.serialize, - response_deserializer=operations_pb2.Operation.FromString, - ) - return self._stubs['create_data_attribute'] - - @property - def update_data_attribute(self) -> Callable[ - [data_taxonomy.UpdateDataAttributeRequest], - operations_pb2.Operation]: - r"""Return a callable for the update data attribute method over gRPC. - - Updates a DataAttribute resource. - - Returns: - Callable[[~.UpdateDataAttributeRequest], - ~.Operation]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'update_data_attribute' not in self._stubs: - self._stubs['update_data_attribute'] = self.grpc_channel.unary_unary( - '/google.cloud.dataplex.v1.DataTaxonomyService/UpdateDataAttribute', - request_serializer=data_taxonomy.UpdateDataAttributeRequest.serialize, - response_deserializer=operations_pb2.Operation.FromString, - ) - return self._stubs['update_data_attribute'] - - @property - def delete_data_attribute(self) -> Callable[ - [data_taxonomy.DeleteDataAttributeRequest], - operations_pb2.Operation]: - r"""Return a callable for the delete data attribute method over gRPC. - - Deletes a Data Attribute resource. - - Returns: - Callable[[~.DeleteDataAttributeRequest], - ~.Operation]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'delete_data_attribute' not in self._stubs: - self._stubs['delete_data_attribute'] = self.grpc_channel.unary_unary( - '/google.cloud.dataplex.v1.DataTaxonomyService/DeleteDataAttribute', - request_serializer=data_taxonomy.DeleteDataAttributeRequest.serialize, - response_deserializer=operations_pb2.Operation.FromString, - ) - return self._stubs['delete_data_attribute'] - - @property - def list_data_attributes(self) -> Callable[ - [data_taxonomy.ListDataAttributesRequest], - data_taxonomy.ListDataAttributesResponse]: - r"""Return a callable for the list data attributes method over gRPC. - - Lists Data Attribute resources in a DataTaxonomy. - - Returns: - Callable[[~.ListDataAttributesRequest], - ~.ListDataAttributesResponse]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'list_data_attributes' not in self._stubs: - self._stubs['list_data_attributes'] = self.grpc_channel.unary_unary( - '/google.cloud.dataplex.v1.DataTaxonomyService/ListDataAttributes', - request_serializer=data_taxonomy.ListDataAttributesRequest.serialize, - response_deserializer=data_taxonomy.ListDataAttributesResponse.deserialize, - ) - return self._stubs['list_data_attributes'] - - @property - def get_data_attribute(self) -> Callable[ - [data_taxonomy.GetDataAttributeRequest], - data_taxonomy.DataAttribute]: - r"""Return a callable for the get data attribute method over gRPC. 
- - Retrieves a Data Attribute resource. - - Returns: - Callable[[~.GetDataAttributeRequest], - ~.DataAttribute]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'get_data_attribute' not in self._stubs: - self._stubs['get_data_attribute'] = self.grpc_channel.unary_unary( - '/google.cloud.dataplex.v1.DataTaxonomyService/GetDataAttribute', - request_serializer=data_taxonomy.GetDataAttributeRequest.serialize, - response_deserializer=data_taxonomy.DataAttribute.deserialize, - ) - return self._stubs['get_data_attribute'] - - def close(self): - self.grpc_channel.close() - - @property - def delete_operation( - self, - ) -> Callable[[operations_pb2.DeleteOperationRequest], None]: - r"""Return a callable for the delete_operation method over gRPC. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if "delete_operation" not in self._stubs: - self._stubs["delete_operation"] = self.grpc_channel.unary_unary( - "/google.longrunning.Operations/DeleteOperation", - request_serializer=operations_pb2.DeleteOperationRequest.SerializeToString, - response_deserializer=None, - ) - return self._stubs["delete_operation"] - - @property - def cancel_operation( - self, - ) -> Callable[[operations_pb2.CancelOperationRequest], None]: - r"""Return a callable for the cancel_operation method over gRPC. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if "cancel_operation" not in self._stubs: - self._stubs["cancel_operation"] = self.grpc_channel.unary_unary( - "/google.longrunning.Operations/CancelOperation", - request_serializer=operations_pb2.CancelOperationRequest.SerializeToString, - response_deserializer=None, - ) - return self._stubs["cancel_operation"] - - @property - def get_operation( - self, - ) -> Callable[[operations_pb2.GetOperationRequest], operations_pb2.Operation]: - r"""Return a callable for the get_operation method over gRPC. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if "get_operation" not in self._stubs: - self._stubs["get_operation"] = self.grpc_channel.unary_unary( - "/google.longrunning.Operations/GetOperation", - request_serializer=operations_pb2.GetOperationRequest.SerializeToString, - response_deserializer=operations_pb2.Operation.FromString, - ) - return self._stubs["get_operation"] - - @property - def list_operations( - self, - ) -> Callable[[operations_pb2.ListOperationsRequest], operations_pb2.ListOperationsResponse]: - r"""Return a callable for the list_operations method over gRPC. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. 
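The Operations stubs defined here back the `operations_client` property earlier in this file; at the client level, the mutating RPCs surface as `google.api_core.operation.Operation` futures. A hedged usage sketch, with placeholder identifiers throughout:

from google.cloud import dataplex_v1

client = dataplex_v1.DataTaxonomyServiceClient()
op = client.create_data_taxonomy(
    parent="projects/my-project/locations/us-central1",  # placeholder
    data_taxonomy=dataplex_v1.DataTaxonomy(description="example"),
    data_taxonomy_id="example-taxonomy",  # placeholder
)
# result() polls GetOperation through stubs like these until completion.
print(op.result(timeout=300).name)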
- if "list_operations" not in self._stubs: - self._stubs["list_operations"] = self.grpc_channel.unary_unary( - "/google.longrunning.Operations/ListOperations", - request_serializer=operations_pb2.ListOperationsRequest.SerializeToString, - response_deserializer=operations_pb2.ListOperationsResponse.FromString, - ) - return self._stubs["list_operations"] - - @property - def list_locations( - self, - ) -> Callable[[locations_pb2.ListLocationsRequest], locations_pb2.ListLocationsResponse]: - r"""Return a callable for the list locations method over gRPC. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if "list_locations" not in self._stubs: - self._stubs["list_locations"] = self.grpc_channel.unary_unary( - "/google.cloud.location.Locations/ListLocations", - request_serializer=locations_pb2.ListLocationsRequest.SerializeToString, - response_deserializer=locations_pb2.ListLocationsResponse.FromString, - ) - return self._stubs["list_locations"] - - @property - def get_location( - self, - ) -> Callable[[locations_pb2.GetLocationRequest], locations_pb2.Location]: - r"""Return a callable for the list locations method over gRPC. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if "get_location" not in self._stubs: - self._stubs["get_location"] = self.grpc_channel.unary_unary( - "/google.cloud.location.Locations/GetLocation", - request_serializer=locations_pb2.GetLocationRequest.SerializeToString, - response_deserializer=locations_pb2.Location.FromString, - ) - return self._stubs["get_location"] - - @property - def kind(self) -> str: - return "grpc" - - -__all__ = ( - 'DataTaxonomyServiceGrpcTransport', -) diff --git a/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/data_taxonomy_service/transports/grpc_asyncio.py b/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/data_taxonomy_service/transports/grpc_asyncio.py deleted file mode 100644 index a8b233e044c8..000000000000 --- a/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/data_taxonomy_service/transports/grpc_asyncio.py +++ /dev/null @@ -1,894 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-#
-import inspect
-import warnings
-from typing import Awaitable, Callable, Dict, Optional, Sequence, Tuple, Union
-
-from google.api_core import gapic_v1
-from google.api_core import grpc_helpers_async
-from google.api_core import exceptions as core_exceptions
-from google.api_core import retry_async as retries
-from google.api_core import operations_v1
-from google.auth import credentials as ga_credentials  # type: ignore
-from google.auth.transport.grpc import SslCredentials  # type: ignore
-
-import grpc  # type: ignore
-from grpc.experimental import aio  # type: ignore
-
-from google.cloud.dataplex_v1.types import data_taxonomy
-from google.cloud.dataplex_v1.types import data_taxonomy as gcd_data_taxonomy
-from google.cloud.location import locations_pb2 # type: ignore
-from google.iam.v1 import iam_policy_pb2  # type: ignore
-from google.iam.v1 import policy_pb2  # type: ignore
-from google.longrunning import operations_pb2 # type: ignore
-from .base import DataTaxonomyServiceTransport, DEFAULT_CLIENT_INFO
-from .grpc import DataTaxonomyServiceGrpcTransport
-
-
-class DataTaxonomyServiceGrpcAsyncIOTransport(DataTaxonomyServiceTransport):
-    """gRPC AsyncIO backend transport for DataTaxonomyService.
-
-    DataTaxonomyService enables attribute-based governance. The
-    resources currently offered include DataTaxonomy and
-    DataAttribute.
-
-    This class defines the same methods as the primary client, so the
-    primary client can load the underlying transport implementation
-    and call it.
-
-    It sends protocol buffers over the wire using gRPC (which is built on
-    top of HTTP/2); the ``grpcio`` package must be installed.
-    """
-
-    _grpc_channel: aio.Channel
-    _stubs: Dict[str, Callable] = {}
-
-    @classmethod
-    def create_channel(cls,
-                       host: str = 'dataplex.googleapis.com',
-                       credentials: Optional[ga_credentials.Credentials] = None,
-                       credentials_file: Optional[str] = None,
-                       scopes: Optional[Sequence[str]] = None,
-                       quota_project_id: Optional[str] = None,
-                       **kwargs) -> aio.Channel:
-        """Create and return a gRPC AsyncIO channel object.
-        Args:
-            host (Optional[str]): The host for the channel to use.
-            credentials (Optional[~.Credentials]): The
-                authorization credentials to attach to requests. These
-                credentials identify this application to the service. If
-                none are specified, the client will attempt to ascertain
-                the credentials from the environment.
-            credentials_file (Optional[str]): A file with credentials that can
-                be loaded with :func:`google.auth.load_credentials_from_file`.
-            scopes (Optional[Sequence[str]]): An optional list of scopes needed for this
-                service. These are only used when credentials are not specified and
-                are passed to :func:`google.auth.default`.
-            quota_project_id (Optional[str]): An optional project to use for billing
-                and quota.
-            kwargs (Optional[dict]): Keyword arguments, which are passed to the
-                channel creation.
-        Returns:
-            aio.Channel: A gRPC AsyncIO channel object.
- """ - - return grpc_helpers_async.create_channel( - host, - credentials=credentials, - credentials_file=credentials_file, - quota_project_id=quota_project_id, - default_scopes=cls.AUTH_SCOPES, - scopes=scopes, - default_host=cls.DEFAULT_HOST, - **kwargs - ) - - def __init__(self, *, - host: str = 'dataplex.googleapis.com', - credentials: Optional[ga_credentials.Credentials] = None, - credentials_file: Optional[str] = None, - scopes: Optional[Sequence[str]] = None, - channel: Optional[Union[aio.Channel, Callable[..., aio.Channel]]] = None, - api_mtls_endpoint: Optional[str] = None, - client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None, - ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None, - client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, - quota_project_id: Optional[str] = None, - client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, - always_use_jwt_access: Optional[bool] = False, - api_audience: Optional[str] = None, - ) -> None: - """Instantiate the transport. - - Args: - host (Optional[str]): - The hostname to connect to (default: 'dataplex.googleapis.com'). - credentials (Optional[google.auth.credentials.Credentials]): The - authorization credentials to attach to requests. These - credentials identify the application to the service; if none - are specified, the client will attempt to ascertain the - credentials from the environment. - This argument is ignored if a ``channel`` instance is provided. - credentials_file (Optional[str]): A file with credentials that can - be loaded with :func:`google.auth.load_credentials_from_file`. - This argument is ignored if a ``channel`` instance is provided. - scopes (Optional[Sequence[str]]): A optional list of scopes needed for this - service. These are only used when credentials are not specified and - are passed to :func:`google.auth.default`. - channel (Optional[Union[aio.Channel, Callable[..., aio.Channel]]]): - A ``Channel`` instance through which to make calls, or a Callable - that constructs and returns one. If set to None, ``self.create_channel`` - is used to create the channel. If a Callable is given, it will be called - with the same arguments as used in ``self.create_channel``. - api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint. - If provided, it overrides the ``host`` argument and tries to create - a mutual TLS channel with client SSL credentials from - ``client_cert_source`` or application default SSL credentials. - client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]): - Deprecated. A callback to provide client SSL certificate bytes and - private key bytes, both in PEM format. It is ignored if - ``api_mtls_endpoint`` is None. - ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials - for the grpc channel. It is ignored if a ``channel`` instance is provided. - client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]): - A callback to provide client certificate bytes and private key bytes, - both in PEM format. It is used to configure a mutual TLS channel. It is - ignored if a ``channel`` instance or ``ssl_channel_credentials`` is provided. - quota_project_id (Optional[str]): An optional project to use for billing - and quota. - client_info (google.api_core.gapic_v1.client_info.ClientInfo): - The client info used to send a user-agent string along with - API requests. If ``None``, then default info will be used. 
-                Generally, you only need to set this if you're developing
-                your own client library.
-            always_use_jwt_access (Optional[bool]): Whether self-signed JWT should
-                be used for service account credentials.
-
-        Raises:
-            google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport
-              creation failed for any reason.
-            google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials``
-              and ``credentials_file`` are passed.
-        """
-        self._grpc_channel = None
-        self._ssl_channel_credentials = ssl_channel_credentials
-        self._stubs: Dict[str, Callable] = {}
-        self._operations_client: Optional[operations_v1.OperationsAsyncClient] = None
-
-        if api_mtls_endpoint:
-            warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning)
-        if client_cert_source:
-            warnings.warn("client_cert_source is deprecated", DeprecationWarning)
-
-        if isinstance(channel, aio.Channel):
-            # Ignore credentials if a channel was passed.
-            credentials = None
-            self._ignore_credentials = True
-            # If a channel was explicitly provided, set it.
-            self._grpc_channel = channel
-            self._ssl_channel_credentials = None
-        else:
-            if api_mtls_endpoint:
-                host = api_mtls_endpoint
-
-                # Create SSL credentials with client_cert_source or application
-                # default SSL credentials.
-                if client_cert_source:
-                    cert, key = client_cert_source()
-                    self._ssl_channel_credentials = grpc.ssl_channel_credentials(
-                        certificate_chain=cert, private_key=key
-                    )
-                else:
-                    self._ssl_channel_credentials = SslCredentials().ssl_credentials
-
-            else:
-                if client_cert_source_for_mtls and not ssl_channel_credentials:
-                    cert, key = client_cert_source_for_mtls()
-                    self._ssl_channel_credentials = grpc.ssl_channel_credentials(
-                        certificate_chain=cert, private_key=key
-                    )
-
-        # The base transport sets the host, credentials and scopes
-        super().__init__(
-            host=host,
-            credentials=credentials,
-            credentials_file=credentials_file,
-            scopes=scopes,
-            quota_project_id=quota_project_id,
-            client_info=client_info,
-            always_use_jwt_access=always_use_jwt_access,
-            api_audience=api_audience,
-        )
-
-        if not self._grpc_channel:
-            # initialize with the provided callable or the default channel
-            channel_init = channel or type(self).create_channel
-            self._grpc_channel = channel_init(
-                self._host,
-                # use the credentials which are saved
-                credentials=self._credentials,
-                # Set ``credentials_file`` to ``None`` here as
-                # the credentials that we saved earlier should be used.
-                credentials_file=None,
-                scopes=self._scopes,
-                ssl_credentials=self._ssl_channel_credentials,
-                quota_project_id=quota_project_id,
-                options=[
-                    ("grpc.max_send_message_length", -1),
-                    ("grpc.max_receive_message_length", -1),
-                ],
-            )
-
-        # Wrap messages. This must be done after self._grpc_channel exists
-        self._wrap_with_kind = "kind" in inspect.signature(gapic_v1.method_async.wrap_method).parameters
-        self._prep_wrapped_messages(client_info)
-
-    @property
-    def grpc_channel(self) -> aio.Channel:
-        """Create the channel designed to connect to this service.
-
-        This property caches on the instance; repeated calls return
-        the same channel.
-        """
-        # Return the channel from cache.
-        return self._grpc_channel
-
-    @property
-    def operations_client(self) -> operations_v1.OperationsAsyncClient:
-        """Create the client designed to process long-running operations.
-
-        This property caches on the instance; repeated calls return the same
-        client.
-        """
-        # Quick check: Only create a new client if we do not already have one.
- if self._operations_client is None: - self._operations_client = operations_v1.OperationsAsyncClient( - self.grpc_channel - ) - - # Return the client from cache. - return self._operations_client - - @property - def create_data_taxonomy(self) -> Callable[ - [gcd_data_taxonomy.CreateDataTaxonomyRequest], - Awaitable[operations_pb2.Operation]]: - r"""Return a callable for the create data taxonomy method over gRPC. - - Create a DataTaxonomy resource. - - Returns: - Callable[[~.CreateDataTaxonomyRequest], - Awaitable[~.Operation]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'create_data_taxonomy' not in self._stubs: - self._stubs['create_data_taxonomy'] = self.grpc_channel.unary_unary( - '/google.cloud.dataplex.v1.DataTaxonomyService/CreateDataTaxonomy', - request_serializer=gcd_data_taxonomy.CreateDataTaxonomyRequest.serialize, - response_deserializer=operations_pb2.Operation.FromString, - ) - return self._stubs['create_data_taxonomy'] - - @property - def update_data_taxonomy(self) -> Callable[ - [gcd_data_taxonomy.UpdateDataTaxonomyRequest], - Awaitable[operations_pb2.Operation]]: - r"""Return a callable for the update data taxonomy method over gRPC. - - Updates a DataTaxonomy resource. - - Returns: - Callable[[~.UpdateDataTaxonomyRequest], - Awaitable[~.Operation]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'update_data_taxonomy' not in self._stubs: - self._stubs['update_data_taxonomy'] = self.grpc_channel.unary_unary( - '/google.cloud.dataplex.v1.DataTaxonomyService/UpdateDataTaxonomy', - request_serializer=gcd_data_taxonomy.UpdateDataTaxonomyRequest.serialize, - response_deserializer=operations_pb2.Operation.FromString, - ) - return self._stubs['update_data_taxonomy'] - - @property - def delete_data_taxonomy(self) -> Callable[ - [data_taxonomy.DeleteDataTaxonomyRequest], - Awaitable[operations_pb2.Operation]]: - r"""Return a callable for the delete data taxonomy method over gRPC. - - Deletes a DataTaxonomy resource. All attributes - within the DataTaxonomy must be deleted before the - DataTaxonomy can be deleted. - - Returns: - Callable[[~.DeleteDataTaxonomyRequest], - Awaitable[~.Operation]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'delete_data_taxonomy' not in self._stubs: - self._stubs['delete_data_taxonomy'] = self.grpc_channel.unary_unary( - '/google.cloud.dataplex.v1.DataTaxonomyService/DeleteDataTaxonomy', - request_serializer=data_taxonomy.DeleteDataTaxonomyRequest.serialize, - response_deserializer=operations_pb2.Operation.FromString, - ) - return self._stubs['delete_data_taxonomy'] - - @property - def list_data_taxonomies(self) -> Callable[ - [data_taxonomy.ListDataTaxonomiesRequest], - Awaitable[data_taxonomy.ListDataTaxonomiesResponse]]: - r"""Return a callable for the list data taxonomies method over gRPC. - - Lists DataTaxonomy resources in a project and - location. 
- - Returns: - Callable[[~.ListDataTaxonomiesRequest], - Awaitable[~.ListDataTaxonomiesResponse]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'list_data_taxonomies' not in self._stubs: - self._stubs['list_data_taxonomies'] = self.grpc_channel.unary_unary( - '/google.cloud.dataplex.v1.DataTaxonomyService/ListDataTaxonomies', - request_serializer=data_taxonomy.ListDataTaxonomiesRequest.serialize, - response_deserializer=data_taxonomy.ListDataTaxonomiesResponse.deserialize, - ) - return self._stubs['list_data_taxonomies'] - - @property - def get_data_taxonomy(self) -> Callable[ - [data_taxonomy.GetDataTaxonomyRequest], - Awaitable[data_taxonomy.DataTaxonomy]]: - r"""Return a callable for the get data taxonomy method over gRPC. - - Retrieves a DataTaxonomy resource. - - Returns: - Callable[[~.GetDataTaxonomyRequest], - Awaitable[~.DataTaxonomy]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'get_data_taxonomy' not in self._stubs: - self._stubs['get_data_taxonomy'] = self.grpc_channel.unary_unary( - '/google.cloud.dataplex.v1.DataTaxonomyService/GetDataTaxonomy', - request_serializer=data_taxonomy.GetDataTaxonomyRequest.serialize, - response_deserializer=data_taxonomy.DataTaxonomy.deserialize, - ) - return self._stubs['get_data_taxonomy'] - - @property - def create_data_attribute_binding(self) -> Callable[ - [data_taxonomy.CreateDataAttributeBindingRequest], - Awaitable[operations_pb2.Operation]]: - r"""Return a callable for the create data attribute binding method over gRPC. - - Create a DataAttributeBinding resource. - - Returns: - Callable[[~.CreateDataAttributeBindingRequest], - Awaitable[~.Operation]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'create_data_attribute_binding' not in self._stubs: - self._stubs['create_data_attribute_binding'] = self.grpc_channel.unary_unary( - '/google.cloud.dataplex.v1.DataTaxonomyService/CreateDataAttributeBinding', - request_serializer=data_taxonomy.CreateDataAttributeBindingRequest.serialize, - response_deserializer=operations_pb2.Operation.FromString, - ) - return self._stubs['create_data_attribute_binding'] - - @property - def update_data_attribute_binding(self) -> Callable[ - [data_taxonomy.UpdateDataAttributeBindingRequest], - Awaitable[operations_pb2.Operation]]: - r"""Return a callable for the update data attribute binding method over gRPC. - - Updates a DataAttributeBinding resource. - - Returns: - Callable[[~.UpdateDataAttributeBindingRequest], - Awaitable[~.Operation]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. 
- if 'update_data_attribute_binding' not in self._stubs: - self._stubs['update_data_attribute_binding'] = self.grpc_channel.unary_unary( - '/google.cloud.dataplex.v1.DataTaxonomyService/UpdateDataAttributeBinding', - request_serializer=data_taxonomy.UpdateDataAttributeBindingRequest.serialize, - response_deserializer=operations_pb2.Operation.FromString, - ) - return self._stubs['update_data_attribute_binding'] - - @property - def delete_data_attribute_binding(self) -> Callable[ - [data_taxonomy.DeleteDataAttributeBindingRequest], - Awaitable[operations_pb2.Operation]]: - r"""Return a callable for the delete data attribute binding method over gRPC. - - Deletes a DataAttributeBinding resource. All - attributes within the DataAttributeBinding must be - deleted before the DataAttributeBinding can be deleted. - - Returns: - Callable[[~.DeleteDataAttributeBindingRequest], - Awaitable[~.Operation]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'delete_data_attribute_binding' not in self._stubs: - self._stubs['delete_data_attribute_binding'] = self.grpc_channel.unary_unary( - '/google.cloud.dataplex.v1.DataTaxonomyService/DeleteDataAttributeBinding', - request_serializer=data_taxonomy.DeleteDataAttributeBindingRequest.serialize, - response_deserializer=operations_pb2.Operation.FromString, - ) - return self._stubs['delete_data_attribute_binding'] - - @property - def list_data_attribute_bindings(self) -> Callable[ - [data_taxonomy.ListDataAttributeBindingsRequest], - Awaitable[data_taxonomy.ListDataAttributeBindingsResponse]]: - r"""Return a callable for the list data attribute bindings method over gRPC. - - Lists DataAttributeBinding resources in a project and - location. - - Returns: - Callable[[~.ListDataAttributeBindingsRequest], - Awaitable[~.ListDataAttributeBindingsResponse]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'list_data_attribute_bindings' not in self._stubs: - self._stubs['list_data_attribute_bindings'] = self.grpc_channel.unary_unary( - '/google.cloud.dataplex.v1.DataTaxonomyService/ListDataAttributeBindings', - request_serializer=data_taxonomy.ListDataAttributeBindingsRequest.serialize, - response_deserializer=data_taxonomy.ListDataAttributeBindingsResponse.deserialize, - ) - return self._stubs['list_data_attribute_bindings'] - - @property - def get_data_attribute_binding(self) -> Callable[ - [data_taxonomy.GetDataAttributeBindingRequest], - Awaitable[data_taxonomy.DataAttributeBinding]]: - r"""Return a callable for the get data attribute binding method over gRPC. - - Retrieves a DataAttributeBinding resource. - - Returns: - Callable[[~.GetDataAttributeBindingRequest], - Awaitable[~.DataAttributeBinding]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. 
- if 'get_data_attribute_binding' not in self._stubs: - self._stubs['get_data_attribute_binding'] = self.grpc_channel.unary_unary( - '/google.cloud.dataplex.v1.DataTaxonomyService/GetDataAttributeBinding', - request_serializer=data_taxonomy.GetDataAttributeBindingRequest.serialize, - response_deserializer=data_taxonomy.DataAttributeBinding.deserialize, - ) - return self._stubs['get_data_attribute_binding'] - - @property - def create_data_attribute(self) -> Callable[ - [data_taxonomy.CreateDataAttributeRequest], - Awaitable[operations_pb2.Operation]]: - r"""Return a callable for the create data attribute method over gRPC. - - Create a DataAttribute resource. - - Returns: - Callable[[~.CreateDataAttributeRequest], - Awaitable[~.Operation]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'create_data_attribute' not in self._stubs: - self._stubs['create_data_attribute'] = self.grpc_channel.unary_unary( - '/google.cloud.dataplex.v1.DataTaxonomyService/CreateDataAttribute', - request_serializer=data_taxonomy.CreateDataAttributeRequest.serialize, - response_deserializer=operations_pb2.Operation.FromString, - ) - return self._stubs['create_data_attribute'] - - @property - def update_data_attribute(self) -> Callable[ - [data_taxonomy.UpdateDataAttributeRequest], - Awaitable[operations_pb2.Operation]]: - r"""Return a callable for the update data attribute method over gRPC. - - Updates a DataAttribute resource. - - Returns: - Callable[[~.UpdateDataAttributeRequest], - Awaitable[~.Operation]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'update_data_attribute' not in self._stubs: - self._stubs['update_data_attribute'] = self.grpc_channel.unary_unary( - '/google.cloud.dataplex.v1.DataTaxonomyService/UpdateDataAttribute', - request_serializer=data_taxonomy.UpdateDataAttributeRequest.serialize, - response_deserializer=operations_pb2.Operation.FromString, - ) - return self._stubs['update_data_attribute'] - - @property - def delete_data_attribute(self) -> Callable[ - [data_taxonomy.DeleteDataAttributeRequest], - Awaitable[operations_pb2.Operation]]: - r"""Return a callable for the delete data attribute method over gRPC. - - Deletes a Data Attribute resource. - - Returns: - Callable[[~.DeleteDataAttributeRequest], - Awaitable[~.Operation]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. 
- if 'delete_data_attribute' not in self._stubs: - self._stubs['delete_data_attribute'] = self.grpc_channel.unary_unary( - '/google.cloud.dataplex.v1.DataTaxonomyService/DeleteDataAttribute', - request_serializer=data_taxonomy.DeleteDataAttributeRequest.serialize, - response_deserializer=operations_pb2.Operation.FromString, - ) - return self._stubs['delete_data_attribute'] - - @property - def list_data_attributes(self) -> Callable[ - [data_taxonomy.ListDataAttributesRequest], - Awaitable[data_taxonomy.ListDataAttributesResponse]]: - r"""Return a callable for the list data attributes method over gRPC. - - Lists Data Attribute resources in a DataTaxonomy. - - Returns: - Callable[[~.ListDataAttributesRequest], - Awaitable[~.ListDataAttributesResponse]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'list_data_attributes' not in self._stubs: - self._stubs['list_data_attributes'] = self.grpc_channel.unary_unary( - '/google.cloud.dataplex.v1.DataTaxonomyService/ListDataAttributes', - request_serializer=data_taxonomy.ListDataAttributesRequest.serialize, - response_deserializer=data_taxonomy.ListDataAttributesResponse.deserialize, - ) - return self._stubs['list_data_attributes'] - - @property - def get_data_attribute(self) -> Callable[ - [data_taxonomy.GetDataAttributeRequest], - Awaitable[data_taxonomy.DataAttribute]]: - r"""Return a callable for the get data attribute method over gRPC. - - Retrieves a Data Attribute resource. - - Returns: - Callable[[~.GetDataAttributeRequest], - Awaitable[~.DataAttribute]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. 
- if 'get_data_attribute' not in self._stubs: - self._stubs['get_data_attribute'] = self.grpc_channel.unary_unary( - '/google.cloud.dataplex.v1.DataTaxonomyService/GetDataAttribute', - request_serializer=data_taxonomy.GetDataAttributeRequest.serialize, - response_deserializer=data_taxonomy.DataAttribute.deserialize, - ) - return self._stubs['get_data_attribute'] - - def _prep_wrapped_messages(self, client_info): - """ Precompute the wrapped methods, overriding the base class method to use async wrappers.""" - self._wrapped_methods = { - self.create_data_taxonomy: self._wrap_method( - self.create_data_taxonomy, - default_timeout=None, - client_info=client_info, - ), - self.update_data_taxonomy: self._wrap_method( - self.update_data_taxonomy, - default_timeout=None, - client_info=client_info, - ), - self.delete_data_taxonomy: self._wrap_method( - self.delete_data_taxonomy, - default_timeout=None, - client_info=client_info, - ), - self.list_data_taxonomies: self._wrap_method( - self.list_data_taxonomies, - default_timeout=None, - client_info=client_info, - ), - self.get_data_taxonomy: self._wrap_method( - self.get_data_taxonomy, - default_timeout=None, - client_info=client_info, - ), - self.create_data_attribute_binding: self._wrap_method( - self.create_data_attribute_binding, - default_timeout=None, - client_info=client_info, - ), - self.update_data_attribute_binding: self._wrap_method( - self.update_data_attribute_binding, - default_timeout=None, - client_info=client_info, - ), - self.delete_data_attribute_binding: self._wrap_method( - self.delete_data_attribute_binding, - default_timeout=None, - client_info=client_info, - ), - self.list_data_attribute_bindings: self._wrap_method( - self.list_data_attribute_bindings, - default_timeout=None, - client_info=client_info, - ), - self.get_data_attribute_binding: self._wrap_method( - self.get_data_attribute_binding, - default_timeout=None, - client_info=client_info, - ), - self.create_data_attribute: self._wrap_method( - self.create_data_attribute, - default_timeout=None, - client_info=client_info, - ), - self.update_data_attribute: self._wrap_method( - self.update_data_attribute, - default_timeout=None, - client_info=client_info, - ), - self.delete_data_attribute: self._wrap_method( - self.delete_data_attribute, - default_timeout=None, - client_info=client_info, - ), - self.list_data_attributes: self._wrap_method( - self.list_data_attributes, - default_timeout=None, - client_info=client_info, - ), - self.get_data_attribute: self._wrap_method( - self.get_data_attribute, - default_timeout=None, - client_info=client_info, - ), - self.get_location: self._wrap_method( - self.get_location, - default_timeout=None, - client_info=client_info, - ), - self.list_locations: self._wrap_method( - self.list_locations, - default_timeout=None, - client_info=client_info, - ), - self.cancel_operation: self._wrap_method( - self.cancel_operation, - default_timeout=None, - client_info=client_info, - ), - self.delete_operation: self._wrap_method( - self.delete_operation, - default_timeout=None, - client_info=client_info, - ), - self.get_operation: self._wrap_method( - self.get_operation, - default_timeout=None, - client_info=client_info, - ), - self.list_operations: self._wrap_method( - self.list_operations, - default_timeout=None, - client_info=client_info, - ), - } - - def _wrap_method(self, func, *args, **kwargs): - if self._wrap_with_kind: # pragma: NO COVER - kwargs["kind"] = self.kind - return gapic_v1.method_async.wrap_method(func, *args, **kwargs) - - def 
close(self): - return self.grpc_channel.close() - - @property - def kind(self) -> str: - return "grpc_asyncio" - - @property - def delete_operation( - self, - ) -> Callable[[operations_pb2.DeleteOperationRequest], None]: - r"""Return a callable for the delete_operation method over gRPC. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if "delete_operation" not in self._stubs: - self._stubs["delete_operation"] = self.grpc_channel.unary_unary( - "/google.longrunning.Operations/DeleteOperation", - request_serializer=operations_pb2.DeleteOperationRequest.SerializeToString, - response_deserializer=None, - ) - return self._stubs["delete_operation"] - - @property - def cancel_operation( - self, - ) -> Callable[[operations_pb2.CancelOperationRequest], None]: - r"""Return a callable for the cancel_operation method over gRPC. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if "cancel_operation" not in self._stubs: - self._stubs["cancel_operation"] = self.grpc_channel.unary_unary( - "/google.longrunning.Operations/CancelOperation", - request_serializer=operations_pb2.CancelOperationRequest.SerializeToString, - response_deserializer=None, - ) - return self._stubs["cancel_operation"] - - @property - def get_operation( - self, - ) -> Callable[[operations_pb2.GetOperationRequest], operations_pb2.Operation]: - r"""Return a callable for the get_operation method over gRPC. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if "get_operation" not in self._stubs: - self._stubs["get_operation"] = self.grpc_channel.unary_unary( - "/google.longrunning.Operations/GetOperation", - request_serializer=operations_pb2.GetOperationRequest.SerializeToString, - response_deserializer=operations_pb2.Operation.FromString, - ) - return self._stubs["get_operation"] - - @property - def list_operations( - self, - ) -> Callable[[operations_pb2.ListOperationsRequest], operations_pb2.ListOperationsResponse]: - r"""Return a callable for the list_operations method over gRPC. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if "list_operations" not in self._stubs: - self._stubs["list_operations"] = self.grpc_channel.unary_unary( - "/google.longrunning.Operations/ListOperations", - request_serializer=operations_pb2.ListOperationsRequest.SerializeToString, - response_deserializer=operations_pb2.ListOperationsResponse.FromString, - ) - return self._stubs["list_operations"] - - @property - def list_locations( - self, - ) -> Callable[[locations_pb2.ListLocationsRequest], locations_pb2.ListLocationsResponse]: - r"""Return a callable for the list locations method over gRPC. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. 
- if "list_locations" not in self._stubs: - self._stubs["list_locations"] = self.grpc_channel.unary_unary( - "/google.cloud.location.Locations/ListLocations", - request_serializer=locations_pb2.ListLocationsRequest.SerializeToString, - response_deserializer=locations_pb2.ListLocationsResponse.FromString, - ) - return self._stubs["list_locations"] - - @property - def get_location( - self, - ) -> Callable[[locations_pb2.GetLocationRequest], locations_pb2.Location]: - r"""Return a callable for the list locations method over gRPC. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if "get_location" not in self._stubs: - self._stubs["get_location"] = self.grpc_channel.unary_unary( - "/google.cloud.location.Locations/GetLocation", - request_serializer=locations_pb2.GetLocationRequest.SerializeToString, - response_deserializer=locations_pb2.Location.FromString, - ) - return self._stubs["get_location"] - - -__all__ = ( - 'DataTaxonomyServiceGrpcAsyncIOTransport', -) diff --git a/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/dataplex_service/__init__.py b/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/dataplex_service/__init__.py deleted file mode 100644 index 26191ecad898..000000000000 --- a/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/dataplex_service/__init__.py +++ /dev/null @@ -1,22 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -from .client import DataplexServiceClient -from .async_client import DataplexServiceAsyncClient - -__all__ = ( - 'DataplexServiceClient', - 'DataplexServiceAsyncClient', -) diff --git a/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/dataplex_service/async_client.py b/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/dataplex_service/async_client.py deleted file mode 100644 index bcfddabeab46..000000000000 --- a/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/dataplex_service/async_client.py +++ /dev/null @@ -1,4580 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-#
-from collections import OrderedDict
-import re
-from typing import Dict, Callable, Mapping, MutableMapping, MutableSequence, Optional, Sequence, Tuple, Type, Union
-
-from google.cloud.dataplex_v1 import gapic_version as package_version
-
-from google.api_core.client_options import ClientOptions
-from google.api_core import exceptions as core_exceptions
-from google.api_core import gapic_v1
-from google.api_core import retry_async as retries
-from google.auth import credentials as ga_credentials  # type: ignore
-from google.oauth2 import service_account  # type: ignore
-
-
-try:
-    OptionalRetry = Union[retries.AsyncRetry, gapic_v1.method._MethodDefault, None]
-except AttributeError:  # pragma: NO COVER
-    OptionalRetry = Union[retries.AsyncRetry, object, None]  # type: ignore
-
-from google.api_core import operation  # type: ignore
-from google.api_core import operation_async  # type: ignore
-from google.cloud.dataplex_v1.services.dataplex_service import pagers
-from google.cloud.dataplex_v1.types import analyze
-from google.cloud.dataplex_v1.types import resources
-from google.cloud.dataplex_v1.types import service
-from google.cloud.dataplex_v1.types import tasks
-from google.cloud.location import locations_pb2 # type: ignore
-from google.iam.v1 import iam_policy_pb2  # type: ignore
-from google.iam.v1 import policy_pb2  # type: ignore
-from google.longrunning import operations_pb2 # type: ignore
-from google.protobuf import empty_pb2  # type: ignore
-from google.protobuf import field_mask_pb2  # type: ignore
-from google.protobuf import timestamp_pb2  # type: ignore
-from .transports.base import DataplexServiceTransport, DEFAULT_CLIENT_INFO
-from .transports.grpc_asyncio import DataplexServiceGrpcAsyncIOTransport
-from .client import DataplexServiceClient
-
-
-class DataplexServiceAsyncClient:
-    """Dataplex service provides data lakes as a service. The
-    primary resources offered by this service are Lakes, Zones, and
-    Assets, which collectively allow a data administrator to
-    organize, manage, secure, and catalog data across their
-    organization, stored across cloud projects in a variety of
-    storage systems including Cloud Storage and BigQuery.
-    """
-
-    _client: DataplexServiceClient
-
-    # Copy defaults from the synchronous client for use here.
-    # Note: DEFAULT_ENDPOINT is deprecated. Use _DEFAULT_ENDPOINT_TEMPLATE instead.
- DEFAULT_ENDPOINT = DataplexServiceClient.DEFAULT_ENDPOINT - DEFAULT_MTLS_ENDPOINT = DataplexServiceClient.DEFAULT_MTLS_ENDPOINT - _DEFAULT_ENDPOINT_TEMPLATE = DataplexServiceClient._DEFAULT_ENDPOINT_TEMPLATE - _DEFAULT_UNIVERSE = DataplexServiceClient._DEFAULT_UNIVERSE - - action_path = staticmethod(DataplexServiceClient.action_path) - parse_action_path = staticmethod(DataplexServiceClient.parse_action_path) - asset_path = staticmethod(DataplexServiceClient.asset_path) - parse_asset_path = staticmethod(DataplexServiceClient.parse_asset_path) - environment_path = staticmethod(DataplexServiceClient.environment_path) - parse_environment_path = staticmethod(DataplexServiceClient.parse_environment_path) - job_path = staticmethod(DataplexServiceClient.job_path) - parse_job_path = staticmethod(DataplexServiceClient.parse_job_path) - lake_path = staticmethod(DataplexServiceClient.lake_path) - parse_lake_path = staticmethod(DataplexServiceClient.parse_lake_path) - session_path = staticmethod(DataplexServiceClient.session_path) - parse_session_path = staticmethod(DataplexServiceClient.parse_session_path) - task_path = staticmethod(DataplexServiceClient.task_path) - parse_task_path = staticmethod(DataplexServiceClient.parse_task_path) - zone_path = staticmethod(DataplexServiceClient.zone_path) - parse_zone_path = staticmethod(DataplexServiceClient.parse_zone_path) - common_billing_account_path = staticmethod(DataplexServiceClient.common_billing_account_path) - parse_common_billing_account_path = staticmethod(DataplexServiceClient.parse_common_billing_account_path) - common_folder_path = staticmethod(DataplexServiceClient.common_folder_path) - parse_common_folder_path = staticmethod(DataplexServiceClient.parse_common_folder_path) - common_organization_path = staticmethod(DataplexServiceClient.common_organization_path) - parse_common_organization_path = staticmethod(DataplexServiceClient.parse_common_organization_path) - common_project_path = staticmethod(DataplexServiceClient.common_project_path) - parse_common_project_path = staticmethod(DataplexServiceClient.parse_common_project_path) - common_location_path = staticmethod(DataplexServiceClient.common_location_path) - parse_common_location_path = staticmethod(DataplexServiceClient.parse_common_location_path) - - @classmethod - def from_service_account_info(cls, info: dict, *args, **kwargs): - """Creates an instance of this client using the provided credentials - info. - - Args: - info (dict): The service account private key info. - args: Additional arguments to pass to the constructor. - kwargs: Additional arguments to pass to the constructor. - - Returns: - DataplexServiceAsyncClient: The constructed client. - """ - return DataplexServiceClient.from_service_account_info.__func__(DataplexServiceAsyncClient, info, *args, **kwargs) # type: ignore - - @classmethod - def from_service_account_file(cls, filename: str, *args, **kwargs): - """Creates an instance of this client using the provided credentials - file. - - Args: - filename (str): The path to the service account private key json - file. - args: Additional arguments to pass to the constructor. - kwargs: Additional arguments to pass to the constructor. - - Returns: - DataplexServiceAsyncClient: The constructed client. 
- """ - return DataplexServiceClient.from_service_account_file.__func__(DataplexServiceAsyncClient, filename, *args, **kwargs) # type: ignore - - from_service_account_json = from_service_account_file - - @classmethod - def get_mtls_endpoint_and_cert_source(cls, client_options: Optional[ClientOptions] = None): - """Return the API endpoint and client cert source for mutual TLS. - - The client cert source is determined in the following order: - (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the - client cert source is None. - (2) if `client_options.client_cert_source` is provided, use the provided one; if the - default client cert source exists, use the default one; otherwise the client cert - source is None. - - The API endpoint is determined in the following order: - (1) if `client_options.api_endpoint` if provided, use the provided one. - (2) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is "always", use the - default mTLS endpoint; if the environment variable is "never", use the default API - endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise - use the default API endpoint. - - More details can be found at https://google.aip.dev/auth/4114. - - Args: - client_options (google.api_core.client_options.ClientOptions): Custom options for the - client. Only the `api_endpoint` and `client_cert_source` properties may be used - in this method. - - Returns: - Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the - client cert source to use. - - Raises: - google.auth.exceptions.MutualTLSChannelError: If any errors happen. - """ - return DataplexServiceClient.get_mtls_endpoint_and_cert_source(client_options) # type: ignore - - @property - def transport(self) -> DataplexServiceTransport: - """Returns the transport used by the client instance. - - Returns: - DataplexServiceTransport: The transport used by the client instance. - """ - return self._client.transport - - @property - def api_endpoint(self): - """Return the API endpoint used by the client instance. - - Returns: - str: The API endpoint used by the client instance. - """ - return self._client._api_endpoint - - @property - def universe_domain(self) -> str: - """Return the universe domain used by the client instance. - - Returns: - str: The universe domain used - by the client instance. - """ - return self._client._universe_domain - - get_transport_class = DataplexServiceClient.get_transport_class - - def __init__(self, *, - credentials: Optional[ga_credentials.Credentials] = None, - transport: Optional[Union[str, DataplexServiceTransport, Callable[..., DataplexServiceTransport]]] = "grpc_asyncio", - client_options: Optional[ClientOptions] = None, - client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, - ) -> None: - """Instantiates the dataplex service async client. - - Args: - credentials (Optional[google.auth.credentials.Credentials]): The - authorization credentials to attach to requests. These - credentials identify the application to the service; if none - are specified, the client will attempt to ascertain the - credentials from the environment. - transport (Optional[Union[str,DataplexServiceTransport,Callable[..., DataplexServiceTransport]]]): - The transport to use, or a Callable that constructs and returns a new transport to use. - If a Callable is given, it will be called with the same set of initialization - arguments as used in the DataplexServiceTransport constructor. 
-                If set to None, a transport is chosen automatically.
-            client_options (Optional[Union[google.api_core.client_options.ClientOptions, dict]]):
-                Custom options for the client.
-
-                1. The ``api_endpoint`` property can be used to override the
-                default endpoint provided by the client when ``transport`` is
-                not explicitly provided. Only if this property is not set and
-                ``transport`` was not explicitly provided, the endpoint is
-                determined by the GOOGLE_API_USE_MTLS_ENDPOINT environment
-                variable, which has one of the following values:
-                "always" (always use the default mTLS endpoint), "never" (always
-                use the default regular endpoint) and "auto" (auto-switch to the
-                default mTLS endpoint if client certificate is present; this is
-                the default value).
-
-                2. If the GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable
-                is "true", then the ``client_cert_source`` property can be used
-                to provide a client certificate for mTLS transport. If
-                not provided, the default SSL client certificate will be used if
-                present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not
-                set, no client certificate will be used.
-
-                3. The ``universe_domain`` property can be used to override the
-                default "googleapis.com" universe. Note that ``api_endpoint``
-                property still takes precedence; and ``universe_domain`` is
-                currently not supported for mTLS.
-
-            client_info (google.api_core.gapic_v1.client_info.ClientInfo):
-                The client info used to send a user-agent string along with
-                API requests. If ``None``, then default info will be used.
-                Generally, you only need to set this if you're developing
-                your own client library.
-
-        Raises:
-            google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport
-                creation failed for any reason.
-        """
-        self._client = DataplexServiceClient(
-            credentials=credentials,
-            transport=transport,
-            client_options=client_options,
-            client_info=client_info,
-
-        )
-
-    async def create_lake(self,
-            request: Optional[Union[service.CreateLakeRequest, dict]] = None,
-            *,
-            parent: Optional[str] = None,
-            lake: Optional[resources.Lake] = None,
-            lake_id: Optional[str] = None,
-            retry: OptionalRetry = gapic_v1.method.DEFAULT,
-            timeout: Union[float, object] = gapic_v1.method.DEFAULT,
-            metadata: Sequence[Tuple[str, str]] = (),
-            ) -> operation_async.AsyncOperation:
-        r"""Creates a lake resource.
-
-        .. code-block:: python
-
-            # This snippet has been automatically generated and should be regarded as a
-            # code template only.
-            # It will require modifications to work:
-            # - It may require correct/in-range values for request initialization.
-            # - It may require specifying regional endpoints when creating the service
-            #   client as shown in:
-            #   https://googleapis.dev/python/google-api-core/latest/client_options.html
-            from google.cloud import dataplex_v1
-
-            async def sample_create_lake():
-                # Create a client
-                client = dataplex_v1.DataplexServiceAsyncClient()
-
-                # Initialize request argument(s)
-                request = dataplex_v1.CreateLakeRequest(
-                    parent="parent_value",
-                    lake_id="lake_id_value",
-                )
-
-                # Make the request
-                operation = client.create_lake(request=request)
-
-                print("Waiting for operation to complete...")
-
-                response = (await operation).result()
-
-                # Handle the response
-                print(response)
-
-        Args:
-            request (Optional[Union[google.cloud.dataplex_v1.types.CreateLakeRequest, dict]]):
-                The request object. Create lake request.
-            parent (:class:`str`):
-                Required. 
The resource name of the lake location, of the - form: projects/{project_number}/locations/{location_id} - where ``location_id`` refers to a GCP region. - - This corresponds to the ``parent`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - lake (:class:`google.cloud.dataplex_v1.types.Lake`): - Required. Lake resource - This corresponds to the ``lake`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - lake_id (:class:`str`): - Required. Lake identifier. This ID will be used to - generate names such as database and dataset names when - publishing metadata to Hive Metastore and BigQuery. - - - Must contain only lowercase letters, numbers and - hyphens. - - Must start with a letter. - - Must end with a number or a letter. - - Must be between 1-63 characters. - - Must be unique within the customer project / - location. - - This corresponds to the ``lake_id`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.api_core.operation_async.AsyncOperation: - An object representing a long-running operation. - - The result type for the operation will be :class:`google.cloud.dataplex_v1.types.Lake` A lake is a centralized repository for managing enterprise data across the - organization distributed across many cloud projects, - and stored in a variety of storage services such as - Google Cloud Storage and BigQuery. The resources - attached to a lake are referred to as managed - resources. Data within these managed resources can be - structured or unstructured. A lake provides data - admins with tools to organize, secure and manage - their data at scale, and provides data scientists and - data engineers an integrated experience to easily - search, discover, analyze and transform data and - associated metadata. - - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([parent, lake, lake_id]) - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, service.CreateLakeRequest): - request = service.CreateLakeRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if parent is not None: - request.parent = parent - if lake is not None: - request.lake = lake - if lake_id is not None: - request.lake_id = lake_id - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._client._transport._wrapped_methods[self._client._transport.create_lake] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. 
- response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Wrap the response in an operation future. - response = operation_async.from_gapic( - response, - self._client._transport.operations_client, - resources.Lake, - metadata_type=service.OperationMetadata, - ) - - # Done; return the response. - return response - - async def update_lake(self, - request: Optional[Union[service.UpdateLakeRequest, dict]] = None, - *, - lake: Optional[resources.Lake] = None, - update_mask: Optional[field_mask_pb2.FieldMask] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> operation_async.AsyncOperation: - r"""Updates a lake resource. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import dataplex_v1 - - async def sample_update_lake(): - # Create a client - client = dataplex_v1.DataplexServiceAsyncClient() - - # Initialize request argument(s) - request = dataplex_v1.UpdateLakeRequest( - ) - - # Make the request - operation = client.update_lake(request=request) - - print("Waiting for operation to complete...") - - response = (await operation).result() - - # Handle the response - print(response) - - Args: - request (Optional[Union[google.cloud.dataplex_v1.types.UpdateLakeRequest, dict]]): - The request object. Update lake request. - lake (:class:`google.cloud.dataplex_v1.types.Lake`): - Required. Update description. Only fields specified in - ``update_mask`` are updated. - - This corresponds to the ``lake`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - update_mask (:class:`google.protobuf.field_mask_pb2.FieldMask`): - Required. Mask of fields to update. - This corresponds to the ``update_mask`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.api_core.operation_async.AsyncOperation: - An object representing a long-running operation. - - The result type for the operation will be :class:`google.cloud.dataplex_v1.types.Lake` A lake is a centralized repository for managing enterprise data across the - organization distributed across many cloud projects, - and stored in a variety of storage services such as - Google Cloud Storage and BigQuery. The resources - attached to a lake are referred to as managed - resources. Data within these managed resources can be - structured or unstructured. A lake provides data - admins with tools to organize, secure and manage - their data at scale, and provides data scientists and - data engineers an integrated experience to easily - search, discover, analyze and transform data and - associated metadata. - - """ - # Create or coerce a protobuf request object. 
- # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([lake, update_mask]) - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, service.UpdateLakeRequest): - request = service.UpdateLakeRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if lake is not None: - request.lake = lake - if update_mask is not None: - request.update_mask = update_mask - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._client._transport._wrapped_methods[self._client._transport.update_lake] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("lake.name", request.lake.name), - )), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Wrap the response in an operation future. - response = operation_async.from_gapic( - response, - self._client._transport.operations_client, - resources.Lake, - metadata_type=service.OperationMetadata, - ) - - # Done; return the response. - return response - - async def delete_lake(self, - request: Optional[Union[service.DeleteLakeRequest, dict]] = None, - *, - name: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> operation_async.AsyncOperation: - r"""Deletes a lake resource. All zones within the lake - must be deleted before the lake can be deleted. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import dataplex_v1 - - async def sample_delete_lake(): - # Create a client - client = dataplex_v1.DataplexServiceAsyncClient() - - # Initialize request argument(s) - request = dataplex_v1.DeleteLakeRequest( - name="name_value", - ) - - # Make the request - operation = client.delete_lake(request=request) - - print("Waiting for operation to complete...") - - response = (await operation).result() - - # Handle the response - print(response) - - Args: - request (Optional[Union[google.cloud.dataplex_v1.types.DeleteLakeRequest, dict]]): - The request object. Delete lake request. - name (:class:`str`): - Required. The resource name of the lake: - ``projects/{project_number}/locations/{location_id}/lakes/{lake_id}``. - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. 
- metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.api_core.operation_async.AsyncOperation: - An object representing a long-running operation. - - The result type for the operation will be :class:`google.protobuf.empty_pb2.Empty` A generic empty message that you can re-use to avoid defining duplicated - empty messages in your APIs. A typical example is to - use it as the request or the response type of an API - method. For instance: - - service Foo { - rpc Bar(google.protobuf.Empty) returns - (google.protobuf.Empty); - - } - - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([name]) - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, service.DeleteLakeRequest): - request = service.DeleteLakeRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._client._transport._wrapped_methods[self._client._transport.delete_lake] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Wrap the response in an operation future. - response = operation_async.from_gapic( - response, - self._client._transport.operations_client, - empty_pb2.Empty, - metadata_type=service.OperationMetadata, - ) - - # Done; return the response. - return response - - async def list_lakes(self, - request: Optional[Union[service.ListLakesRequest, dict]] = None, - *, - parent: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> pagers.ListLakesAsyncPager: - r"""Lists lake resources in a project and location. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import dataplex_v1 - - async def sample_list_lakes(): - # Create a client - client = dataplex_v1.DataplexServiceAsyncClient() - - # Initialize request argument(s) - request = dataplex_v1.ListLakesRequest( - parent="parent_value", - ) - - # Make the request - page_result = client.list_lakes(request=request) - - # Handle the response - async for response in page_result: - print(response) - - Args: - request (Optional[Union[google.cloud.dataplex_v1.types.ListLakesRequest, dict]]): - The request object. 
List lakes request. - parent (:class:`str`): - Required. The resource name of the lake location, of the - form: - ``projects/{project_number}/locations/{location_id}`` - where ``location_id`` refers to a GCP region. - - This corresponds to the ``parent`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.cloud.dataplex_v1.services.dataplex_service.pagers.ListLakesAsyncPager: - List lakes response. - - Iterating over this object will yield - results and resolve additional pages - automatically. - - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([parent]) - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, service.ListLakesRequest): - request = service.ListLakesRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if parent is not None: - request.parent = parent - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._client._transport._wrapped_methods[self._client._transport.list_lakes] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # This method is paged; wrap the response in a pager, which provides - # an `__aiter__` convenience method. - response = pagers.ListLakesAsyncPager( - method=rpc, - request=request, - response=response, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - async def get_lake(self, - request: Optional[Union[service.GetLakeRequest, dict]] = None, - *, - name: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> resources.Lake: - r"""Retrieves a lake resource. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. 
- # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import dataplex_v1 - - async def sample_get_lake(): - # Create a client - client = dataplex_v1.DataplexServiceAsyncClient() - - # Initialize request argument(s) - request = dataplex_v1.GetLakeRequest( - name="name_value", - ) - - # Make the request - response = await client.get_lake(request=request) - - # Handle the response - print(response) - - Args: - request (Optional[Union[google.cloud.dataplex_v1.types.GetLakeRequest, dict]]): - The request object. Get lake request. - name (:class:`str`): - Required. The resource name of the lake: - ``projects/{project_number}/locations/{location_id}/lakes/{lake_id}``. - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.cloud.dataplex_v1.types.Lake: - A lake is a centralized repository - for managing enterprise data across the - organization distributed across many - cloud projects, and stored in a variety - of storage services such as Google Cloud - Storage and BigQuery. The resources - attached to a lake are referred to as - managed resources. Data within these - managed resources can be structured or - unstructured. A lake provides data - admins with tools to organize, secure - and manage their data at scale, and - provides data scientists and data - engineers an integrated experience to - easily search, discover, analyze and - transform data and associated metadata. - - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([name]) - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, service.GetLakeRequest): - request = service.GetLakeRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._client._transport._wrapped_methods[self._client._transport.get_lake] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. 
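# Note (editorial): unlike the create/update/delete methods above, get_lake is
# a unary call; the awaited rpc already yields the resources.Lake message, so
# there is no operation future to wrap before returning.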
- return response - - async def list_lake_actions(self, - request: Optional[Union[service.ListLakeActionsRequest, dict]] = None, - *, - parent: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> pagers.ListLakeActionsAsyncPager: - r"""Lists action resources in a lake. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import dataplex_v1 - - async def sample_list_lake_actions(): - # Create a client - client = dataplex_v1.DataplexServiceAsyncClient() - - # Initialize request argument(s) - request = dataplex_v1.ListLakeActionsRequest( - parent="parent_value", - ) - - # Make the request - page_result = client.list_lake_actions(request=request) - - # Handle the response - async for response in page_result: - print(response) - - Args: - request (Optional[Union[google.cloud.dataplex_v1.types.ListLakeActionsRequest, dict]]): - The request object. List lake actions request. - parent (:class:`str`): - Required. The resource name of the parent lake: - ``projects/{project_number}/locations/{location_id}/lakes/{lake_id}``. - - This corresponds to the ``parent`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.cloud.dataplex_v1.services.dataplex_service.pagers.ListLakeActionsAsyncPager: - List actions response. - - Iterating over this object will yield - results and resolve additional pages - automatically. - - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([parent]) - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, service.ListLakeActionsRequest): - request = service.ListLakeActionsRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if parent is not None: - request.parent = parent - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._client._transport._wrapped_methods[self._client._transport.list_lake_actions] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. 
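# Illustrative sketch (not part of the generated client): the pager returned
# below fetches subsequent pages transparently. With a hypothetical lake name:
#
#     pager = await client.list_lake_actions(
#         parent="projects/p/locations/l/lakes/my-lake",
#     )
#     async for action in pager:
#         print(action)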
-        response = await rpc(
-            request,
-            retry=retry,
-            timeout=timeout,
-            metadata=metadata,
-        )
-
-        # This method is paged; wrap the response in a pager, which provides
-        # an `__aiter__` convenience method.
-        response = pagers.ListLakeActionsAsyncPager(
-            method=rpc,
-            request=request,
-            response=response,
-            retry=retry,
-            timeout=timeout,
-            metadata=metadata,
-        )
-
-        # Done; return the response.
-        return response
-
-    async def create_zone(self,
-            request: Optional[Union[service.CreateZoneRequest, dict]] = None,
-            *,
-            parent: Optional[str] = None,
-            zone: Optional[resources.Zone] = None,
-            zone_id: Optional[str] = None,
-            retry: OptionalRetry = gapic_v1.method.DEFAULT,
-            timeout: Union[float, object] = gapic_v1.method.DEFAULT,
-            metadata: Sequence[Tuple[str, str]] = (),
-            ) -> operation_async.AsyncOperation:
-        r"""Creates a zone resource within a lake.
-
-        .. code-block:: python
-
-            # This snippet has been automatically generated and should be regarded as a
-            # code template only.
-            # It will require modifications to work:
-            # - It may require correct/in-range values for request initialization.
-            # - It may require specifying regional endpoints when creating the service
-            #   client as shown in:
-            #   https://googleapis.dev/python/google-api-core/latest/client_options.html
-            from google.cloud import dataplex_v1
-
-            async def sample_create_zone():
-                # Create a client
-                client = dataplex_v1.DataplexServiceAsyncClient()
-
-                # Initialize request argument(s)
-                zone = dataplex_v1.Zone()
-                zone.type_ = "CURATED"
-                zone.resource_spec.location_type = "MULTI_REGION"
-
-                request = dataplex_v1.CreateZoneRequest(
-                    parent="parent_value",
-                    zone_id="zone_id_value",
-                    zone=zone,
-                )
-
-                # Make the request
-                operation = client.create_zone(request=request)
-
-                print("Waiting for operation to complete...")
-
-                response = (await operation).result()
-
-                # Handle the response
-                print(response)
-
-        Args:
-            request (Optional[Union[google.cloud.dataplex_v1.types.CreateZoneRequest, dict]]):
-                The request object. Create zone request.
-            parent (:class:`str`):
-                Required. The resource name of the parent lake:
-                ``projects/{project_number}/locations/{location_id}/lakes/{lake_id}``.
-
-                This corresponds to the ``parent`` field
-                on the ``request`` instance; if ``request`` is provided, this
-                should not be set.
-            zone (:class:`google.cloud.dataplex_v1.types.Zone`):
-                Required. Zone resource.
-                This corresponds to the ``zone`` field
-                on the ``request`` instance; if ``request`` is provided, this
-                should not be set.
-            zone_id (:class:`str`):
-                Required. Zone identifier. This ID will be used to
-                generate names such as database and dataset names when
-                publishing metadata to Hive Metastore and BigQuery.
-
-                -  Must contain only lowercase letters, numbers and
-                   hyphens.
-                -  Must start with a letter.
-                -  Must end with a number or a letter.
-                -  Must be between 1 and 63 characters.
-                -  Must be unique across all lakes from all locations in
-                   a project.
-                -  Must not be one of the reserved IDs (i.e. "default",
-                   "global-temp").
-
-                This corresponds to the ``zone_id`` field
-                on the ``request`` instance; if ``request`` is provided, this
-                should not be set.
-            retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any,
-                should be retried.
-            timeout (float): The timeout for this request.
-            metadata (Sequence[Tuple[str, str]]): Strings which should be
-                sent along with the request as metadata.
-
-        Returns:
-            google.api_core.operation_async.AsyncOperation:
-                An object representing a long-running operation.
- - The result type for the operation will be :class:`google.cloud.dataplex_v1.types.Zone` A zone represents a logical group of related assets within a lake. A zone can - be used to map to organizational structure or - represent stages of data readiness from raw to - curated. It provides managing behavior that is shared - or inherited by all contained assets. - - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([parent, zone, zone_id]) - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, service.CreateZoneRequest): - request = service.CreateZoneRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if parent is not None: - request.parent = parent - if zone is not None: - request.zone = zone - if zone_id is not None: - request.zone_id = zone_id - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._client._transport._wrapped_methods[self._client._transport.create_zone] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Wrap the response in an operation future. - response = operation_async.from_gapic( - response, - self._client._transport.operations_client, - resources.Zone, - metadata_type=service.OperationMetadata, - ) - - # Done; return the response. - return response - - async def update_zone(self, - request: Optional[Union[service.UpdateZoneRequest, dict]] = None, - *, - zone: Optional[resources.Zone] = None, - update_mask: Optional[field_mask_pb2.FieldMask] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> operation_async.AsyncOperation: - r"""Updates a zone resource. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. 
- # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import dataplex_v1 - - async def sample_update_zone(): - # Create a client - client = dataplex_v1.DataplexServiceAsyncClient() - - # Initialize request argument(s) - zone = dataplex_v1.Zone() - zone.type_ = "CURATED" - zone.resource_spec.location_type = "MULTI_REGION" - - request = dataplex_v1.UpdateZoneRequest( - zone=zone, - ) - - # Make the request - operation = client.update_zone(request=request) - - print("Waiting for operation to complete...") - - response = (await operation).result() - - # Handle the response - print(response) - - Args: - request (Optional[Union[google.cloud.dataplex_v1.types.UpdateZoneRequest, dict]]): - The request object. Update zone request. - zone (:class:`google.cloud.dataplex_v1.types.Zone`): - Required. Update description. Only fields specified in - ``update_mask`` are updated. - - This corresponds to the ``zone`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - update_mask (:class:`google.protobuf.field_mask_pb2.FieldMask`): - Required. Mask of fields to update. - This corresponds to the ``update_mask`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.api_core.operation_async.AsyncOperation: - An object representing a long-running operation. - - The result type for the operation will be :class:`google.cloud.dataplex_v1.types.Zone` A zone represents a logical group of related assets within a lake. A zone can - be used to map to organizational structure or - represent stages of data readiness from raw to - curated. It provides managing behavior that is shared - or inherited by all contained assets. - - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([zone, update_mask]) - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, service.UpdateZoneRequest): - request = service.UpdateZoneRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if zone is not None: - request.zone = zone - if update_mask is not None: - request.update_mask = update_mask - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._client._transport._wrapped_methods[self._client._transport.update_zone] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("zone.name", request.zone.name), - )), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. 
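# Illustrative sketch (not part of the generated client): the update_* methods
# apply only the paths named in ``update_mask``; other fields on the resource
# are left untouched. With hypothetical values:
#
#     mask = field_mask_pb2.FieldMask(paths=["description", "labels"])
#     zone = dataplex_v1.Zone(
#         name="projects/p/locations/l/lakes/my-lake/zones/my-zone",
#         description="curated zone",
#     )
#     operation = await client.update_zone(zone=zone, update_mask=mask)
#     response = await operation.result()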
- response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Wrap the response in an operation future. - response = operation_async.from_gapic( - response, - self._client._transport.operations_client, - resources.Zone, - metadata_type=service.OperationMetadata, - ) - - # Done; return the response. - return response - - async def delete_zone(self, - request: Optional[Union[service.DeleteZoneRequest, dict]] = None, - *, - name: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> operation_async.AsyncOperation: - r"""Deletes a zone resource. All assets within a zone - must be deleted before the zone can be deleted. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import dataplex_v1 - - async def sample_delete_zone(): - # Create a client - client = dataplex_v1.DataplexServiceAsyncClient() - - # Initialize request argument(s) - request = dataplex_v1.DeleteZoneRequest( - name="name_value", - ) - - # Make the request - operation = client.delete_zone(request=request) - - print("Waiting for operation to complete...") - - response = (await operation).result() - - # Handle the response - print(response) - - Args: - request (Optional[Union[google.cloud.dataplex_v1.types.DeleteZoneRequest, dict]]): - The request object. Delete zone request. - name (:class:`str`): - Required. The resource name of the zone: - ``projects/{project_number}/locations/{location_id}/lakes/{lake_id}/zones/{zone_id}``. - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.api_core.operation_async.AsyncOperation: - An object representing a long-running operation. - - The result type for the operation will be :class:`google.protobuf.empty_pb2.Empty` A generic empty message that you can re-use to avoid defining duplicated - empty messages in your APIs. A typical example is to - use it as the request or the response type of an API - method. For instance: - - service Foo { - rpc Bar(google.protobuf.Empty) returns - (google.protobuf.Empty); - - } - - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([name]) - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. 
- if not isinstance(request, service.DeleteZoneRequest): - request = service.DeleteZoneRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._client._transport._wrapped_methods[self._client._transport.delete_zone] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Wrap the response in an operation future. - response = operation_async.from_gapic( - response, - self._client._transport.operations_client, - empty_pb2.Empty, - metadata_type=service.OperationMetadata, - ) - - # Done; return the response. - return response - - async def list_zones(self, - request: Optional[Union[service.ListZonesRequest, dict]] = None, - *, - parent: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> pagers.ListZonesAsyncPager: - r"""Lists zone resources in a lake. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import dataplex_v1 - - async def sample_list_zones(): - # Create a client - client = dataplex_v1.DataplexServiceAsyncClient() - - # Initialize request argument(s) - request = dataplex_v1.ListZonesRequest( - parent="parent_value", - ) - - # Make the request - page_result = client.list_zones(request=request) - - # Handle the response - async for response in page_result: - print(response) - - Args: - request (Optional[Union[google.cloud.dataplex_v1.types.ListZonesRequest, dict]]): - The request object. List zones request. - parent (:class:`str`): - Required. The resource name of the parent lake: - ``projects/{project_number}/locations/{location_id}/lakes/{lake_id}``. - - This corresponds to the ``parent`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.cloud.dataplex_v1.services.dataplex_service.pagers.ListZonesAsyncPager: - List zones response. - - Iterating over this object will yield - results and resolve additional pages - automatically. - - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. 
- has_flattened_params = any([parent]) - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, service.ListZonesRequest): - request = service.ListZonesRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if parent is not None: - request.parent = parent - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._client._transport._wrapped_methods[self._client._transport.list_zones] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # This method is paged; wrap the response in a pager, which provides - # an `__aiter__` convenience method. - response = pagers.ListZonesAsyncPager( - method=rpc, - request=request, - response=response, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - async def get_zone(self, - request: Optional[Union[service.GetZoneRequest, dict]] = None, - *, - name: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> resources.Zone: - r"""Retrieves a zone resource. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import dataplex_v1 - - async def sample_get_zone(): - # Create a client - client = dataplex_v1.DataplexServiceAsyncClient() - - # Initialize request argument(s) - request = dataplex_v1.GetZoneRequest( - name="name_value", - ) - - # Make the request - response = await client.get_zone(request=request) - - # Handle the response - print(response) - - Args: - request (Optional[Union[google.cloud.dataplex_v1.types.GetZoneRequest, dict]]): - The request object. Get zone request. - name (:class:`str`): - Required. The resource name of the zone: - ``projects/{project_number}/locations/{location_id}/lakes/{lake_id}/zones/{zone_id}``. - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.cloud.dataplex_v1.types.Zone: - A zone represents a logical group of - related assets within a lake. A zone can - be used to map to organizational - structure or represent stages of data - readiness from raw to curated. 
It - provides managing behavior that is - shared or inherited by all contained - assets. - - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([name]) - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, service.GetZoneRequest): - request = service.GetZoneRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._client._transport._wrapped_methods[self._client._transport.get_zone] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - async def list_zone_actions(self, - request: Optional[Union[service.ListZoneActionsRequest, dict]] = None, - *, - parent: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> pagers.ListZoneActionsAsyncPager: - r"""Lists action resources in a zone. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import dataplex_v1 - - async def sample_list_zone_actions(): - # Create a client - client = dataplex_v1.DataplexServiceAsyncClient() - - # Initialize request argument(s) - request = dataplex_v1.ListZoneActionsRequest( - parent="parent_value", - ) - - # Make the request - page_result = client.list_zone_actions(request=request) - - # Handle the response - async for response in page_result: - print(response) - - Args: - request (Optional[Union[google.cloud.dataplex_v1.types.ListZoneActionsRequest, dict]]): - The request object. List zone actions request. - parent (:class:`str`): - Required. The resource name of the parent zone: - ``projects/{project_number}/locations/{location_id}/lakes/{lake_id}/zones/{zone_id}``. - - This corresponds to the ``parent`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.cloud.dataplex_v1.services.dataplex_service.pagers.ListZoneActionsAsyncPager: - List actions response. 
- - Iterating over this object will yield - results and resolve additional pages - automatically. - - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([parent]) - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, service.ListZoneActionsRequest): - request = service.ListZoneActionsRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if parent is not None: - request.parent = parent - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._client._transport._wrapped_methods[self._client._transport.list_zone_actions] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # This method is paged; wrap the response in a pager, which provides - # an `__aiter__` convenience method. - response = pagers.ListZoneActionsAsyncPager( - method=rpc, - request=request, - response=response, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - async def create_asset(self, - request: Optional[Union[service.CreateAssetRequest, dict]] = None, - *, - parent: Optional[str] = None, - asset: Optional[resources.Asset] = None, - asset_id: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> operation_async.AsyncOperation: - r"""Creates an asset resource. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import dataplex_v1 - - async def sample_create_asset(): - # Create a client - client = dataplex_v1.DataplexServiceAsyncClient() - - # Initialize request argument(s) - asset = dataplex_v1.Asset() - asset.resource_spec.type_ = "BIGQUERY_DATASET" - - request = dataplex_v1.CreateAssetRequest( - parent="parent_value", - asset_id="asset_id_value", - asset=asset, - ) - - # Make the request - operation = client.create_asset(request=request) - - print("Waiting for operation to complete...") - - response = (await operation).result() - - # Handle the response - print(response) - - Args: - request (Optional[Union[google.cloud.dataplex_v1.types.CreateAssetRequest, dict]]): - The request object. Create asset request. - parent (:class:`str`): - Required. 
-                The resource name of the parent zone:
-                ``projects/{project_number}/locations/{location_id}/lakes/{lake_id}/zones/{zone_id}``.
-
-                This corresponds to the ``parent`` field
-                on the ``request`` instance; if ``request`` is provided, this
-                should not be set.
-            asset (:class:`google.cloud.dataplex_v1.types.Asset`):
-                Required. Asset resource.
-                This corresponds to the ``asset`` field
-                on the ``request`` instance; if ``request`` is provided, this
-                should not be set.
-            asset_id (:class:`str`):
-                Required. Asset identifier. This ID will be used to
-                generate names such as table names when publishing
-                metadata to Hive Metastore and BigQuery.
-
-                -  Must contain only lowercase letters, numbers and
-                   hyphens.
-                -  Must start with a letter.
-                -  Must end with a number or a letter.
-                -  Must be between 1 and 63 characters.
-                -  Must be unique within the zone.
-
-                This corresponds to the ``asset_id`` field
-                on the ``request`` instance; if ``request`` is provided, this
-                should not be set.
-            retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any,
-                should be retried.
-            timeout (float): The timeout for this request.
-            metadata (Sequence[Tuple[str, str]]): Strings which should be
-                sent along with the request as metadata.
-
-        Returns:
-            google.api_core.operation_async.AsyncOperation:
-                An object representing a long-running operation.
-
-                The result type for the operation will be :class:`google.cloud.dataplex_v1.types.Asset` An asset represents a cloud resource that is being managed within a lake as a
-                   member of a zone.
-
-        """
-        # Create or coerce a protobuf request object.
-        # - Quick check: If we got a request object, we should *not* have
-        #   gotten any keyword arguments that map to the request.
-        has_flattened_params = any([parent, asset, asset_id])
-        if request is not None and has_flattened_params:
-            raise ValueError("If the `request` argument is set, then none of "
-                             "the individual field arguments should be set.")
-
-        # - Use the request object if provided (there's no risk of modifying the input as
-        #   there are no flattened fields), or create one.
-        if not isinstance(request, service.CreateAssetRequest):
-            request = service.CreateAssetRequest(request)
-
-        # If we have keyword arguments corresponding to fields on the
-        # request, apply these.
-        if parent is not None:
-            request.parent = parent
-        if asset is not None:
-            request.asset = asset
-        if asset_id is not None:
-            request.asset_id = asset_id
-
-        # Wrap the RPC method; this adds retry and timeout information,
-        # and friendly error handling.
-        rpc = self._client._transport._wrapped_methods[self._client._transport.create_asset]
-
-        # Certain fields should be provided within the metadata header;
-        # add these here.
-        metadata = tuple(metadata) + (
-            gapic_v1.routing_header.to_grpc_metadata((
-                ("parent", request.parent),
-            )),
-        )
-
-        # Validate the universe domain.
-        self._client._validate_universe_domain()
-
-        # Send the request.
-        response = await rpc(
-            request,
-            retry=retry,
-            timeout=timeout,
-            metadata=metadata,
-        )
-
-        # Wrap the response in an operation future.
-        response = operation_async.from_gapic(
-            response,
-            self._client._transport.operations_client,
-            resources.Asset,
-            metadata_type=service.OperationMetadata,
-        )
-
-        # Done; return the response.
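# Illustrative sketch (not part of the generated client): the documented
# lake_id/zone_id/asset_id rules can be pre-checked before issuing the RPC.
# The pattern below is an editorial rendering of those rules, not an API
# constant:
#
#     import re
#
#     _ID_PATTERN = re.compile(r"^[a-z](?:[a-z0-9-]{0,61}[a-z0-9])?$")
#
#     def looks_like_valid_id(candidate: str) -> bool:
#         # Lowercase letters, digits and hyphens; starts with a letter;
#         # ends with a letter or digit; 1 to 63 characters total.
#         return _ID_PATTERN.match(candidate) is not None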
- return response - - async def update_asset(self, - request: Optional[Union[service.UpdateAssetRequest, dict]] = None, - *, - asset: Optional[resources.Asset] = None, - update_mask: Optional[field_mask_pb2.FieldMask] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> operation_async.AsyncOperation: - r"""Updates an asset resource. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import dataplex_v1 - - async def sample_update_asset(): - # Create a client - client = dataplex_v1.DataplexServiceAsyncClient() - - # Initialize request argument(s) - asset = dataplex_v1.Asset() - asset.resource_spec.type_ = "BIGQUERY_DATASET" - - request = dataplex_v1.UpdateAssetRequest( - asset=asset, - ) - - # Make the request - operation = client.update_asset(request=request) - - print("Waiting for operation to complete...") - - response = (await operation).result() - - # Handle the response - print(response) - - Args: - request (Optional[Union[google.cloud.dataplex_v1.types.UpdateAssetRequest, dict]]): - The request object. Update asset request. - asset (:class:`google.cloud.dataplex_v1.types.Asset`): - Required. Update description. Only fields specified in - ``update_mask`` are updated. - - This corresponds to the ``asset`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - update_mask (:class:`google.protobuf.field_mask_pb2.FieldMask`): - Required. Mask of fields to update. - This corresponds to the ``update_mask`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.api_core.operation_async.AsyncOperation: - An object representing a long-running operation. - - The result type for the operation will be :class:`google.cloud.dataplex_v1.types.Asset` An asset represents a cloud resource that is being managed within a lake as a - member of a zone. - - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([asset, update_mask]) - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, service.UpdateAssetRequest): - request = service.UpdateAssetRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. 
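# Note (editorial): proto-plus assignment copies the message, so the
# ``request.asset = asset`` below snapshots the caller's Asset; mutating the
# original object afterwards does not change the request that is sent.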
- if asset is not None: - request.asset = asset - if update_mask is not None: - request.update_mask = update_mask - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._client._transport._wrapped_methods[self._client._transport.update_asset] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("asset.name", request.asset.name), - )), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Wrap the response in an operation future. - response = operation_async.from_gapic( - response, - self._client._transport.operations_client, - resources.Asset, - metadata_type=service.OperationMetadata, - ) - - # Done; return the response. - return response - - async def delete_asset(self, - request: Optional[Union[service.DeleteAssetRequest, dict]] = None, - *, - name: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> operation_async.AsyncOperation: - r"""Deletes an asset resource. The referenced storage - resource is detached (default) or deleted based on the - associated Lifecycle policy. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import dataplex_v1 - - async def sample_delete_asset(): - # Create a client - client = dataplex_v1.DataplexServiceAsyncClient() - - # Initialize request argument(s) - request = dataplex_v1.DeleteAssetRequest( - name="name_value", - ) - - # Make the request - operation = client.delete_asset(request=request) - - print("Waiting for operation to complete...") - - response = (await operation).result() - - # Handle the response - print(response) - - Args: - request (Optional[Union[google.cloud.dataplex_v1.types.DeleteAssetRequest, dict]]): - The request object. Delete asset request. - name (:class:`str`): - Required. The resource name of the asset: - ``projects/{project_number}/locations/{location_id}/lakes/{lake_id}/zones/{zone_id}/assets/{asset_id}``. - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.api_core.operation_async.AsyncOperation: - An object representing a long-running operation. - - The result type for the operation will be :class:`google.protobuf.empty_pb2.Empty` A generic empty message that you can re-use to avoid defining duplicated - empty messages in your APIs. A typical example is to - use it as the request or the response type of an API - method. 
For instance: - - service Foo { - rpc Bar(google.protobuf.Empty) returns - (google.protobuf.Empty); - - } - - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([name]) - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, service.DeleteAssetRequest): - request = service.DeleteAssetRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._client._transport._wrapped_methods[self._client._transport.delete_asset] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Wrap the response in an operation future. - response = operation_async.from_gapic( - response, - self._client._transport.operations_client, - empty_pb2.Empty, - metadata_type=service.OperationMetadata, - ) - - # Done; return the response. - return response - - async def list_assets(self, - request: Optional[Union[service.ListAssetsRequest, dict]] = None, - *, - parent: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> pagers.ListAssetsAsyncPager: - r"""Lists asset resources in a zone. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import dataplex_v1 - - async def sample_list_assets(): - # Create a client - client = dataplex_v1.DataplexServiceAsyncClient() - - # Initialize request argument(s) - request = dataplex_v1.ListAssetsRequest( - parent="parent_value", - ) - - # Make the request - page_result = client.list_assets(request=request) - - # Handle the response - async for response in page_result: - print(response) - - Args: - request (Optional[Union[google.cloud.dataplex_v1.types.ListAssetsRequest, dict]]): - The request object. List assets request. - parent (:class:`str`): - Required. The resource name of the parent zone: - ``projects/{project_number}/locations/{location_id}/lakes/{lake_id}/zones/{zone_id}``. - - This corresponds to the ``parent`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. 
- metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.cloud.dataplex_v1.services.dataplex_service.pagers.ListAssetsAsyncPager: - List assets response. - - Iterating over this object will yield - results and resolve additional pages - automatically. - - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([parent]) - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, service.ListAssetsRequest): - request = service.ListAssetsRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if parent is not None: - request.parent = parent - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._client._transport._wrapped_methods[self._client._transport.list_assets] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # This method is paged; wrap the response in a pager, which provides - # an `__aiter__` convenience method. - response = pagers.ListAssetsAsyncPager( - method=rpc, - request=request, - response=response, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - async def get_asset(self, - request: Optional[Union[service.GetAssetRequest, dict]] = None, - *, - name: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> resources.Asset: - r"""Retrieves an asset resource. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import dataplex_v1 - - async def sample_get_asset(): - # Create a client - client = dataplex_v1.DataplexServiceAsyncClient() - - # Initialize request argument(s) - request = dataplex_v1.GetAssetRequest( - name="name_value", - ) - - # Make the request - response = await client.get_asset(request=request) - - # Handle the response - print(response) - - Args: - request (Optional[Union[google.cloud.dataplex_v1.types.GetAssetRequest, dict]]): - The request object. Get asset request. - name (:class:`str`): - Required. The resource name of the asset: - ``projects/{project_number}/locations/{location_id}/lakes/{lake_id}/zones/{zone_id}/assets/{asset_id}``. 
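Besides the element-wise ``async for`` shown in the sample, the async pager also supports page-wise iteration through its ``pages`` property, where each page is one ``ListAssetsResponse``. A sketch, with placeholder IDs:

.. code-block:: python

    import asyncio

    from google.cloud import dataplex_v1

    async def list_assets_by_page():
        client = dataplex_v1.DataplexServiceAsyncClient()

        pager = await client.list_assets(
            parent="projects/my-project/locations/us-central1"
                   "/lakes/my-lake/zones/my-zone",
        )

        # Follow-up pages are fetched lazily as the loop advances.
        async for page in pager.pages:
            print(len(page.assets))

    asyncio.run(list_assets_by_page())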
- - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.cloud.dataplex_v1.types.Asset: - An asset represents a cloud resource - that is being managed within a lake as a - member of a zone. - - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([name]) - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, service.GetAssetRequest): - request = service.GetAssetRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._client._transport._wrapped_methods[self._client._transport.get_asset] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - async def list_asset_actions(self, - request: Optional[Union[service.ListAssetActionsRequest, dict]] = None, - *, - parent: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> pagers.ListAssetActionsAsyncPager: - r"""Lists action resources in an asset. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import dataplex_v1 - - async def sample_list_asset_actions(): - # Create a client - client = dataplex_v1.DataplexServiceAsyncClient() - - # Initialize request argument(s) - request = dataplex_v1.ListAssetActionsRequest( - parent="parent_value", - ) - - # Make the request - page_result = client.list_asset_actions(request=request) - - # Handle the response - async for response in page_result: - print(response) - - Args: - request (Optional[Union[google.cloud.dataplex_v1.types.ListAssetActionsRequest, dict]]): - The request object. List asset actions request. - parent (:class:`str`): - Required. The resource name of the parent asset: - ``projects/{project_number}/locations/{location_id}/lakes/{lake_id}/zones/{zone_id}/assets/{asset_id}``. 
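The ``retry`` and ``timeout`` parameters accepted by each method can be overridden per call. A sketch for ``get_asset``, assuming a google-api-core version whose ``AsyncRetry`` accepts the ``initial``/``maximum``/``timeout`` keywords; the resource IDs are placeholders:

.. code-block:: python

    import asyncio

    from google.api_core import retry_async
    from google.cloud import dataplex_v1

    async def get_asset_with_retry():
        client = dataplex_v1.DataplexServiceAsyncClient()

        asset = await client.get_asset(
            name="projects/my-project/locations/us-central1"
                 "/lakes/my-lake/zones/my-zone/assets/my-asset",
            # Retry transient errors for up to 60s overall; each attempt
            # gets a 30s RPC timeout.
            retry=retry_async.AsyncRetry(initial=0.25, maximum=8.0, timeout=60.0),
            timeout=30.0,
        )
        print(asset.name)

    asyncio.run(get_asset_with_retry())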
- - This corresponds to the ``parent`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.cloud.dataplex_v1.services.dataplex_service.pagers.ListAssetActionsAsyncPager: - List actions response. - - Iterating over this object will yield - results and resolve additional pages - automatically. - - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([parent]) - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, service.ListAssetActionsRequest): - request = service.ListAssetActionsRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if parent is not None: - request.parent = parent - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._client._transport._wrapped_methods[self._client._transport.list_asset_actions] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # This method is paged; wrap the response in a pager, which provides - # an `__aiter__` convenience method. - response = pagers.ListAssetActionsAsyncPager( - method=rpc, - request=request, - response=response, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - async def create_task(self, - request: Optional[Union[service.CreateTaskRequest, dict]] = None, - *, - parent: Optional[str] = None, - task: Optional[tasks.Task] = None, - task_id: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> operation_async.AsyncOperation: - r"""Creates a task resource within a lake. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. 
- # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import dataplex_v1 - - async def sample_create_task(): - # Create a client - client = dataplex_v1.DataplexServiceAsyncClient() - - # Initialize request argument(s) - task = dataplex_v1.Task() - task.spark.main_jar_file_uri = "main_jar_file_uri_value" - task.trigger_spec.schedule = "schedule_value" - task.trigger_spec.type_ = "RECURRING" - task.execution_spec.service_account = "service_account_value" - - request = dataplex_v1.CreateTaskRequest( - parent="parent_value", - task_id="task_id_value", - task=task, - ) - - # Make the request - operation = client.create_task(request=request) - - print("Waiting for operation to complete...") - - response = (await operation).result() - - # Handle the response - print(response) - - Args: - request (Optional[Union[google.cloud.dataplex_v1.types.CreateTaskRequest, dict]]): - The request object. Create task request. - parent (:class:`str`): - Required. The resource name of the parent lake: - ``projects/{project_number}/locations/{location_id}/lakes/{lake_id}``. - - This corresponds to the ``parent`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - task (:class:`google.cloud.dataplex_v1.types.Task`): - Required. Task resource. - This corresponds to the ``task`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - task_id (:class:`str`): - Required. Task identifier. - This corresponds to the ``task_id`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.api_core.operation_async.AsyncOperation: - An object representing a long-running operation. - - The result type for the operation will be - :class:`google.cloud.dataplex_v1.types.Task` A task - represents a user-visible job. - - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([parent, task, task_id]) - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, service.CreateTaskRequest): - request = service.CreateTaskRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if parent is not None: - request.parent = parent - if task is not None: - request.task = task - if task_id is not None: - request.task_id = task_id - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._client._transport._wrapped_methods[self._client._transport.create_task] - - # Certain fields should be provided within the metadata header; - # add these here. 
- metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Wrap the response in an operation future. - response = operation_async.from_gapic( - response, - self._client._transport.operations_client, - tasks.Task, - metadata_type=service.OperationMetadata, - ) - - # Done; return the response. - return response - - async def update_task(self, - request: Optional[Union[service.UpdateTaskRequest, dict]] = None, - *, - task: Optional[tasks.Task] = None, - update_mask: Optional[field_mask_pb2.FieldMask] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> operation_async.AsyncOperation: - r"""Update the task resource. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import dataplex_v1 - - async def sample_update_task(): - # Create a client - client = dataplex_v1.DataplexServiceAsyncClient() - - # Initialize request argument(s) - task = dataplex_v1.Task() - task.spark.main_jar_file_uri = "main_jar_file_uri_value" - task.trigger_spec.schedule = "schedule_value" - task.trigger_spec.type_ = "RECURRING" - task.execution_spec.service_account = "service_account_value" - - request = dataplex_v1.UpdateTaskRequest( - task=task, - ) - - # Make the request - operation = client.update_task(request=request) - - print("Waiting for operation to complete...") - - response = (await operation).result() - - # Handle the response - print(response) - - Args: - request (Optional[Union[google.cloud.dataplex_v1.types.UpdateTaskRequest, dict]]): - The request object. Update task request. - task (:class:`google.cloud.dataplex_v1.types.Task`): - Required. Update description. Only fields specified in - ``update_mask`` are updated. - - This corresponds to the ``task`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - update_mask (:class:`google.protobuf.field_mask_pb2.FieldMask`): - Required. Mask of fields to update. - This corresponds to the ``update_mask`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.api_core.operation_async.AsyncOperation: - An object representing a long-running operation. - - The result type for the operation will be - :class:`google.cloud.dataplex_v1.types.Task` A task - represents a user-visible job. - - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. 
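As the ``has_flattened_params`` check enforces, ``request`` and the flattened fields are mutually exclusive. A sketch of the flattened form of ``create_task``, with placeholder names and an on-demand trigger so that no schedule is required:

.. code-block:: python

    import asyncio

    from google.cloud import dataplex_v1

    async def create_task_flattened():
        client = dataplex_v1.DataplexServiceAsyncClient()

        task = dataplex_v1.Task()
        task.spark.main_jar_file_uri = "gs://my-bucket/my-job.jar"  # placeholder
        task.trigger_spec.type_ = dataplex_v1.Task.TriggerSpec.Type.ON_DEMAND
        task.execution_spec.service_account = (
            "my-sa@my-project.iam.gserviceaccount.com"  # placeholder
        )

        # parent/task/task_id instead of an explicit CreateTaskRequest.
        operation = await client.create_task(
            parent="projects/my-project/locations/us-central1/lakes/my-lake",
            task=task,
            task_id="my-task",
        )
        created = await operation.result()
        print(created.name)

    asyncio.run(create_task_flattened())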
-        has_flattened_params = any([task, update_mask])
-        if request is not None and has_flattened_params:
-            raise ValueError("If the `request` argument is set, then none of "
-                             "the individual field arguments should be set.")
-
-        # - Use the request object if provided (there's no risk of modifying the input as
-        #   there are no flattened fields), or create one.
-        if not isinstance(request, service.UpdateTaskRequest):
-            request = service.UpdateTaskRequest(request)
-
-        # If we have keyword arguments corresponding to fields on the
-        # request, apply these.
-        if task is not None:
-            request.task = task
-        if update_mask is not None:
-            request.update_mask = update_mask
-
-        # Wrap the RPC method; this adds retry and timeout information,
-        # and friendly error handling.
-        rpc = self._client._transport._wrapped_methods[self._client._transport.update_task]
-
-        # Certain fields should be provided within the metadata header;
-        # add these here.
-        metadata = tuple(metadata) + (
-            gapic_v1.routing_header.to_grpc_metadata((
-                ("task.name", request.task.name),
-            )),
-        )
-
-        # Validate the universe domain.
-        self._client._validate_universe_domain()
-
-        # Send the request.
-        response = await rpc(
-            request,
-            retry=retry,
-            timeout=timeout,
-            metadata=metadata,
-        )
-
-        # Wrap the response in an operation future.
-        response = operation_async.from_gapic(
-            response,
-            self._client._transport.operations_client,
-            tasks.Task,
-            metadata_type=service.OperationMetadata,
-        )
-
-        # Done; return the response.
-        return response
-
-    async def delete_task(self,
-            request: Optional[Union[service.DeleteTaskRequest, dict]] = None,
-            *,
-            name: Optional[str] = None,
-            retry: OptionalRetry = gapic_v1.method.DEFAULT,
-            timeout: Union[float, object] = gapic_v1.method.DEFAULT,
-            metadata: Sequence[Tuple[str, str]] = (),
-            ) -> operation_async.AsyncOperation:
-        r"""Delete the task resource.
-
-        .. code-block:: python
-
-            # This snippet has been automatically generated and should be regarded as a
-            # code template only.
-            # It will require modifications to work:
-            # - It may require correct/in-range values for request initialization.
-            # - It may require specifying regional endpoints when creating the service
-            # client as shown in:
-            # https://googleapis.dev/python/google-api-core/latest/client_options.html
-            from google.cloud import dataplex_v1
-
-            async def sample_delete_task():
-                # Create a client
-                client = dataplex_v1.DataplexServiceAsyncClient()
-
-                # Initialize request argument(s)
-                request = dataplex_v1.DeleteTaskRequest(
-                    name="name_value",
-                )
-
-                # Make the request
-                operation = client.delete_task(request=request)
-
-                print("Waiting for operation to complete...")
-
-                response = (await operation).result()
-
-                # Handle the response
-                print(response)
-
-        Args:
-            request (Optional[Union[google.cloud.dataplex_v1.types.DeleteTaskRequest, dict]]):
-                The request object. Delete task request.
-            name (:class:`str`):
-                Required. The resource name of the task:
-                ``projects/{project_number}/locations/{location_id}/lakes/{lake_id}/tasks/{task_id}``.
-
-                This corresponds to the ``name`` field
-                on the ``request`` instance; if ``request`` is provided, this
-                should not be set.
-            retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any,
-                should be retried.
-            timeout (float): The timeout for this request.
-            metadata (Sequence[Tuple[str, str]]): Strings which should be
-                sent along with the request as metadata.
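Since ``update_task`` applies only the fields named in ``update_mask``, a partial update needs just the identifying ``name`` plus the changed fields. A sketch under that assumption, with placeholder IDs:

.. code-block:: python

    import asyncio

    from google.cloud import dataplex_v1
    from google.protobuf import field_mask_pb2

    async def update_task_description():
        client = dataplex_v1.DataplexServiceAsyncClient()

        task = dataplex_v1.Task()
        task.name = ("projects/my-project/locations/us-central1"
                     "/lakes/my-lake/tasks/my-task")  # placeholder
        task.description = "Nightly Spark ingestion"

        # Only `description` is written; other fields on `task` are ignored.
        operation = await client.update_task(
            task=task,
            update_mask=field_mask_pb2.FieldMask(paths=["description"]),
        )
        updated = await operation.result()
        print(updated.description)

    asyncio.run(update_task_description())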
- - Returns: - google.api_core.operation_async.AsyncOperation: - An object representing a long-running operation. - - The result type for the operation will be :class:`google.protobuf.empty_pb2.Empty` A generic empty message that you can re-use to avoid defining duplicated - empty messages in your APIs. A typical example is to - use it as the request or the response type of an API - method. For instance: - - service Foo { - rpc Bar(google.protobuf.Empty) returns - (google.protobuf.Empty); - - } - - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([name]) - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, service.DeleteTaskRequest): - request = service.DeleteTaskRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._client._transport._wrapped_methods[self._client._transport.delete_task] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Wrap the response in an operation future. - response = operation_async.from_gapic( - response, - self._client._transport.operations_client, - empty_pb2.Empty, - metadata_type=service.OperationMetadata, - ) - - # Done; return the response. - return response - - async def list_tasks(self, - request: Optional[Union[service.ListTasksRequest, dict]] = None, - *, - parent: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> pagers.ListTasksAsyncPager: - r"""Lists tasks under the given lake. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import dataplex_v1 - - async def sample_list_tasks(): - # Create a client - client = dataplex_v1.DataplexServiceAsyncClient() - - # Initialize request argument(s) - request = dataplex_v1.ListTasksRequest( - parent="parent_value", - ) - - # Make the request - page_result = client.list_tasks(request=request) - - # Handle the response - async for response in page_result: - print(response) - - Args: - request (Optional[Union[google.cloud.dataplex_v1.types.ListTasksRequest, dict]]): - The request object. List tasks request. - parent (:class:`str`): - Required. 
The resource name of the parent lake: - ``projects/{project_number}/locations/{location_id}/lakes/{lake_id}``. - - This corresponds to the ``parent`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.cloud.dataplex_v1.services.dataplex_service.pagers.ListTasksAsyncPager: - List tasks response. - - Iterating over this object will yield - results and resolve additional pages - automatically. - - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([parent]) - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, service.ListTasksRequest): - request = service.ListTasksRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if parent is not None: - request.parent = parent - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._client._transport._wrapped_methods[self._client._transport.list_tasks] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # This method is paged; wrap the response in a pager, which provides - # an `__aiter__` convenience method. - response = pagers.ListTasksAsyncPager( - method=rpc, - request=request, - response=response, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - async def get_task(self, - request: Optional[Union[service.GetTaskRequest, dict]] = None, - *, - name: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> tasks.Task: - r"""Get task resource. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. 
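The pager returned by ``list_tasks`` can also be drained into a list in a single expression, since the async pager fetches additional pages transparently during iteration. A sketch with a placeholder parent lake:

.. code-block:: python

    import asyncio

    from google.cloud import dataplex_v1

    async def collect_task_names() -> list:
        client = dataplex_v1.DataplexServiceAsyncClient()

        pager = await client.list_tasks(
            parent="projects/my-project/locations/us-central1/lakes/my-lake",
        )
        # Additional pages are requested as iteration runs off the end
        # of the current one.
        return [task.name async for task in pager]

    print(asyncio.run(collect_task_names()))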
-            # - It may require specifying regional endpoints when creating the service
-            # client as shown in:
-            # https://googleapis.dev/python/google-api-core/latest/client_options.html
-            from google.cloud import dataplex_v1
-
-            async def sample_get_task():
-                # Create a client
-                client = dataplex_v1.DataplexServiceAsyncClient()
-
-                # Initialize request argument(s)
-                request = dataplex_v1.GetTaskRequest(
-                    name="name_value",
-                )
-
-                # Make the request
-                response = await client.get_task(request=request)
-
-                # Handle the response
-                print(response)
-
-        Args:
-            request (Optional[Union[google.cloud.dataplex_v1.types.GetTaskRequest, dict]]):
-                The request object. Get task request.
-            name (:class:`str`):
-                Required. The resource name of the task:
-                ``projects/{project_number}/locations/{location_id}/lakes/{lake_id}/tasks/{task_id}``.
-
-                This corresponds to the ``name`` field
-                on the ``request`` instance; if ``request`` is provided, this
-                should not be set.
-            retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any,
-                should be retried.
-            timeout (float): The timeout for this request.
-            metadata (Sequence[Tuple[str, str]]): Strings which should be
-                sent along with the request as metadata.
-
-        Returns:
-            google.cloud.dataplex_v1.types.Task:
-                A task represents a user-visible job.
-        """
-        # Create or coerce a protobuf request object.
-        # - Quick check: If we got a request object, we should *not* have
-        #   gotten any keyword arguments that map to the request.
-        has_flattened_params = any([name])
-        if request is not None and has_flattened_params:
-            raise ValueError("If the `request` argument is set, then none of "
-                             "the individual field arguments should be set.")
-
-        # - Use the request object if provided (there's no risk of modifying the input as
-        #   there are no flattened fields), or create one.
-        if not isinstance(request, service.GetTaskRequest):
-            request = service.GetTaskRequest(request)
-
-        # If we have keyword arguments corresponding to fields on the
-        # request, apply these.
-        if name is not None:
-            request.name = name
-
-        # Wrap the RPC method; this adds retry and timeout information,
-        # and friendly error handling.
-        rpc = self._client._transport._wrapped_methods[self._client._transport.get_task]
-
-        # Certain fields should be provided within the metadata header;
-        # add these here.
-        metadata = tuple(metadata) + (
-            gapic_v1.routing_header.to_grpc_metadata((
-                ("name", request.name),
-            )),
-        )
-
-        # Validate the universe domain.
-        self._client._validate_universe_domain()
-
-        # Send the request.
-        response = await rpc(
-            request,
-            retry=retry,
-            timeout=timeout,
-            metadata=metadata,
-        )
-
-        # Done; return the response.
-        return response
-
-    async def list_jobs(self,
-            request: Optional[Union[service.ListJobsRequest, dict]] = None,
-            *,
-            parent: Optional[str] = None,
-            retry: OptionalRetry = gapic_v1.method.DEFAULT,
-            timeout: Union[float, object] = gapic_v1.method.DEFAULT,
-            metadata: Sequence[Tuple[str, str]] = (),
-            ) -> pagers.ListJobsAsyncPager:
-        r"""Lists Jobs under the given task.
-
-        .. code-block:: python
-
-            # This snippet has been automatically generated and should be regarded as a
-            # code template only.
-            # It will require modifications to work:
-            # - It may require correct/in-range values for request initialization.
- # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import dataplex_v1 - - async def sample_list_jobs(): - # Create a client - client = dataplex_v1.DataplexServiceAsyncClient() - - # Initialize request argument(s) - request = dataplex_v1.ListJobsRequest( - parent="parent_value", - ) - - # Make the request - page_result = client.list_jobs(request=request) - - # Handle the response - async for response in page_result: - print(response) - - Args: - request (Optional[Union[google.cloud.dataplex_v1.types.ListJobsRequest, dict]]): - The request object. List jobs request. - parent (:class:`str`): - Required. The resource name of the parent environment: - ``projects/{project_number}/locations/{location_id}/lakes/{lake_id}/tasks/{task_id}``. - - This corresponds to the ``parent`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.cloud.dataplex_v1.services.dataplex_service.pagers.ListJobsAsyncPager: - List jobs response. - - Iterating over this object will yield - results and resolve additional pages - automatically. - - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([parent]) - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, service.ListJobsRequest): - request = service.ListJobsRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if parent is not None: - request.parent = parent - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._client._transport._wrapped_methods[self._client._transport.list_jobs] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # This method is paged; wrap the response in a pager, which provides - # an `__aiter__` convenience method. - response = pagers.ListJobsAsyncPager( - method=rpc, - request=request, - response=response, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - async def run_task(self, - request: Optional[Union[service.RunTaskRequest, dict]] = None, - *, - name: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> service.RunTaskResponse: - r"""Run an on demand execution of a Task. - - .. 
code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import dataplex_v1 - - async def sample_run_task(): - # Create a client - client = dataplex_v1.DataplexServiceAsyncClient() - - # Initialize request argument(s) - request = dataplex_v1.RunTaskRequest( - name="name_value", - ) - - # Make the request - response = await client.run_task(request=request) - - # Handle the response - print(response) - - Args: - request (Optional[Union[google.cloud.dataplex_v1.types.RunTaskRequest, dict]]): - The request object. - name (:class:`str`): - Required. The resource name of the task: - ``projects/{project_number}/locations/{location_id}/lakes/{lake_id}/tasks/{task_id}``. - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.cloud.dataplex_v1.types.RunTaskResponse: - - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([name]) - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, service.RunTaskRequest): - request = service.RunTaskRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._client._transport._wrapped_methods[self._client._transport.run_task] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - async def get_job(self, - request: Optional[Union[service.GetJobRequest, dict]] = None, - *, - name: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> tasks.Job: - r"""Get job resource. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. 
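Unlike the create/update/delete methods, ``run_task`` returns its response directly rather than as a long-running operation. A sketch, with placeholder IDs, assuming the spawned job is exposed on the response's ``job`` field:

.. code-block:: python

    import asyncio

    from google.cloud import dataplex_v1

    async def run_task_now():
        client = dataplex_v1.DataplexServiceAsyncClient()

        # No operation future here; the RPC returns RunTaskResponse.
        response = await client.run_task(
            name="projects/my-project/locations/us-central1"
                 "/lakes/my-lake/tasks/my-task",
        )
        print(response.job.name)  # assumes RunTaskResponse carries the job

    asyncio.run(run_task_now())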
- # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import dataplex_v1 - - async def sample_get_job(): - # Create a client - client = dataplex_v1.DataplexServiceAsyncClient() - - # Initialize request argument(s) - request = dataplex_v1.GetJobRequest( - name="name_value", - ) - - # Make the request - response = await client.get_job(request=request) - - # Handle the response - print(response) - - Args: - request (Optional[Union[google.cloud.dataplex_v1.types.GetJobRequest, dict]]): - The request object. Get job request. - name (:class:`str`): - Required. The resource name of the job: - ``projects/{project_number}/locations/{location_id}/lakes/{lake_id}/tasks/{task_id}/jobs/{job_id}``. - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.cloud.dataplex_v1.types.Job: - A job represents an instance of a - task. - - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([name]) - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, service.GetJobRequest): - request = service.GetJobRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._client._transport._wrapped_methods[self._client._transport.get_job] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - async def cancel_job(self, - request: Optional[Union[service.CancelJobRequest, dict]] = None, - *, - name: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> None: - r"""Cancel jobs running for the task resource. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. 
-            # - It may require specifying regional endpoints when creating the service
-            # client as shown in:
-            # https://googleapis.dev/python/google-api-core/latest/client_options.html
-            from google.cloud import dataplex_v1
-
-            async def sample_cancel_job():
-                # Create a client
-                client = dataplex_v1.DataplexServiceAsyncClient()
-
-                # Initialize request argument(s)
-                request = dataplex_v1.CancelJobRequest(
-                    name="name_value",
-                )
-
-                # Make the request
-                await client.cancel_job(request=request)
-
-        Args:
-            request (Optional[Union[google.cloud.dataplex_v1.types.CancelJobRequest, dict]]):
-                The request object. Cancel task jobs.
-            name (:class:`str`):
-                Required. The resource name of the job:
-                ``projects/{project_number}/locations/{location_id}/lakes/{lake_id}/tasks/{task_id}/jobs/{job_id}``.
-
-                This corresponds to the ``name`` field
-                on the ``request`` instance; if ``request`` is provided, this
-                should not be set.
-            retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any,
-                should be retried.
-            timeout (float): The timeout for this request.
-            metadata (Sequence[Tuple[str, str]]): Strings which should be
-                sent along with the request as metadata.
-        """
-        # Create or coerce a protobuf request object.
-        # - Quick check: If we got a request object, we should *not* have
-        #   gotten any keyword arguments that map to the request.
-        has_flattened_params = any([name])
-        if request is not None and has_flattened_params:
-            raise ValueError("If the `request` argument is set, then none of "
-                             "the individual field arguments should be set.")
-
-        # - Use the request object if provided (there's no risk of modifying the input as
-        #   there are no flattened fields), or create one.
-        if not isinstance(request, service.CancelJobRequest):
-            request = service.CancelJobRequest(request)
-
-        # If we have keyword arguments corresponding to fields on the
-        # request, apply these.
-        if name is not None:
-            request.name = name
-
-        # Wrap the RPC method; this adds retry and timeout information,
-        # and friendly error handling.
-        rpc = self._client._transport._wrapped_methods[self._client._transport.cancel_job]
-
-        # Certain fields should be provided within the metadata header;
-        # add these here.
-        metadata = tuple(metadata) + (
-            gapic_v1.routing_header.to_grpc_metadata((
-                ("name", request.name),
-            )),
-        )
-
-        # Validate the universe domain.
-        self._client._validate_universe_domain()
-
-        # Send the request.
-        await rpc(
-            request,
-            retry=retry,
-            timeout=timeout,
-            metadata=metadata,
-        )
-
-    async def create_environment(self,
-            request: Optional[Union[service.CreateEnvironmentRequest, dict]] = None,
-            *,
-            parent: Optional[str] = None,
-            environment: Optional[analyze.Environment] = None,
-            environment_id: Optional[str] = None,
-            retry: OptionalRetry = gapic_v1.method.DEFAULT,
-            timeout: Union[float, object] = gapic_v1.method.DEFAULT,
-            metadata: Sequence[Tuple[str, str]] = (),
-            ) -> operation_async.AsyncOperation:
-        r"""Create an environment resource.
-
-        .. code-block:: python
-
-            # This snippet has been automatically generated and should be regarded as a
-            # code template only.
-            # It will require modifications to work:
-            # - It may require correct/in-range values for request initialization.
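Because ``cancel_job`` returns ``None``, the only way to observe the outcome is to re-read the job. A sketch combining it with ``get_job``, placeholder IDs throughout:

.. code-block:: python

    import asyncio

    from google.cloud import dataplex_v1

    async def cancel_and_check(job_name: str):
        client = dataplex_v1.DataplexServiceAsyncClient()

        await client.cancel_job(name=job_name)

        # Re-read the job to see its post-cancellation state.
        job = await client.get_job(name=job_name)
        print(job.state)

    asyncio.run(cancel_and_check(
        "projects/my-project/locations/us-central1"
        "/lakes/my-lake/tasks/my-task/jobs/my-job"
    ))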
- # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import dataplex_v1 - - async def sample_create_environment(): - # Create a client - client = dataplex_v1.DataplexServiceAsyncClient() - - # Initialize request argument(s) - environment = dataplex_v1.Environment() - environment.infrastructure_spec.os_image.image_version = "image_version_value" - - request = dataplex_v1.CreateEnvironmentRequest( - parent="parent_value", - environment_id="environment_id_value", - environment=environment, - ) - - # Make the request - operation = client.create_environment(request=request) - - print("Waiting for operation to complete...") - - response = (await operation).result() - - # Handle the response - print(response) - - Args: - request (Optional[Union[google.cloud.dataplex_v1.types.CreateEnvironmentRequest, dict]]): - The request object. Create environment request. - parent (:class:`str`): - Required. The resource name of the parent lake: - ``projects/{project_id}/locations/{location_id}/lakes/{lake_id}``. - - This corresponds to the ``parent`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - environment (:class:`google.cloud.dataplex_v1.types.Environment`): - Required. Environment resource. - This corresponds to the ``environment`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - environment_id (:class:`str`): - Required. Environment identifier. - - - Must contain only lowercase letters, numbers and - hyphens. - - Must start with a letter. - - Must be between 1-63 characters. - - Must end with a number or a letter. - - Must be unique within the lake. - - This corresponds to the ``environment_id`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.api_core.operation_async.AsyncOperation: - An object representing a long-running operation. - - The result type for the operation will be :class:`google.cloud.dataplex_v1.types.Environment` Environment represents a user-visible compute infrastructure for analytics - within a lake. - - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([parent, environment, environment_id]) - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, service.CreateEnvironmentRequest): - request = service.CreateEnvironmentRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. 
- if parent is not None: - request.parent = parent - if environment is not None: - request.environment = environment - if environment_id is not None: - request.environment_id = environment_id - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._client._transport._wrapped_methods[self._client._transport.create_environment] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Wrap the response in an operation future. - response = operation_async.from_gapic( - response, - self._client._transport.operations_client, - analyze.Environment, - metadata_type=service.OperationMetadata, - ) - - # Done; return the response. - return response - - async def update_environment(self, - request: Optional[Union[service.UpdateEnvironmentRequest, dict]] = None, - *, - environment: Optional[analyze.Environment] = None, - update_mask: Optional[field_mask_pb2.FieldMask] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> operation_async.AsyncOperation: - r"""Update the environment resource. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import dataplex_v1 - - async def sample_update_environment(): - # Create a client - client = dataplex_v1.DataplexServiceAsyncClient() - - # Initialize request argument(s) - environment = dataplex_v1.Environment() - environment.infrastructure_spec.os_image.image_version = "image_version_value" - - request = dataplex_v1.UpdateEnvironmentRequest( - environment=environment, - ) - - # Make the request - operation = client.update_environment(request=request) - - print("Waiting for operation to complete...") - - response = (await operation).result() - - # Handle the response - print(response) - - Args: - request (Optional[Union[google.cloud.dataplex_v1.types.UpdateEnvironmentRequest, dict]]): - The request object. Update environment request. - environment (:class:`google.cloud.dataplex_v1.types.Environment`): - Required. Update description. Only fields specified in - ``update_mask`` are updated. - - This corresponds to the ``environment`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - update_mask (:class:`google.protobuf.field_mask_pb2.FieldMask`): - Required. Mask of fields to update. - This corresponds to the ``update_mask`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. 
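A flattened-argument sketch for ``create_environment``; the identifier ``my-env`` satisfies the rules listed above (lowercase letters, digits, and hyphens, 1-63 characters, starting with a letter), and the image version is a placeholder:

.. code-block:: python

    import asyncio

    from google.cloud import dataplex_v1

    async def create_environment_flattened():
        client = dataplex_v1.DataplexServiceAsyncClient()

        environment = dataplex_v1.Environment()
        environment.infrastructure_spec.os_image.image_version = "1.0"  # placeholder

        operation = await client.create_environment(
            parent="projects/my-project/locations/us-central1/lakes/my-lake",
            environment=environment,
            environment_id="my-env",
        )
        env = await operation.result()
        print(env.name)

    asyncio.run(create_environment_flattened())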
- - Returns: - google.api_core.operation_async.AsyncOperation: - An object representing a long-running operation. - - The result type for the operation will be :class:`google.cloud.dataplex_v1.types.Environment` Environment represents a user-visible compute infrastructure for analytics - within a lake. - - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([environment, update_mask]) - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, service.UpdateEnvironmentRequest): - request = service.UpdateEnvironmentRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if environment is not None: - request.environment = environment - if update_mask is not None: - request.update_mask = update_mask - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._client._transport._wrapped_methods[self._client._transport.update_environment] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("environment.name", request.environment.name), - )), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Wrap the response in an operation future. - response = operation_async.from_gapic( - response, - self._client._transport.operations_client, - analyze.Environment, - metadata_type=service.OperationMetadata, - ) - - # Done; return the response. - return response - - async def delete_environment(self, - request: Optional[Union[service.DeleteEnvironmentRequest, dict]] = None, - *, - name: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> operation_async.AsyncOperation: - r"""Delete the environment resource. All the child - resources must have been deleted before environment - deletion can be initiated. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. 
- # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import dataplex_v1 - - async def sample_delete_environment(): - # Create a client - client = dataplex_v1.DataplexServiceAsyncClient() - - # Initialize request argument(s) - request = dataplex_v1.DeleteEnvironmentRequest( - name="name_value", - ) - - # Make the request - operation = client.delete_environment(request=request) - - print("Waiting for operation to complete...") - - response = (await operation).result() - - # Handle the response - print(response) - - Args: - request (Optional[Union[google.cloud.dataplex_v1.types.DeleteEnvironmentRequest, dict]]): - The request object. Delete environment request. - name (:class:`str`): - Required. The resource name of the environment: - ``projects/{project_id}/locations/{location_id}/lakes/{lake_id}/environments/{environment_id}``. - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.api_core.operation_async.AsyncOperation: - An object representing a long-running operation. - - The result type for the operation will be :class:`google.protobuf.empty_pb2.Empty` A generic empty message that you can re-use to avoid defining duplicated - empty messages in your APIs. A typical example is to - use it as the request or the response type of an API - method. For instance: - - service Foo { - rpc Bar(google.protobuf.Empty) returns - (google.protobuf.Empty); - - } - - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([name]) - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, service.DeleteEnvironmentRequest): - request = service.DeleteEnvironmentRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._client._transport._wrapped_methods[self._client._transport.delete_environment] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Wrap the response in an operation future. - response = operation_async.from_gapic( - response, - self._client._transport.operations_client, - empty_pb2.Empty, - metadata_type=service.OperationMetadata, - ) - - # Done; return the response. 
- return response - - async def list_environments(self, - request: Optional[Union[service.ListEnvironmentsRequest, dict]] = None, - *, - parent: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> pagers.ListEnvironmentsAsyncPager: - r"""Lists environments under the given lake. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import dataplex_v1 - - async def sample_list_environments(): - # Create a client - client = dataplex_v1.DataplexServiceAsyncClient() - - # Initialize request argument(s) - request = dataplex_v1.ListEnvironmentsRequest( - parent="parent_value", - ) - - # Make the request - page_result = client.list_environments(request=request) - - # Handle the response - async for response in page_result: - print(response) - - Args: - request (Optional[Union[google.cloud.dataplex_v1.types.ListEnvironmentsRequest, dict]]): - The request object. List environments request. - parent (:class:`str`): - Required. The resource name of the parent lake: - ``projects/{project_id}/locations/{location_id}/lakes/{lake_id}``. - - This corresponds to the ``parent`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.cloud.dataplex_v1.services.dataplex_service.pagers.ListEnvironmentsAsyncPager: - List environments response. - - Iterating over this object will yield - results and resolve additional pages - automatically. - - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([parent]) - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, service.ListEnvironmentsRequest): - request = service.ListEnvironmentsRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if parent is not None: - request.parent = parent - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._client._transport._wrapped_methods[self._client._transport.list_environments] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. 
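Since all child resources must be deleted before ``delete_environment`` can succeed, a caller may want to surface that case distinctly. A sketch that assumes the service reports it as ``FAILED_PRECONDITION``; the docstring does not pin down the exact error code, so treat the except clause as an assumption:

.. code-block:: python

    import asyncio

    from google.api_core import exceptions
    from google.cloud import dataplex_v1

    async def delete_environment_safely(name: str):
        client = dataplex_v1.DataplexServiceAsyncClient()

        try:
            operation = await client.delete_environment(name=name)
            await operation.result()
        except exceptions.FailedPrecondition:
            # Assumed mapping: child resources still present.
            print("delete the environment's child resources first")

    asyncio.run(delete_environment_safely(
        "projects/my-project/locations/us-central1"
        "/lakes/my-lake/environments/my-env"  # placeholder IDs
    ))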
- response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # This method is paged; wrap the response in a pager, which provides - # an `__aiter__` convenience method. - response = pagers.ListEnvironmentsAsyncPager( - method=rpc, - request=request, - response=response, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - async def get_environment(self, - request: Optional[Union[service.GetEnvironmentRequest, dict]] = None, - *, - name: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> analyze.Environment: - r"""Get environment resource. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import dataplex_v1 - - async def sample_get_environment(): - # Create a client - client = dataplex_v1.DataplexServiceAsyncClient() - - # Initialize request argument(s) - request = dataplex_v1.GetEnvironmentRequest( - name="name_value", - ) - - # Make the request - response = await client.get_environment(request=request) - - # Handle the response - print(response) - - Args: - request (Optional[Union[google.cloud.dataplex_v1.types.GetEnvironmentRequest, dict]]): - The request object. Get environment request. - name (:class:`str`): - Required. The resource name of the environment: - ``projects/{project_id}/locations/{location_id}/lakes/{lake_id}/environments/{environment_id}``. - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.cloud.dataplex_v1.types.Environment: - Environment represents a user-visible - compute infrastructure for analytics - within a lake. - - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([name]) - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, service.GetEnvironmentRequest): - request = service.GetEnvironmentRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._client._transport._wrapped_methods[self._client._transport.get_environment] - - # Certain fields should be provided within the metadata header; - # add these here. 
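-        # Editor's note: to_grpc_metadata() emits an "x-goog-request-params"
-        # routing header; e.g. (illustrative value only) a request whose name is
-        # "projects/p/locations/l/lakes/k/environments/e" carries that resource
-        # name in the header so the service can route the call to the right
-        # regional backend.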
-        metadata = tuple(metadata) + (
-            gapic_v1.routing_header.to_grpc_metadata((
-                ("name", request.name),
-            )),
-        )
-
-        # Validate the universe domain.
-        self._client._validate_universe_domain()
-
-        # Send the request.
-        response = await rpc(
-            request,
-            retry=retry,
-            timeout=timeout,
-            metadata=metadata,
-        )
-
-        # Done; return the response.
-        return response
-
-    async def list_sessions(self,
-            request: Optional[Union[service.ListSessionsRequest, dict]] = None,
-            *,
-            parent: Optional[str] = None,
-            retry: OptionalRetry = gapic_v1.method.DEFAULT,
-            timeout: Union[float, object] = gapic_v1.method.DEFAULT,
-            metadata: Sequence[Tuple[str, str]] = (),
-            ) -> pagers.ListSessionsAsyncPager:
-        r"""Lists session resources in an environment.
-
-        .. code-block:: python
-
-            # This snippet has been automatically generated and should be regarded as a
-            # code template only.
-            # It will require modifications to work:
-            # - It may require correct/in-range values for request initialization.
-            # - It may require specifying regional endpoints when creating the service
-            #   client as shown in:
-            #   https://googleapis.dev/python/google-api-core/latest/client_options.html
-            from google.cloud import dataplex_v1
-
-            async def sample_list_sessions():
-                # Create a client
-                client = dataplex_v1.DataplexServiceAsyncClient()
-
-                # Initialize request argument(s)
-                request = dataplex_v1.ListSessionsRequest(
-                    parent="parent_value",
-                )
-
-                # Make the request
-                page_result = client.list_sessions(request=request)
-
-                # Handle the response
-                async for response in page_result:
-                    print(response)
-
-        Args:
-            request (Optional[Union[google.cloud.dataplex_v1.types.ListSessionsRequest, dict]]):
-                The request object. List sessions request.
-            parent (:class:`str`):
-                Required. The resource name of the parent environment:
-                ``projects/{project_number}/locations/{location_id}/lakes/{lake_id}/environments/{environment_id}``.
-
-                This corresponds to the ``parent`` field
-                on the ``request`` instance; if ``request`` is provided, this
-                should not be set.
-            retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any,
-                should be retried.
-            timeout (float): The timeout for this request.
-            metadata (Sequence[Tuple[str, str]]): Strings which should be
-                sent along with the request as metadata.
-
-        Returns:
-            google.cloud.dataplex_v1.services.dataplex_service.pagers.ListSessionsAsyncPager:
-                List sessions response.
-
-                Iterating over this object will yield
-                results and resolve additional pages
-                automatically.
-
-        """
-        # Create or coerce a protobuf request object.
-        # - Quick check: If we got a request object, we should *not* have
-        #   gotten any keyword arguments that map to the request.
-        has_flattened_params = any([parent])
-        if request is not None and has_flattened_params:
-            raise ValueError("If the `request` argument is set, then none of "
-                             "the individual field arguments should be set.")
-
-        # - Use the request object if provided (there's no risk of modifying the input as
-        #   there are no flattened fields), or create one.
-        if not isinstance(request, service.ListSessionsRequest):
-            request = service.ListSessionsRequest(request)
-
-        # If we have keyword arguments corresponding to fields on the
-        # request, apply these.
-        if parent is not None:
-            request.parent = parent
-
-        # Wrap the RPC method; this adds retry and timeout information,
-        # and friendly error handling.
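-        # Editor's note: _wrapped_methods is populated by the transport at
-        # construction time; each entry pairs the raw gRPC stub with that
-        # method's default retry/timeout policy, which is what the
-        # gapic_v1.method.DEFAULT sentinels above fall back to.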
- rpc = self._client._transport._wrapped_methods[self._client._transport.list_sessions] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # This method is paged; wrap the response in a pager, which provides - # an `__aiter__` convenience method. - response = pagers.ListSessionsAsyncPager( - method=rpc, - request=request, - response=response, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - async def list_operations( - self, - request: Optional[operations_pb2.ListOperationsRequest] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> operations_pb2.ListOperationsResponse: - r"""Lists operations that match the specified filter in the request. - - Args: - request (:class:`~.operations_pb2.ListOperationsRequest`): - The request object. Request message for - `ListOperations` method. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, - if any, should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - Returns: - ~.operations_pb2.ListOperationsResponse: - Response message for ``ListOperations`` method. - """ - # Create or coerce a protobuf request object. - # The request isn't a proto-plus wrapped type, - # so it must be constructed via keyword expansion. - if isinstance(request, dict): - request = operations_pb2.ListOperationsRequest(**request) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self.transport._wrapped_methods[self._client._transport.list_operations] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata( - (("name", request.name),)), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. - response = await rpc( - request, retry=retry, timeout=timeout, metadata=metadata,) - - # Done; return the response. - return response - - async def get_operation( - self, - request: Optional[operations_pb2.GetOperationRequest] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> operations_pb2.Operation: - r"""Gets the latest state of a long-running operation. - - Args: - request (:class:`~.operations_pb2.GetOperationRequest`): - The request object. Request message for - `GetOperation` method. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, - if any, should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - Returns: - ~.operations_pb2.Operation: - An ``Operation`` object. - """ - # Create or coerce a protobuf request object. - # The request isn't a proto-plus wrapped type, - # so it must be constructed via keyword expansion. 
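-        # Editor's illustration (values assumed): a dict such as
-        #   {"name": "projects/p/locations/l/operations/op-123"}
-        # is expanded below into operations_pb2.GetOperationRequest(name=...),
-        # since operations_pb2 messages are plain protobuf, not proto-plus.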
- if isinstance(request, dict): - request = operations_pb2.GetOperationRequest(**request) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self.transport._wrapped_methods[self._client._transport.get_operation] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata( - (("name", request.name),)), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. - response = await rpc( - request, retry=retry, timeout=timeout, metadata=metadata,) - - # Done; return the response. - return response - - async def delete_operation( - self, - request: Optional[operations_pb2.DeleteOperationRequest] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> None: - r"""Deletes a long-running operation. - - This method indicates that the client is no longer interested - in the operation result. It does not cancel the operation. - If the server doesn't support this method, it returns - `google.rpc.Code.UNIMPLEMENTED`. - - Args: - request (:class:`~.operations_pb2.DeleteOperationRequest`): - The request object. Request message for - `DeleteOperation` method. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, - if any, should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - Returns: - None - """ - # Create or coerce a protobuf request object. - # The request isn't a proto-plus wrapped type, - # so it must be constructed via keyword expansion. - if isinstance(request, dict): - request = operations_pb2.DeleteOperationRequest(**request) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self.transport._wrapped_methods[self._client._transport.delete_operation] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata( - (("name", request.name),)), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. - await rpc(request, retry=retry, timeout=timeout, metadata=metadata,) - - async def cancel_operation( - self, - request: Optional[operations_pb2.CancelOperationRequest] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> None: - r"""Starts asynchronous cancellation on a long-running operation. - - The server makes a best effort to cancel the operation, but success - is not guaranteed. If the server doesn't support this method, it returns - `google.rpc.Code.UNIMPLEMENTED`. - - Args: - request (:class:`~.operations_pb2.CancelOperationRequest`): - The request object. Request message for - `CancelOperation` method. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, - if any, should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - Returns: - None - """ - # Create or coerce a protobuf request object. 
- # The request isn't a proto-plus wrapped type, - # so it must be constructed via keyword expansion. - if isinstance(request, dict): - request = operations_pb2.CancelOperationRequest(**request) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self.transport._wrapped_methods[self._client._transport.cancel_operation] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata( - (("name", request.name),)), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. - await rpc(request, retry=retry, timeout=timeout, metadata=metadata,) - - async def get_location( - self, - request: Optional[locations_pb2.GetLocationRequest] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> locations_pb2.Location: - r"""Gets information about a location. - - Args: - request (:class:`~.location_pb2.GetLocationRequest`): - The request object. Request message for - `GetLocation` method. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, - if any, should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - Returns: - ~.location_pb2.Location: - Location object. - """ - # Create or coerce a protobuf request object. - # The request isn't a proto-plus wrapped type, - # so it must be constructed via keyword expansion. - if isinstance(request, dict): - request = locations_pb2.GetLocationRequest(**request) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self.transport._wrapped_methods[self._client._transport.get_location] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata( - (("name", request.name),)), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. - response = await rpc( - request, retry=retry, timeout=timeout, metadata=metadata,) - - # Done; return the response. - return response - - async def list_locations( - self, - request: Optional[locations_pb2.ListLocationsRequest] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> locations_pb2.ListLocationsResponse: - r"""Lists information about the supported locations for this service. - - Args: - request (:class:`~.location_pb2.ListLocationsRequest`): - The request object. Request message for - `ListLocations` method. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, - if any, should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - Returns: - ~.location_pb2.ListLocationsResponse: - Response message for ``ListLocations`` method. - """ - # Create or coerce a protobuf request object. - # The request isn't a proto-plus wrapped type, - # so it must be constructed via keyword expansion. 
- if isinstance(request, dict): - request = locations_pb2.ListLocationsRequest(**request) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self.transport._wrapped_methods[self._client._transport.list_locations] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata( - (("name", request.name),)), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. - response = await rpc( - request, retry=retry, timeout=timeout, metadata=metadata,) - - # Done; return the response. - return response - - async def __aenter__(self) -> "DataplexServiceAsyncClient": - return self - - async def __aexit__(self, exc_type, exc, tb): - await self.transport.close() - -DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(gapic_version=package_version.__version__) - - -__all__ = ( - "DataplexServiceAsyncClient", -) diff --git a/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/dataplex_service/client.py b/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/dataplex_service/client.py deleted file mode 100644 index 81c523bf4d5d..000000000000 --- a/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/dataplex_service/client.py +++ /dev/null @@ -1,4933 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# -from collections import OrderedDict -import os -import re -from typing import Dict, Callable, Mapping, MutableMapping, MutableSequence, Optional, Sequence, Tuple, Type, Union, cast -import warnings - -from google.cloud.dataplex_v1 import gapic_version as package_version - -from google.api_core import client_options as client_options_lib -from google.api_core import exceptions as core_exceptions -from google.api_core import gapic_v1 -from google.api_core import retry as retries -from google.auth import credentials as ga_credentials # type: ignore -from google.auth.transport import mtls # type: ignore -from google.auth.transport.grpc import SslCredentials # type: ignore -from google.auth.exceptions import MutualTLSChannelError # type: ignore -from google.oauth2 import service_account # type: ignore - -try: - OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault, None] -except AttributeError: # pragma: NO COVER - OptionalRetry = Union[retries.Retry, object, None] # type: ignore - -from google.api_core import operation # type: ignore -from google.api_core import operation_async # type: ignore -from google.cloud.dataplex_v1.services.dataplex_service import pagers -from google.cloud.dataplex_v1.types import analyze -from google.cloud.dataplex_v1.types import resources -from google.cloud.dataplex_v1.types import service -from google.cloud.dataplex_v1.types import tasks -from google.cloud.location import locations_pb2 # type: ignore -from google.iam.v1 import iam_policy_pb2 # type: ignore -from google.iam.v1 import policy_pb2 # type: ignore -from google.longrunning import operations_pb2 # type: ignore -from google.protobuf import empty_pb2 # type: ignore -from google.protobuf import field_mask_pb2 # type: ignore -from google.protobuf import timestamp_pb2 # type: ignore -from .transports.base import DataplexServiceTransport, DEFAULT_CLIENT_INFO -from .transports.grpc import DataplexServiceGrpcTransport -from .transports.grpc_asyncio import DataplexServiceGrpcAsyncIOTransport - - -class DataplexServiceClientMeta(type): - """Metaclass for the DataplexService client. - - This provides class-level methods for building and retrieving - support objects (e.g. transport) without polluting the client instance - objects. - """ - _transport_registry = OrderedDict() # type: Dict[str, Type[DataplexServiceTransport]] - _transport_registry["grpc"] = DataplexServiceGrpcTransport - _transport_registry["grpc_asyncio"] = DataplexServiceGrpcAsyncIOTransport - - def get_transport_class(cls, - label: Optional[str] = None, - ) -> Type[DataplexServiceTransport]: - """Returns an appropriate transport class. - - Args: - label: The name of the desired transport. If none is - provided, then the first transport in the registry is used. - - Returns: - The transport class to use. - """ - # If a specific transport is requested, return that one. - if label: - return cls._transport_registry[label] - - # No transport is requested; return the default (that is, the first one - # in the dictionary). - return next(iter(cls._transport_registry.values())) - - -class DataplexServiceClient(metaclass=DataplexServiceClientMeta): - """Dataplex service provides data lakes as a service. The - primary resources offered by this service are Lakes, Zones and - Assets which collectively allow a data administrator to - organize, manage, secure and catalog data across their - organization located across cloud projects in a variety of - storage systems including Cloud Storage and BigQuery. 
- """ - - @staticmethod - def _get_default_mtls_endpoint(api_endpoint): - """Converts api endpoint to mTLS endpoint. - - Convert "*.sandbox.googleapis.com" and "*.googleapis.com" to - "*.mtls.sandbox.googleapis.com" and "*.mtls.googleapis.com" respectively. - Args: - api_endpoint (Optional[str]): the api endpoint to convert. - Returns: - str: converted mTLS api endpoint. - """ - if not api_endpoint: - return api_endpoint - - mtls_endpoint_re = re.compile( - r"(?P[^.]+)(?P\.mtls)?(?P\.sandbox)?(?P\.googleapis\.com)?" - ) - - m = mtls_endpoint_re.match(api_endpoint) - name, mtls, sandbox, googledomain = m.groups() - if mtls or not googledomain: - return api_endpoint - - if sandbox: - return api_endpoint.replace( - "sandbox.googleapis.com", "mtls.sandbox.googleapis.com" - ) - - return api_endpoint.replace(".googleapis.com", ".mtls.googleapis.com") - - # Note: DEFAULT_ENDPOINT is deprecated. Use _DEFAULT_ENDPOINT_TEMPLATE instead. - DEFAULT_ENDPOINT = "dataplex.googleapis.com" - DEFAULT_MTLS_ENDPOINT = _get_default_mtls_endpoint.__func__( # type: ignore - DEFAULT_ENDPOINT - ) - - _DEFAULT_ENDPOINT_TEMPLATE = "dataplex.{UNIVERSE_DOMAIN}" - _DEFAULT_UNIVERSE = "googleapis.com" - - @classmethod - def from_service_account_info(cls, info: dict, *args, **kwargs): - """Creates an instance of this client using the provided credentials - info. - - Args: - info (dict): The service account private key info. - args: Additional arguments to pass to the constructor. - kwargs: Additional arguments to pass to the constructor. - - Returns: - DataplexServiceClient: The constructed client. - """ - credentials = service_account.Credentials.from_service_account_info(info) - kwargs["credentials"] = credentials - return cls(*args, **kwargs) - - @classmethod - def from_service_account_file(cls, filename: str, *args, **kwargs): - """Creates an instance of this client using the provided credentials - file. - - Args: - filename (str): The path to the service account private key json - file. - args: Additional arguments to pass to the constructor. - kwargs: Additional arguments to pass to the constructor. - - Returns: - DataplexServiceClient: The constructed client. - """ - credentials = service_account.Credentials.from_service_account_file( - filename) - kwargs["credentials"] = credentials - return cls(*args, **kwargs) - - from_service_account_json = from_service_account_file - - @property - def transport(self) -> DataplexServiceTransport: - """Returns the transport used by the client instance. - - Returns: - DataplexServiceTransport: The transport used by the client - instance. 
- """ - return self._transport - - @staticmethod - def action_path(project: str,location: str,lake: str,action: str,) -> str: - """Returns a fully-qualified action string.""" - return "projects/{project}/locations/{location}/lakes/{lake}/actions/{action}".format(project=project, location=location, lake=lake, action=action, ) - - @staticmethod - def parse_action_path(path: str) -> Dict[str,str]: - """Parses a action path into its component segments.""" - m = re.match(r"^projects/(?P.+?)/locations/(?P.+?)/lakes/(?P.+?)/actions/(?P.+?)$", path) - return m.groupdict() if m else {} - - @staticmethod - def asset_path(project: str,location: str,lake: str,zone: str,asset: str,) -> str: - """Returns a fully-qualified asset string.""" - return "projects/{project}/locations/{location}/lakes/{lake}/zones/{zone}/assets/{asset}".format(project=project, location=location, lake=lake, zone=zone, asset=asset, ) - - @staticmethod - def parse_asset_path(path: str) -> Dict[str,str]: - """Parses a asset path into its component segments.""" - m = re.match(r"^projects/(?P.+?)/locations/(?P.+?)/lakes/(?P.+?)/zones/(?P.+?)/assets/(?P.+?)$", path) - return m.groupdict() if m else {} - - @staticmethod - def environment_path(project: str,location: str,lake: str,environment: str,) -> str: - """Returns a fully-qualified environment string.""" - return "projects/{project}/locations/{location}/lakes/{lake}/environments/{environment}".format(project=project, location=location, lake=lake, environment=environment, ) - - @staticmethod - def parse_environment_path(path: str) -> Dict[str,str]: - """Parses a environment path into its component segments.""" - m = re.match(r"^projects/(?P.+?)/locations/(?P.+?)/lakes/(?P.+?)/environments/(?P.+?)$", path) - return m.groupdict() if m else {} - - @staticmethod - def job_path(project: str,location: str,lake: str,task: str,job: str,) -> str: - """Returns a fully-qualified job string.""" - return "projects/{project}/locations/{location}/lakes/{lake}/tasks/{task}/jobs/{job}".format(project=project, location=location, lake=lake, task=task, job=job, ) - - @staticmethod - def parse_job_path(path: str) -> Dict[str,str]: - """Parses a job path into its component segments.""" - m = re.match(r"^projects/(?P.+?)/locations/(?P.+?)/lakes/(?P.+?)/tasks/(?P.+?)/jobs/(?P.+?)$", path) - return m.groupdict() if m else {} - - @staticmethod - def lake_path(project: str,location: str,lake: str,) -> str: - """Returns a fully-qualified lake string.""" - return "projects/{project}/locations/{location}/lakes/{lake}".format(project=project, location=location, lake=lake, ) - - @staticmethod - def parse_lake_path(path: str) -> Dict[str,str]: - """Parses a lake path into its component segments.""" - m = re.match(r"^projects/(?P.+?)/locations/(?P.+?)/lakes/(?P.+?)$", path) - return m.groupdict() if m else {} - - @staticmethod - def session_path(project: str,location: str,lake: str,environment: str,session: str,) -> str: - """Returns a fully-qualified session string.""" - return "projects/{project}/locations/{location}/lakes/{lake}/environments/{environment}/sessions/{session}".format(project=project, location=location, lake=lake, environment=environment, session=session, ) - - @staticmethod - def parse_session_path(path: str) -> Dict[str,str]: - """Parses a session path into its component segments.""" - m = re.match(r"^projects/(?P.+?)/locations/(?P.+?)/lakes/(?P.+?)/environments/(?P.+?)/sessions/(?P.+?)$", path) - return m.groupdict() if m else {} - - @staticmethod - def task_path(project: str,location: str,lake: 
-    @staticmethod
-    def task_path(project: str,location: str,lake: str,task: str,) -> str:
-        """Returns a fully-qualified task string."""
-        return "projects/{project}/locations/{location}/lakes/{lake}/tasks/{task}".format(project=project, location=location, lake=lake, task=task, )
-
-    @staticmethod
-    def parse_task_path(path: str) -> Dict[str,str]:
-        """Parses a task path into its component segments."""
-        m = re.match(r"^projects/(?P<project>.+?)/locations/(?P<location>.+?)/lakes/(?P<lake>.+?)/tasks/(?P<task>.+?)$", path)
-        return m.groupdict() if m else {}
-
-    @staticmethod
-    def zone_path(project: str,location: str,lake: str,zone: str,) -> str:
-        """Returns a fully-qualified zone string."""
-        return "projects/{project}/locations/{location}/lakes/{lake}/zones/{zone}".format(project=project, location=location, lake=lake, zone=zone, )
-
-    @staticmethod
-    def parse_zone_path(path: str) -> Dict[str,str]:
-        """Parses a zone path into its component segments."""
-        m = re.match(r"^projects/(?P<project>.+?)/locations/(?P<location>.+?)/lakes/(?P<lake>.+?)/zones/(?P<zone>.+?)$", path)
-        return m.groupdict() if m else {}
-
-    @staticmethod
-    def common_billing_account_path(billing_account: str, ) -> str:
-        """Returns a fully-qualified billing_account string."""
-        return "billingAccounts/{billing_account}".format(billing_account=billing_account, )
-
-    @staticmethod
-    def parse_common_billing_account_path(path: str) -> Dict[str,str]:
-        """Parse a billing_account path into its component segments."""
-        m = re.match(r"^billingAccounts/(?P<billing_account>.+?)$", path)
-        return m.groupdict() if m else {}
-
-    @staticmethod
-    def common_folder_path(folder: str, ) -> str:
-        """Returns a fully-qualified folder string."""
-        return "folders/{folder}".format(folder=folder, )
-
-    @staticmethod
-    def parse_common_folder_path(path: str) -> Dict[str,str]:
-        """Parse a folder path into its component segments."""
-        m = re.match(r"^folders/(?P<folder>.+?)$", path)
-        return m.groupdict() if m else {}
-
-    @staticmethod
-    def common_organization_path(organization: str, ) -> str:
-        """Returns a fully-qualified organization string."""
-        return "organizations/{organization}".format(organization=organization, )
-
-    @staticmethod
-    def parse_common_organization_path(path: str) -> Dict[str,str]:
-        """Parse an organization path into its component segments."""
-        m = re.match(r"^organizations/(?P<organization>.+?)$", path)
-        return m.groupdict() if m else {}
-
-    @staticmethod
-    def common_project_path(project: str, ) -> str:
-        """Returns a fully-qualified project string."""
-        return "projects/{project}".format(project=project, )
-
-    @staticmethod
-    def parse_common_project_path(path: str) -> Dict[str,str]:
-        """Parse a project path into its component segments."""
-        m = re.match(r"^projects/(?P<project>.+?)$", path)
-        return m.groupdict() if m else {}
-
-    @staticmethod
-    def common_location_path(project: str, location: str, ) -> str:
-        """Returns a fully-qualified location string."""
-        return "projects/{project}/locations/{location}".format(project=project, location=location, )
-
-    @staticmethod
-    def parse_common_location_path(path: str) -> Dict[str,str]:
-        """Parse a location path into its component segments."""
-        m = re.match(r"^projects/(?P<project>.+?)/locations/(?P<location>.+?)$", path)
-        return m.groupdict() if m else {}
-
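-    # Editor's note (worked example, env-var values assumed): with
-    # GOOGLE_API_USE_CLIENT_CERTIFICATE unset (i.e. "false") and
-    # GOOGLE_API_USE_MTLS_ENDPOINT="auto", no cert source is configured, so the
-    # deprecated helper below would resolve to ("dataplex.googleapis.com", None);
-    # with a client certificate configured it would instead return
-    # ("dataplex.mtls.googleapis.com", <the cert source>).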
-    @classmethod
-    def get_mtls_endpoint_and_cert_source(cls, client_options: Optional[client_options_lib.ClientOptions] = None):
-        """Deprecated. Return the API endpoint and client cert source for mutual TLS.
-
-        The client cert source is determined in the following order:
-        (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the
-        client cert source is None.
-        (2) if `client_options.client_cert_source` is provided, use the provided one; if the
-        default client cert source exists, use the default one; otherwise the client cert
-        source is None.
-
-        The API endpoint is determined in the following order:
-        (1) if `client_options.api_endpoint` is provided, use the provided one.
-        (2) if `GOOGLE_API_USE_MTLS_ENDPOINT` environment variable is "always", use the
-        default mTLS endpoint; if the environment variable is "never", use the default API
-        endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise
-        use the default API endpoint.
-
-        More details can be found at https://google.aip.dev/auth/4114.
-
-        Args:
-            client_options (google.api_core.client_options.ClientOptions): Custom options for the
-                client. Only the `api_endpoint` and `client_cert_source` properties may be used
-                in this method.
-
-        Returns:
-            Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the
-                client cert source to use.
-
-        Raises:
-            google.auth.exceptions.MutualTLSChannelError: If any errors happen.
-        """
-
-        warnings.warn("get_mtls_endpoint_and_cert_source is deprecated. Use the api_endpoint property instead.",
-            DeprecationWarning)
-        if client_options is None:
-            client_options = client_options_lib.ClientOptions()
-        use_client_cert = os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false")
-        use_mtls_endpoint = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto")
-        if use_client_cert not in ("true", "false"):
-            raise ValueError("Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`")
-        if use_mtls_endpoint not in ("auto", "never", "always"):
-            raise MutualTLSChannelError("Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`")
-
-        # Figure out the client cert source to use.
-        client_cert_source = None
-        if use_client_cert == "true":
-            if client_options.client_cert_source:
-                client_cert_source = client_options.client_cert_source
-            elif mtls.has_default_client_cert_source():
-                client_cert_source = mtls.default_client_cert_source()
-
-        # Figure out which api endpoint to use.
-        if client_options.api_endpoint is not None:
-            api_endpoint = client_options.api_endpoint
-        elif use_mtls_endpoint == "always" or (use_mtls_endpoint == "auto" and client_cert_source):
-            api_endpoint = cls.DEFAULT_MTLS_ENDPOINT
-        else:
-            api_endpoint = cls.DEFAULT_ENDPOINT
-
-        return api_endpoint, client_cert_source
-
-    @staticmethod
-    def _read_environment_variables():
-        """Returns the environment variables used by the client.
-
-        Returns:
-            Tuple[bool, str, str]: returns the GOOGLE_API_USE_CLIENT_CERTIFICATE,
-            GOOGLE_API_USE_MTLS_ENDPOINT, and GOOGLE_CLOUD_UNIVERSE_DOMAIN environment variables.
-
-        Raises:
-            ValueError: If GOOGLE_API_USE_CLIENT_CERTIFICATE is not
-                any of ["true", "false"].
-            google.auth.exceptions.MutualTLSChannelError: If GOOGLE_API_USE_MTLS_ENDPOINT
-                is not any of ["auto", "never", "always"].
- """ - use_client_cert = os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false").lower() - use_mtls_endpoint = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto").lower() - universe_domain_env = os.getenv("GOOGLE_CLOUD_UNIVERSE_DOMAIN") - if use_client_cert not in ("true", "false"): - raise ValueError("Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`") - if use_mtls_endpoint not in ("auto", "never", "always"): - raise MutualTLSChannelError("Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`") - return use_client_cert == "true", use_mtls_endpoint, universe_domain_env - - @staticmethod - def _get_client_cert_source(provided_cert_source, use_cert_flag): - """Return the client cert source to be used by the client. - - Args: - provided_cert_source (bytes): The client certificate source provided. - use_cert_flag (bool): A flag indicating whether to use the client certificate. - - Returns: - bytes or None: The client cert source to be used by the client. - """ - client_cert_source = None - if use_cert_flag: - if provided_cert_source: - client_cert_source = provided_cert_source - elif mtls.has_default_client_cert_source(): - client_cert_source = mtls.default_client_cert_source() - return client_cert_source - - @staticmethod - def _get_api_endpoint(api_override, client_cert_source, universe_domain, use_mtls_endpoint): - """Return the API endpoint used by the client. - - Args: - api_override (str): The API endpoint override. If specified, this is always - the return value of this function and the other arguments are not used. - client_cert_source (bytes): The client certificate source used by the client. - universe_domain (str): The universe domain used by the client. - use_mtls_endpoint (str): How to use the mTLS endpoint, which depends also on the other parameters. - Possible values are "always", "auto", or "never". - - Returns: - str: The API endpoint to be used by the client. - """ - if api_override is not None: - api_endpoint = api_override - elif use_mtls_endpoint == "always" or (use_mtls_endpoint == "auto" and client_cert_source): - _default_universe = DataplexServiceClient._DEFAULT_UNIVERSE - if universe_domain != _default_universe: - raise MutualTLSChannelError(f"mTLS is not supported in any universe other than {_default_universe}.") - api_endpoint = DataplexServiceClient.DEFAULT_MTLS_ENDPOINT - else: - api_endpoint = DataplexServiceClient._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=universe_domain) - return api_endpoint - - @staticmethod - def _get_universe_domain(client_universe_domain: Optional[str], universe_domain_env: Optional[str]) -> str: - """Return the universe domain used by the client. - - Args: - client_universe_domain (Optional[str]): The universe domain configured via the client options. - universe_domain_env (Optional[str]): The universe domain configured via the "GOOGLE_CLOUD_UNIVERSE_DOMAIN" environment variable. - - Returns: - str: The universe domain to be used by the client. - - Raises: - ValueError: If the universe domain is an empty string. 
- """ - universe_domain = DataplexServiceClient._DEFAULT_UNIVERSE - if client_universe_domain is not None: - universe_domain = client_universe_domain - elif universe_domain_env is not None: - universe_domain = universe_domain_env - if len(universe_domain.strip()) == 0: - raise ValueError("Universe Domain cannot be an empty string.") - return universe_domain - - def _validate_universe_domain(self): - """Validates client's and credentials' universe domains are consistent. - - Returns: - bool: True iff the configured universe domain is valid. - - Raises: - ValueError: If the configured universe domain is not valid. - """ - - # NOTE (b/349488459): universe validation is disabled until further notice. - return True - - @property - def api_endpoint(self): - """Return the API endpoint used by the client instance. - - Returns: - str: The API endpoint used by the client instance. - """ - return self._api_endpoint - - @property - def universe_domain(self) -> str: - """Return the universe domain used by the client instance. - - Returns: - str: The universe domain used by the client instance. - """ - return self._universe_domain - - def __init__(self, *, - credentials: Optional[ga_credentials.Credentials] = None, - transport: Optional[Union[str, DataplexServiceTransport, Callable[..., DataplexServiceTransport]]] = None, - client_options: Optional[Union[client_options_lib.ClientOptions, dict]] = None, - client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, - ) -> None: - """Instantiates the dataplex service client. - - Args: - credentials (Optional[google.auth.credentials.Credentials]): The - authorization credentials to attach to requests. These - credentials identify the application to the service; if none - are specified, the client will attempt to ascertain the - credentials from the environment. - transport (Optional[Union[str,DataplexServiceTransport,Callable[..., DataplexServiceTransport]]]): - The transport to use, or a Callable that constructs and returns a new transport. - If a Callable is given, it will be called with the same set of initialization - arguments as used in the DataplexServiceTransport constructor. - If set to None, a transport is chosen automatically. - client_options (Optional[Union[google.api_core.client_options.ClientOptions, dict]]): - Custom options for the client. - - 1. The ``api_endpoint`` property can be used to override the - default endpoint provided by the client when ``transport`` is - not explicitly provided. Only if this property is not set and - ``transport`` was not explicitly provided, the endpoint is - determined by the GOOGLE_API_USE_MTLS_ENDPOINT environment - variable, which have one of the following values: - "always" (always use the default mTLS endpoint), "never" (always - use the default regular endpoint) and "auto" (auto-switch to the - default mTLS endpoint if client certificate is present; this is - the default value). - - 2. If the GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable - is "true", then the ``client_cert_source`` property can be used - to provide a client certificate for mTLS transport. If - not provided, the default SSL client certificate will be used if - present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not - set, no client certificate will be used. - - 3. The ``universe_domain`` property can be used to override the - default "googleapis.com" universe. Note that the ``api_endpoint`` - property still takes precedence; and ``universe_domain`` is - currently not supported for mTLS. 
- - client_info (google.api_core.gapic_v1.client_info.ClientInfo): - The client info used to send a user-agent string along with - API requests. If ``None``, then default info will be used. - Generally, you only need to set this if you're developing - your own client library. - - Raises: - google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport - creation failed for any reason. - """ - self._client_options = client_options - if isinstance(self._client_options, dict): - self._client_options = client_options_lib.from_dict(self._client_options) - if self._client_options is None: - self._client_options = client_options_lib.ClientOptions() - self._client_options = cast(client_options_lib.ClientOptions, self._client_options) - - universe_domain_opt = getattr(self._client_options, 'universe_domain', None) - - self._use_client_cert, self._use_mtls_endpoint, self._universe_domain_env = DataplexServiceClient._read_environment_variables() - self._client_cert_source = DataplexServiceClient._get_client_cert_source(self._client_options.client_cert_source, self._use_client_cert) - self._universe_domain = DataplexServiceClient._get_universe_domain(universe_domain_opt, self._universe_domain_env) - self._api_endpoint = None # updated below, depending on `transport` - - # Initialize the universe domain validation. - self._is_universe_domain_valid = False - - api_key_value = getattr(self._client_options, "api_key", None) - if api_key_value and credentials: - raise ValueError("client_options.api_key and credentials are mutually exclusive") - - # Save or instantiate the transport. - # Ordinarily, we provide the transport, but allowing a custom transport - # instance provides an extensibility point for unusual situations. - transport_provided = isinstance(transport, DataplexServiceTransport) - if transport_provided: - # transport is a DataplexServiceTransport instance. - if credentials or self._client_options.credentials_file or api_key_value: - raise ValueError("When providing a transport instance, " - "provide its credentials directly.") - if self._client_options.scopes: - raise ValueError( - "When providing a transport instance, provide its scopes " - "directly." 
- ) - self._transport = cast(DataplexServiceTransport, transport) - self._api_endpoint = self._transport.host - - self._api_endpoint = (self._api_endpoint or - DataplexServiceClient._get_api_endpoint( - self._client_options.api_endpoint, - self._client_cert_source, - self._universe_domain, - self._use_mtls_endpoint)) - - if not transport_provided: - import google.auth._default # type: ignore - - if api_key_value and hasattr(google.auth._default, "get_api_key_credentials"): - credentials = google.auth._default.get_api_key_credentials(api_key_value) - - transport_init: Union[Type[DataplexServiceTransport], Callable[..., DataplexServiceTransport]] = ( - DataplexServiceClient.get_transport_class(transport) - if isinstance(transport, str) or transport is None - else cast(Callable[..., DataplexServiceTransport], transport) - ) - # initialize with the provided callable or the passed in class - self._transport = transport_init( - credentials=credentials, - credentials_file=self._client_options.credentials_file, - host=self._api_endpoint, - scopes=self._client_options.scopes, - client_cert_source_for_mtls=self._client_cert_source, - quota_project_id=self._client_options.quota_project_id, - client_info=client_info, - always_use_jwt_access=True, - api_audience=self._client_options.api_audience, - ) - - def create_lake(self, - request: Optional[Union[service.CreateLakeRequest, dict]] = None, - *, - parent: Optional[str] = None, - lake: Optional[resources.Lake] = None, - lake_id: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> operation.Operation: - r"""Creates a lake resource. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import dataplex_v1 - - def sample_create_lake(): - # Create a client - client = dataplex_v1.DataplexServiceClient() - - # Initialize request argument(s) - request = dataplex_v1.CreateLakeRequest( - parent="parent_value", - lake_id="lake_id_value", - ) - - # Make the request - operation = client.create_lake(request=request) - - print("Waiting for operation to complete...") - - response = operation.result() - - # Handle the response - print(response) - - Args: - request (Union[google.cloud.dataplex_v1.types.CreateLakeRequest, dict]): - The request object. Create lake request. - parent (str): - Required. The resource name of the lake location, of the - form: projects/{project_number}/locations/{location_id} - where ``location_id`` refers to a GCP region. - - This corresponds to the ``parent`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - lake (google.cloud.dataplex_v1.types.Lake): - Required. Lake resource - This corresponds to the ``lake`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - lake_id (str): - Required. Lake identifier. This ID will be used to - generate names such as database and dataset names when - publishing metadata to Hive Metastore and BigQuery. - - - Must contain only lowercase letters, numbers and - hyphens. - - Must start with a letter. 
- - Must end with a number or a letter. - - Must be between 1-63 characters. - - Must be unique within the customer project / - location. - - This corresponds to the ``lake_id`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.api_core.operation.Operation: - An object representing a long-running operation. - - The result type for the operation will be :class:`google.cloud.dataplex_v1.types.Lake` A lake is a centralized repository for managing enterprise data across the - organization distributed across many cloud projects, - and stored in a variety of storage services such as - Google Cloud Storage and BigQuery. The resources - attached to a lake are referred to as managed - resources. Data within these managed resources can be - structured or unstructured. A lake provides data - admins with tools to organize, secure and manage - their data at scale, and provides data scientists and - data engineers an integrated experience to easily - search, discover, analyze and transform data and - associated metadata. - - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([parent, lake, lake_id]) - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, service.CreateLakeRequest): - request = service.CreateLakeRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if parent is not None: - request.parent = parent - if lake is not None: - request.lake = lake - if lake_id is not None: - request.lake_id = lake_id - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.create_lake] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), - ) - - # Validate the universe domain. - self._validate_universe_domain() - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Wrap the response in an operation future. - response = operation.from_gapic( - response, - self._transport.operations_client, - resources.Lake, - metadata_type=service.OperationMetadata, - ) - - # Done; return the response. - return response - - def update_lake(self, - request: Optional[Union[service.UpdateLakeRequest, dict]] = None, - *, - lake: Optional[resources.Lake] = None, - update_mask: Optional[field_mask_pb2.FieldMask] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> operation.Operation: - r"""Updates a lake resource. - - .. 
code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import dataplex_v1 - - def sample_update_lake(): - # Create a client - client = dataplex_v1.DataplexServiceClient() - - # Initialize request argument(s) - request = dataplex_v1.UpdateLakeRequest( - ) - - # Make the request - operation = client.update_lake(request=request) - - print("Waiting for operation to complete...") - - response = operation.result() - - # Handle the response - print(response) - - Args: - request (Union[google.cloud.dataplex_v1.types.UpdateLakeRequest, dict]): - The request object. Update lake request. - lake (google.cloud.dataplex_v1.types.Lake): - Required. Update description. Only fields specified in - ``update_mask`` are updated. - - This corresponds to the ``lake`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - update_mask (google.protobuf.field_mask_pb2.FieldMask): - Required. Mask of fields to update. - This corresponds to the ``update_mask`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.api_core.operation.Operation: - An object representing a long-running operation. - - The result type for the operation will be :class:`google.cloud.dataplex_v1.types.Lake` A lake is a centralized repository for managing enterprise data across the - organization distributed across many cloud projects, - and stored in a variety of storage services such as - Google Cloud Storage and BigQuery. The resources - attached to a lake are referred to as managed - resources. Data within these managed resources can be - structured or unstructured. A lake provides data - admins with tools to organize, secure and manage - their data at scale, and provides data scientists and - data engineers an integrated experience to easily - search, discover, analyze and transform data and - associated metadata. - - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([lake, update_mask]) - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, service.UpdateLakeRequest): - request = service.UpdateLakeRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if lake is not None: - request.lake = lake - if update_mask is not None: - request.update_mask = update_mask - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. 
- rpc = self._transport._wrapped_methods[self._transport.update_lake] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("lake.name", request.lake.name), - )), - ) - - # Validate the universe domain. - self._validate_universe_domain() - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Wrap the response in an operation future. - response = operation.from_gapic( - response, - self._transport.operations_client, - resources.Lake, - metadata_type=service.OperationMetadata, - ) - - # Done; return the response. - return response - - def delete_lake(self, - request: Optional[Union[service.DeleteLakeRequest, dict]] = None, - *, - name: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> operation.Operation: - r"""Deletes a lake resource. All zones within the lake - must be deleted before the lake can be deleted. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import dataplex_v1 - - def sample_delete_lake(): - # Create a client - client = dataplex_v1.DataplexServiceClient() - - # Initialize request argument(s) - request = dataplex_v1.DeleteLakeRequest( - name="name_value", - ) - - # Make the request - operation = client.delete_lake(request=request) - - print("Waiting for operation to complete...") - - response = operation.result() - - # Handle the response - print(response) - - Args: - request (Union[google.cloud.dataplex_v1.types.DeleteLakeRequest, dict]): - The request object. Delete lake request. - name (str): - Required. The resource name of the lake: - ``projects/{project_number}/locations/{location_id}/lakes/{lake_id}``. - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.api_core.operation.Operation: - An object representing a long-running operation. - - The result type for the operation will be :class:`google.protobuf.empty_pb2.Empty` A generic empty message that you can re-use to avoid defining duplicated - empty messages in your APIs. A typical example is to - use it as the request or the response type of an API - method. For instance: - - service Foo { - rpc Bar(google.protobuf.Empty) returns - (google.protobuf.Empty); - - } - - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. 
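-        # Editor's illustration (argument values assumed): the check below makes
-        # request= and the flattened name= mutually exclusive, so calling
-        #   client.delete_lake(request=req, name="projects/p/locations/l/lakes/k")
-        # raises ValueError, while either argument on its own is accepted.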
- has_flattened_params = any([name]) - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, service.DeleteLakeRequest): - request = service.DeleteLakeRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.delete_lake] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Validate the universe domain. - self._validate_universe_domain() - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Wrap the response in an operation future. - response = operation.from_gapic( - response, - self._transport.operations_client, - empty_pb2.Empty, - metadata_type=service.OperationMetadata, - ) - - # Done; return the response. - return response - - def list_lakes(self, - request: Optional[Union[service.ListLakesRequest, dict]] = None, - *, - parent: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> pagers.ListLakesPager: - r"""Lists lake resources in a project and location. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import dataplex_v1 - - def sample_list_lakes(): - # Create a client - client = dataplex_v1.DataplexServiceClient() - - # Initialize request argument(s) - request = dataplex_v1.ListLakesRequest( - parent="parent_value", - ) - - # Make the request - page_result = client.list_lakes(request=request) - - # Handle the response - for response in page_result: - print(response) - - Args: - request (Union[google.cloud.dataplex_v1.types.ListLakesRequest, dict]): - The request object. List lakes request. - parent (str): - Required. The resource name of the lake location, of the - form: - ``projects/{project_number}/locations/{location_id}`` - where ``location_id`` refers to a GCP region. - - This corresponds to the ``parent`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.cloud.dataplex_v1.services.dataplex_service.pagers.ListLakesPager: - List lakes response. - - Iterating over this object will yield - results and resolve additional pages - automatically. - - """ - # Create or coerce a protobuf request object. 
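- # For example, the flattened form iterates lazily over every lake in
- # a location (resource name is illustrative):
- #
- #   for lake in client.list_lakes(parent="projects/123/locations/us-central1"):
- #       print(lake.name)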
- # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([parent]) - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, service.ListLakesRequest): - request = service.ListLakesRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if parent is not None: - request.parent = parent - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.list_lakes] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), - ) - - # Validate the universe domain. - self._validate_universe_domain() - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # This method is paged; wrap the response in a pager, which provides - # an `__iter__` convenience method. - response = pagers.ListLakesPager( - method=rpc, - request=request, - response=response, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - def get_lake(self, - request: Optional[Union[service.GetLakeRequest, dict]] = None, - *, - name: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> resources.Lake: - r"""Retrieves a lake resource. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import dataplex_v1 - - def sample_get_lake(): - # Create a client - client = dataplex_v1.DataplexServiceClient() - - # Initialize request argument(s) - request = dataplex_v1.GetLakeRequest( - name="name_value", - ) - - # Make the request - response = client.get_lake(request=request) - - # Handle the response - print(response) - - Args: - request (Union[google.cloud.dataplex_v1.types.GetLakeRequest, dict]): - The request object. Get lake request. - name (str): - Required. The resource name of the lake: - ``projects/{project_number}/locations/{location_id}/lakes/{lake_id}``. - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. 
- - Returns: - google.cloud.dataplex_v1.types.Lake: - A lake is a centralized repository - for managing enterprise data across the - organization distributed across many - cloud projects, and stored in a variety - of storage services such as Google Cloud - Storage and BigQuery. The resources - attached to a lake are referred to as - managed resources. Data within these - managed resources can be structured or - unstructured. A lake provides data - admins with tools to organize, secure - and manage their data at scale, and - provides data scientists and data - engineers an integrated experience to - easily search, discover, analyze and - transform data and associated metadata. - - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([name]) - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, service.GetLakeRequest): - request = service.GetLakeRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.get_lake] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Validate the universe domain. - self._validate_universe_domain() - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - def list_lake_actions(self, - request: Optional[Union[service.ListLakeActionsRequest, dict]] = None, - *, - parent: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> pagers.ListLakeActionsPager: - r"""Lists action resources in a lake. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import dataplex_v1 - - def sample_list_lake_actions(): - # Create a client - client = dataplex_v1.DataplexServiceClient() - - # Initialize request argument(s) - request = dataplex_v1.ListLakeActionsRequest( - parent="parent_value", - ) - - # Make the request - page_result = client.list_lake_actions(request=request) - - # Handle the response - for response in page_result: - print(response) - - Args: - request (Union[google.cloud.dataplex_v1.types.ListLakeActionsRequest, dict]): - The request object. List lake actions request. - parent (str): - Required. The resource name of the parent lake: - ``projects/{project_number}/locations/{location_id}/lakes/{lake_id}``. 
- - This corresponds to the ``parent`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.cloud.dataplex_v1.services.dataplex_service.pagers.ListLakeActionsPager: - List actions response. - - Iterating over this object will yield - results and resolve additional pages - automatically. - - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([parent]) - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, service.ListLakeActionsRequest): - request = service.ListLakeActionsRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if parent is not None: - request.parent = parent - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.list_lake_actions] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), - ) - - # Validate the universe domain. - self._validate_universe_domain() - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # This method is paged; wrap the response in a pager, which provides - # an `__iter__` convenience method. - response = pagers.ListLakeActionsPager( - method=rpc, - request=request, - response=response, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - def create_zone(self, - request: Optional[Union[service.CreateZoneRequest, dict]] = None, - *, - parent: Optional[str] = None, - zone: Optional[resources.Zone] = None, - zone_id: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> operation.Operation: - r"""Creates a zone resource within a lake. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. 
- # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import dataplex_v1 - - def sample_create_zone(): - # Create a client - client = dataplex_v1.DataplexServiceClient() - - # Initialize request argument(s) - zone = dataplex_v1.Zone() - zone.type_ = "CURATED" - zone.resource_spec.location_type = "MULTI_REGION" - - request = dataplex_v1.CreateZoneRequest( - parent="parent_value", - zone_id="zone_id_value", - zone=zone, - ) - - # Make the request - operation = client.create_zone(request=request) - - print("Waiting for operation to complete...") - - response = operation.result() - - # Handle the response - print(response) - - Args: - request (Union[google.cloud.dataplex_v1.types.CreateZoneRequest, dict]): - The request object. Create zone request. - parent (str): - Required. The resource name of the parent lake: - ``projects/{project_number}/locations/{location_id}/lakes/{lake_id}``. - - This corresponds to the ``parent`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - zone (google.cloud.dataplex_v1.types.Zone): - Required. Zone resource. - This corresponds to the ``zone`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - zone_id (str): - Required. Zone identifier. This ID will be used to - generate names such as database and dataset names when - publishing metadata to Hive Metastore and BigQuery. - - - Must contain only lowercase letters, numbers and - hyphens. - - Must start with a letter. - - Must end with a number or a letter. - - Must be between 1-63 characters. - - Must be unique across all lakes from all locations in - a project. - - Must not be one of the reserved IDs (i.e. "default", - "global-temp") - - This corresponds to the ``zone_id`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.api_core.operation.Operation: - An object representing a long-running operation. - - The result type for the operation will be :class:`google.cloud.dataplex_v1.types.Zone` A zone represents a logical group of related assets within a lake. A zone can - be used to map to organizational structure or - represent stages of data readiness from raw to - curated. It provides managing behavior that is shared - or inherited by all contained assets. - - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([parent, zone, zone_id]) - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, service.CreateZoneRequest): - request = service.CreateZoneRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. 
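- # For example, the flattened equivalent of the sample above
- # (identifiers are illustrative):
- #
- #   zone = dataplex_v1.Zone(
- #       type_="CURATED",
- #       resource_spec=dataplex_v1.Zone.ResourceSpec(
- #           location_type="MULTI_REGION"),
- #   )
- #   client.create_zone(parent=lake_name, zone=zone,
- #                      zone_id="curated-zone").result()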
- if parent is not None: - request.parent = parent - if zone is not None: - request.zone = zone - if zone_id is not None: - request.zone_id = zone_id - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.create_zone] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), - ) - - # Validate the universe domain. - self._validate_universe_domain() - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Wrap the response in an operation future. - response = operation.from_gapic( - response, - self._transport.operations_client, - resources.Zone, - metadata_type=service.OperationMetadata, - ) - - # Done; return the response. - return response - - def update_zone(self, - request: Optional[Union[service.UpdateZoneRequest, dict]] = None, - *, - zone: Optional[resources.Zone] = None, - update_mask: Optional[field_mask_pb2.FieldMask] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> operation.Operation: - r"""Updates a zone resource. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import dataplex_v1 - - def sample_update_zone(): - # Create a client - client = dataplex_v1.DataplexServiceClient() - - # Initialize request argument(s) - zone = dataplex_v1.Zone() - zone.type_ = "CURATED" - zone.resource_spec.location_type = "MULTI_REGION" - - request = dataplex_v1.UpdateZoneRequest( - zone=zone, - ) - - # Make the request - operation = client.update_zone(request=request) - - print("Waiting for operation to complete...") - - response = operation.result() - - # Handle the response - print(response) - - Args: - request (Union[google.cloud.dataplex_v1.types.UpdateZoneRequest, dict]): - The request object. Update zone request. - zone (google.cloud.dataplex_v1.types.Zone): - Required. Update description. Only fields specified in - ``update_mask`` are updated. - - This corresponds to the ``zone`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - update_mask (google.protobuf.field_mask_pb2.FieldMask): - Required. Mask of fields to update. - This corresponds to the ``update_mask`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.api_core.operation.Operation: - An object representing a long-running operation. - - The result type for the operation will be :class:`google.cloud.dataplex_v1.types.Zone` A zone represents a logical group of related assets within a lake. 
A zone can - be used to map to organizational structure or - represent stages of data readiness from raw to - curated. It provides managing behavior that is shared - or inherited by all contained assets. - - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([zone, update_mask]) - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, service.UpdateZoneRequest): - request = service.UpdateZoneRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if zone is not None: - request.zone = zone - if update_mask is not None: - request.update_mask = update_mask - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.update_zone] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("zone.name", request.zone.name), - )), - ) - - # Validate the universe domain. - self._validate_universe_domain() - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Wrap the response in an operation future. - response = operation.from_gapic( - response, - self._transport.operations_client, - resources.Zone, - metadata_type=service.OperationMetadata, - ) - - # Done; return the response. - return response - - def delete_zone(self, - request: Optional[Union[service.DeleteZoneRequest, dict]] = None, - *, - name: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> operation.Operation: - r"""Deletes a zone resource. All assets within a zone - must be deleted before the zone can be deleted. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import dataplex_v1 - - def sample_delete_zone(): - # Create a client - client = dataplex_v1.DataplexServiceClient() - - # Initialize request argument(s) - request = dataplex_v1.DeleteZoneRequest( - name="name_value", - ) - - # Make the request - operation = client.delete_zone(request=request) - - print("Waiting for operation to complete...") - - response = operation.result() - - # Handle the response - print(response) - - Args: - request (Union[google.cloud.dataplex_v1.types.DeleteZoneRequest, dict]): - The request object. Delete zone request. - name (str): - Required. The resource name of the zone: - ``projects/{project_number}/locations/{location_id}/lakes/{lake_id}/zones/{zone_id}``. - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. 
- retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.api_core.operation.Operation: - An object representing a long-running operation. - - The result type for the operation will be :class:`google.protobuf.empty_pb2.Empty` A generic empty message that you can re-use to avoid defining duplicated - empty messages in your APIs. A typical example is to - use it as the request or the response type of an API - method. For instance: - - service Foo { - rpc Bar(google.protobuf.Empty) returns - (google.protobuf.Empty); - - } - - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([name]) - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, service.DeleteZoneRequest): - request = service.DeleteZoneRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.delete_zone] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Validate the universe domain. - self._validate_universe_domain() - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Wrap the response in an operation future. - response = operation.from_gapic( - response, - self._transport.operations_client, - empty_pb2.Empty, - metadata_type=service.OperationMetadata, - ) - - # Done; return the response. - return response - - def list_zones(self, - request: Optional[Union[service.ListZonesRequest, dict]] = None, - *, - parent: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> pagers.ListZonesPager: - r"""Lists zone resources in a lake. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. 
- # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import dataplex_v1 - - def sample_list_zones(): - # Create a client - client = dataplex_v1.DataplexServiceClient() - - # Initialize request argument(s) - request = dataplex_v1.ListZonesRequest( - parent="parent_value", - ) - - # Make the request - page_result = client.list_zones(request=request) - - # Handle the response - for response in page_result: - print(response) - - Args: - request (Union[google.cloud.dataplex_v1.types.ListZonesRequest, dict]): - The request object. List zones request. - parent (str): - Required. The resource name of the parent lake: - ``projects/{project_number}/locations/{location_id}/lakes/{lake_id}``. - - This corresponds to the ``parent`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.cloud.dataplex_v1.services.dataplex_service.pagers.ListZonesPager: - List zones response. - - Iterating over this object will yield - results and resolve additional pages - automatically. - - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([parent]) - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, service.ListZonesRequest): - request = service.ListZonesRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if parent is not None: - request.parent = parent - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.list_zones] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), - ) - - # Validate the universe domain. - self._validate_universe_domain() - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # This method is paged; wrap the response in a pager, which provides - # an `__iter__` convenience method. - response = pagers.ListZonesPager( - method=rpc, - request=request, - response=response, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - def get_zone(self, - request: Optional[Union[service.GetZoneRequest, dict]] = None, - *, - name: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> resources.Zone: - r"""Retrieves a zone resource. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. 
- # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import dataplex_v1 - - def sample_get_zone(): - # Create a client - client = dataplex_v1.DataplexServiceClient() - - # Initialize request argument(s) - request = dataplex_v1.GetZoneRequest( - name="name_value", - ) - - # Make the request - response = client.get_zone(request=request) - - # Handle the response - print(response) - - Args: - request (Union[google.cloud.dataplex_v1.types.GetZoneRequest, dict]): - The request object. Get zone request. - name (str): - Required. The resource name of the zone: - ``projects/{project_number}/locations/{location_id}/lakes/{lake_id}/zones/{zone_id}``. - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.cloud.dataplex_v1.types.Zone: - A zone represents a logical group of - related assets within a lake. A zone can - be used to map to organizational - structure or represent stages of data - readiness from raw to curated. It - provides managing behavior that is - shared or inherited by all contained - assets. - - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([name]) - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, service.GetZoneRequest): - request = service.GetZoneRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.get_zone] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Validate the universe domain. - self._validate_universe_domain() - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - def list_zone_actions(self, - request: Optional[Union[service.ListZoneActionsRequest, dict]] = None, - *, - parent: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> pagers.ListZoneActionsPager: - r"""Lists action resources in a zone. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. 
- # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import dataplex_v1 - - def sample_list_zone_actions(): - # Create a client - client = dataplex_v1.DataplexServiceClient() - - # Initialize request argument(s) - request = dataplex_v1.ListZoneActionsRequest( - parent="parent_value", - ) - - # Make the request - page_result = client.list_zone_actions(request=request) - - # Handle the response - for response in page_result: - print(response) - - Args: - request (Union[google.cloud.dataplex_v1.types.ListZoneActionsRequest, dict]): - The request object. List zone actions request. - parent (str): - Required. The resource name of the parent zone: - ``projects/{project_number}/locations/{location_id}/lakes/{lake_id}/zones/{zone_id}``. - - This corresponds to the ``parent`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.cloud.dataplex_v1.services.dataplex_service.pagers.ListZoneActionsPager: - List actions response. - - Iterating over this object will yield - results and resolve additional pages - automatically. - - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([parent]) - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, service.ListZoneActionsRequest): - request = service.ListZoneActionsRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if parent is not None: - request.parent = parent - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.list_zone_actions] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), - ) - - # Validate the universe domain. - self._validate_universe_domain() - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # This method is paged; wrap the response in a pager, which provides - # an `__iter__` convenience method. - response = pagers.ListZoneActionsPager( - method=rpc, - request=request, - response=response, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. 
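- # The pager fetches additional pages transparently; for example
- # (``zone_name`` is illustrative):
- #
- #   for action in client.list_zone_actions(parent=zone_name):
- #       print(action.category)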
- return response - - def create_asset(self, - request: Optional[Union[service.CreateAssetRequest, dict]] = None, - *, - parent: Optional[str] = None, - asset: Optional[resources.Asset] = None, - asset_id: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> operation.Operation: - r"""Creates an asset resource. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import dataplex_v1 - - def sample_create_asset(): - # Create a client - client = dataplex_v1.DataplexServiceClient() - - # Initialize request argument(s) - asset = dataplex_v1.Asset() - asset.resource_spec.type_ = "BIGQUERY_DATASET" - - request = dataplex_v1.CreateAssetRequest( - parent="parent_value", - asset_id="asset_id_value", - asset=asset, - ) - - # Make the request - operation = client.create_asset(request=request) - - print("Waiting for operation to complete...") - - response = operation.result() - - # Handle the response - print(response) - - Args: - request (Union[google.cloud.dataplex_v1.types.CreateAssetRequest, dict]): - The request object. Create asset request. - parent (str): - Required. The resource name of the parent zone: - ``projects/{project_number}/locations/{location_id}/lakes/{lake_id}/zones/{zone_id}``. - - This corresponds to the ``parent`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - asset (google.cloud.dataplex_v1.types.Asset): - Required. Asset resource. - This corresponds to the ``asset`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - asset_id (str): - Required. Asset identifier. This ID will be used to - generate names such as table names when publishing - metadata to Hive Metastore and BigQuery. - - - Must contain only lowercase letters, numbers and - hyphens. - - Must start with a letter. - - Must end with a number or a letter. - - Must be between 1-63 characters. - - Must be unique within the zone. - - This corresponds to the ``asset_id`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.api_core.operation.Operation: - An object representing a long-running operation. - - The result type for the operation will be :class:`google.cloud.dataplex_v1.types.Asset` An asset represents a cloud resource that is being managed within a lake as a - member of a zone. - - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. 
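- # For example, the flattened equivalent of the sample above
- # (identifiers are illustrative):
- #
- #   asset = dataplex_v1.Asset(
- #       resource_spec=dataplex_v1.Asset.ResourceSpec(
- #           type_="BIGQUERY_DATASET"),
- #   )
- #   client.create_asset(parent=zone_name, asset=asset,
- #                       asset_id="my-dataset").result()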
- has_flattened_params = any([parent, asset, asset_id]) - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, service.CreateAssetRequest): - request = service.CreateAssetRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if parent is not None: - request.parent = parent - if asset is not None: - request.asset = asset - if asset_id is not None: - request.asset_id = asset_id - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.create_asset] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), - ) - - # Validate the universe domain. - self._validate_universe_domain() - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Wrap the response in an operation future. - response = operation.from_gapic( - response, - self._transport.operations_client, - resources.Asset, - metadata_type=service.OperationMetadata, - ) - - # Done; return the response. - return response - - def update_asset(self, - request: Optional[Union[service.UpdateAssetRequest, dict]] = None, - *, - asset: Optional[resources.Asset] = None, - update_mask: Optional[field_mask_pb2.FieldMask] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> operation.Operation: - r"""Updates an asset resource. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import dataplex_v1 - - def sample_update_asset(): - # Create a client - client = dataplex_v1.DataplexServiceClient() - - # Initialize request argument(s) - asset = dataplex_v1.Asset() - asset.resource_spec.type_ = "BIGQUERY_DATASET" - - request = dataplex_v1.UpdateAssetRequest( - asset=asset, - ) - - # Make the request - operation = client.update_asset(request=request) - - print("Waiting for operation to complete...") - - response = operation.result() - - # Handle the response - print(response) - - Args: - request (Union[google.cloud.dataplex_v1.types.UpdateAssetRequest, dict]): - The request object. Update asset request. - asset (google.cloud.dataplex_v1.types.Asset): - Required. Update description. Only fields specified in - ``update_mask`` are updated. - - This corresponds to the ``asset`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - update_mask (google.protobuf.field_mask_pb2.FieldMask): - Required. Mask of fields to update. - This corresponds to the ``update_mask`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. 
- retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.api_core.operation.Operation: - An object representing a long-running operation. - - The result type for the operation will be :class:`google.cloud.dataplex_v1.types.Asset` An asset represents a cloud resource that is being managed within a lake as a - member of a zone. - - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([asset, update_mask]) - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, service.UpdateAssetRequest): - request = service.UpdateAssetRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if asset is not None: - request.asset = asset - if update_mask is not None: - request.update_mask = update_mask - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.update_asset] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("asset.name", request.asset.name), - )), - ) - - # Validate the universe domain. - self._validate_universe_domain() - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Wrap the response in an operation future. - response = operation.from_gapic( - response, - self._transport.operations_client, - resources.Asset, - metadata_type=service.OperationMetadata, - ) - - # Done; return the response. - return response - - def delete_asset(self, - request: Optional[Union[service.DeleteAssetRequest, dict]] = None, - *, - name: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> operation.Operation: - r"""Deletes an asset resource. The referenced storage - resource is detached (default) or deleted based on the - associated Lifecycle policy. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. 
- # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import dataplex_v1 - - def sample_delete_asset(): - # Create a client - client = dataplex_v1.DataplexServiceClient() - - # Initialize request argument(s) - request = dataplex_v1.DeleteAssetRequest( - name="name_value", - ) - - # Make the request - operation = client.delete_asset(request=request) - - print("Waiting for operation to complete...") - - response = operation.result() - - # Handle the response - print(response) - - Args: - request (Union[google.cloud.dataplex_v1.types.DeleteAssetRequest, dict]): - The request object. Delete asset request. - name (str): - Required. The resource name of the asset: - ``projects/{project_number}/locations/{location_id}/lakes/{lake_id}/zones/{zone_id}/assets/{asset_id}``. - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.api_core.operation.Operation: - An object representing a long-running operation. - - The result type for the operation will be :class:`google.protobuf.empty_pb2.Empty` A generic empty message that you can re-use to avoid defining duplicated - empty messages in your APIs. A typical example is to - use it as the request or the response type of an API - method. For instance: - - service Foo { - rpc Bar(google.protobuf.Empty) returns - (google.protobuf.Empty); - - } - - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([name]) - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, service.DeleteAssetRequest): - request = service.DeleteAssetRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.delete_asset] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Validate the universe domain. - self._validate_universe_domain() - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Wrap the response in an operation future. - response = operation.from_gapic( - response, - self._transport.operations_client, - empty_pb2.Empty, - metadata_type=service.OperationMetadata, - ) - - # Done; return the response. 
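- # Calling ``result()`` on the returned future blocks until the
- # deletion finishes; for example (``asset_name`` is illustrative):
- #
- #   client.delete_asset(name=asset_name).result()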
- return response - - def list_assets(self, - request: Optional[Union[service.ListAssetsRequest, dict]] = None, - *, - parent: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> pagers.ListAssetsPager: - r"""Lists asset resources in a zone. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import dataplex_v1 - - def sample_list_assets(): - # Create a client - client = dataplex_v1.DataplexServiceClient() - - # Initialize request argument(s) - request = dataplex_v1.ListAssetsRequest( - parent="parent_value", - ) - - # Make the request - page_result = client.list_assets(request=request) - - # Handle the response - for response in page_result: - print(response) - - Args: - request (Union[google.cloud.dataplex_v1.types.ListAssetsRequest, dict]): - The request object. List assets request. - parent (str): - Required. The resource name of the parent zone: - ``projects/{project_number}/locations/{location_id}/lakes/{lake_id}/zones/{zone_id}``. - - This corresponds to the ``parent`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.cloud.dataplex_v1.services.dataplex_service.pagers.ListAssetsPager: - List assets response. - - Iterating over this object will yield - results and resolve additional pages - automatically. - - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([parent]) - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, service.ListAssetsRequest): - request = service.ListAssetsRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if parent is not None: - request.parent = parent - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.list_assets] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), - ) - - # Validate the universe domain. - self._validate_universe_domain() - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # This method is paged; wrap the response in a pager, which provides - # an `__iter__` convenience method. 
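- # Besides item-by-item iteration, the pager exposes page-wise
- # iteration; for example (``zone_name`` is illustrative):
- #
- #   for page in client.list_assets(parent=zone_name).pages:
- #       print(len(page.assets))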
- response = pagers.ListAssetsPager( - method=rpc, - request=request, - response=response, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - def get_asset(self, - request: Optional[Union[service.GetAssetRequest, dict]] = None, - *, - name: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> resources.Asset: - r"""Retrieves an asset resource. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import dataplex_v1 - - def sample_get_asset(): - # Create a client - client = dataplex_v1.DataplexServiceClient() - - # Initialize request argument(s) - request = dataplex_v1.GetAssetRequest( - name="name_value", - ) - - # Make the request - response = client.get_asset(request=request) - - # Handle the response - print(response) - - Args: - request (Union[google.cloud.dataplex_v1.types.GetAssetRequest, dict]): - The request object. Get asset request. - name (str): - Required. The resource name of the asset: - ``projects/{project_number}/locations/{location_id}/lakes/{lake_id}/zones/{zone_id}/assets/{asset_id}``. - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.cloud.dataplex_v1.types.Asset: - An asset represents a cloud resource - that is being managed within a lake as a - member of a zone. - - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([name]) - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, service.GetAssetRequest): - request = service.GetAssetRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.get_asset] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Validate the universe domain. - self._validate_universe_domain() - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. 
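- # For example, the flattened form (resource name is illustrative):
- #
- #   asset = client.get_asset(
- #       name="projects/123/locations/us-central1/lakes/my-lake"
- #            "/zones/my-zone/assets/my-asset")
- #   print(asset.resource_spec.type_)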
- return response - - def list_asset_actions(self, - request: Optional[Union[service.ListAssetActionsRequest, dict]] = None, - *, - parent: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> pagers.ListAssetActionsPager: - r"""Lists action resources in an asset. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import dataplex_v1 - - def sample_list_asset_actions(): - # Create a client - client = dataplex_v1.DataplexServiceClient() - - # Initialize request argument(s) - request = dataplex_v1.ListAssetActionsRequest( - parent="parent_value", - ) - - # Make the request - page_result = client.list_asset_actions(request=request) - - # Handle the response - for response in page_result: - print(response) - - Args: - request (Union[google.cloud.dataplex_v1.types.ListAssetActionsRequest, dict]): - The request object. List asset actions request. - parent (str): - Required. The resource name of the parent asset: - ``projects/{project_number}/locations/{location_id}/lakes/{lake_id}/zones/{zone_id}/assets/{asset_id}``. - - This corresponds to the ``parent`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.cloud.dataplex_v1.services.dataplex_service.pagers.ListAssetActionsPager: - List actions response. - - Iterating over this object will yield - results and resolve additional pages - automatically. - - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([parent]) - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, service.ListAssetActionsRequest): - request = service.ListAssetActionsRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if parent is not None: - request.parent = parent - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.list_asset_actions] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), - ) - - # Validate the universe domain. - self._validate_universe_domain() - - # Send the request. 
- response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # This method is paged; wrap the response in a pager, which provides - # an `__iter__` convenience method. - response = pagers.ListAssetActionsPager( - method=rpc, - request=request, - response=response, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - def create_task(self, - request: Optional[Union[service.CreateTaskRequest, dict]] = None, - *, - parent: Optional[str] = None, - task: Optional[tasks.Task] = None, - task_id: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> operation.Operation: - r"""Creates a task resource within a lake. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import dataplex_v1 - - def sample_create_task(): - # Create a client - client = dataplex_v1.DataplexServiceClient() - - # Initialize request argument(s) - task = dataplex_v1.Task() - task.spark.main_jar_file_uri = "main_jar_file_uri_value" - task.trigger_spec.schedule = "schedule_value" - task.trigger_spec.type_ = "RECURRING" - task.execution_spec.service_account = "service_account_value" - - request = dataplex_v1.CreateTaskRequest( - parent="parent_value", - task_id="task_id_value", - task=task, - ) - - # Make the request - operation = client.create_task(request=request) - - print("Waiting for operation to complete...") - - response = operation.result() - - # Handle the response - print(response) - - Args: - request (Union[google.cloud.dataplex_v1.types.CreateTaskRequest, dict]): - The request object. Create task request. - parent (str): - Required. The resource name of the parent lake: - ``projects/{project_number}/locations/{location_id}/lakes/{lake_id}``. - - This corresponds to the ``parent`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - task (google.cloud.dataplex_v1.types.Task): - Required. Task resource. - This corresponds to the ``task`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - task_id (str): - Required. Task identifier. - This corresponds to the ``task_id`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.api_core.operation.Operation: - An object representing a long-running operation. - - The result type for the operation will be - :class:`google.cloud.dataplex_v1.types.Task` A task - represents a user-visible job. - - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. 
- has_flattened_params = any([parent, task, task_id]) - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, service.CreateTaskRequest): - request = service.CreateTaskRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if parent is not None: - request.parent = parent - if task is not None: - request.task = task - if task_id is not None: - request.task_id = task_id - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.create_task] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), - ) - - # Validate the universe domain. - self._validate_universe_domain() - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Wrap the response in an operation future. - response = operation.from_gapic( - response, - self._transport.operations_client, - tasks.Task, - metadata_type=service.OperationMetadata, - ) - - # Done; return the response. - return response - - def update_task(self, - request: Optional[Union[service.UpdateTaskRequest, dict]] = None, - *, - task: Optional[tasks.Task] = None, - update_mask: Optional[field_mask_pb2.FieldMask] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> operation.Operation: - r"""Update the task resource. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import dataplex_v1 - - def sample_update_task(): - # Create a client - client = dataplex_v1.DataplexServiceClient() - - # Initialize request argument(s) - task = dataplex_v1.Task() - task.spark.main_jar_file_uri = "main_jar_file_uri_value" - task.trigger_spec.schedule = "schedule_value" - task.trigger_spec.type_ = "RECURRING" - task.execution_spec.service_account = "service_account_value" - - request = dataplex_v1.UpdateTaskRequest( - task=task, - ) - - # Make the request - operation = client.update_task(request=request) - - print("Waiting for operation to complete...") - - response = operation.result() - - # Handle the response - print(response) - - Args: - request (Union[google.cloud.dataplex_v1.types.UpdateTaskRequest, dict]): - The request object. Update task request. - task (google.cloud.dataplex_v1.types.Task): - Required. Update description. Only fields specified in - ``update_mask`` are updated. - - This corresponds to the ``task`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - update_mask (google.protobuf.field_mask_pb2.FieldMask): - Required. Mask of fields to update. 
- This corresponds to the ``update_mask`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.api_core.operation.Operation: - An object representing a long-running operation. - - The result type for the operation will be - :class:`google.cloud.dataplex_v1.types.Task` A task - represents a user-visible job. - - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([task, update_mask]) - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, service.UpdateTaskRequest): - request = service.UpdateTaskRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if task is not None: - request.task = task - if update_mask is not None: - request.update_mask = update_mask - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.update_task] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("task.name", request.task.name), - )), - ) - - # Validate the universe domain. - self._validate_universe_domain() - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Wrap the response in an operation future. - response = operation.from_gapic( - response, - self._transport.operations_client, - tasks.Task, - metadata_type=service.OperationMetadata, - ) - - # Done; return the response. - return response - - def delete_task(self, - request: Optional[Union[service.DeleteTaskRequest, dict]] = None, - *, - name: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> operation.Operation: - r"""Delete the task resource. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. 
-            # - It may require specifying regional endpoints when creating the service
-            #   client as shown in:
-            #   https://googleapis.dev/python/google-api-core/latest/client_options.html
-            from google.cloud import dataplex_v1
-
-            def sample_delete_task():
-                # Create a client
-                client = dataplex_v1.DataplexServiceClient()
-
-                # Initialize request argument(s)
-                request = dataplex_v1.DeleteTaskRequest(
-                    name="name_value",
-                )
-
-                # Make the request
-                operation = client.delete_task(request=request)
-
-                print("Waiting for operation to complete...")
-
-                response = operation.result()
-
-                # Handle the response
-                print(response)
-
-        Args:
-            request (Union[google.cloud.dataplex_v1.types.DeleteTaskRequest, dict]):
-                The request object. Delete task request.
-            name (str):
-                Required. The resource name of the task:
-                ``projects/{project_number}/locations/{location_id}/lakes/{lake_id}/tasks/{task_id}``.
-
-                This corresponds to the ``name`` field
-                on the ``request`` instance; if ``request`` is provided, this
-                should not be set.
-            retry (google.api_core.retry.Retry): Designation of what errors, if any,
-                should be retried.
-            timeout (float): The timeout for this request.
-            metadata (Sequence[Tuple[str, str]]): Strings which should be
-                sent along with the request as metadata.
-
-        Returns:
-            google.api_core.operation.Operation:
-                An object representing a long-running operation.
-
-                The result type for the operation will be :class:`google.protobuf.empty_pb2.Empty` A generic empty message that you can re-use to avoid defining duplicated
-                   empty messages in your APIs. A typical example is to
-                   use it as the request or the response type of an API
-                   method. For instance:
-
-                      service Foo {
-                         rpc Bar(google.protobuf.Empty) returns
-                         (google.protobuf.Empty);
-
-                      }
-
-        """
-        # Create or coerce a protobuf request object.
-        # - Quick check: If we got a request object, we should *not* have
-        #   gotten any keyword arguments that map to the request.
-        has_flattened_params = any([name])
-        if request is not None and has_flattened_params:
-            raise ValueError('If the `request` argument is set, then none of '
-                             'the individual field arguments should be set.')
-
-        # - Use the request object if provided (there's no risk of modifying the input as
-        #   there are no flattened fields), or create one.
-        if not isinstance(request, service.DeleteTaskRequest):
-            request = service.DeleteTaskRequest(request)
-        # If we have keyword arguments corresponding to fields on the
-        # request, apply these.
-        if name is not None:
-            request.name = name
-
-        # Wrap the RPC method; this adds retry and timeout information,
-        # and friendly error handling.
-        rpc = self._transport._wrapped_methods[self._transport.delete_task]
-
-        # Certain fields should be provided within the metadata header;
-        # add these here.
-        metadata = tuple(metadata) + (
-            gapic_v1.routing_header.to_grpc_metadata((
-                ("name", request.name),
-            )),
-        )
-
-        # Validate the universe domain.
-        self._validate_universe_domain()
-
-        # Send the request.
-        response = rpc(
-            request,
-            retry=retry,
-            timeout=timeout,
-            metadata=metadata,
-        )
-
-        # Wrap the response in an operation future.
-        response = operation.from_gapic(
-            response,
-            self._transport.operations_client,
-            empty_pb2.Empty,
-            metadata_type=service.OperationMetadata,
-        )
-
-        # Done; return the response.
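Because ``delete_task`` returns a long-running operation whose result type is ``Empty``, callers typically block on ``operation.result()`` purely for its side effect. A hedged sketch follows; the task name is a placeholder and the timeout value is arbitrary:

.. code-block:: python

    from google.cloud import dataplex_v1

    client = dataplex_v1.DataplexServiceClient()

    # Placeholder resource name; substitute real project/location/lake/task IDs.
    task_name = (
        "projects/my-project/locations/us-central1"
        "/lakes/my-lake/tasks/my-task"
    )

    operation = client.delete_task(name=task_name)

    # The operation future resolves to google.protobuf.empty_pb2.Empty, so
    # result() is awaited only for completion (or to surface a failure).
    operation.result(timeout=300)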
- return response - - def list_tasks(self, - request: Optional[Union[service.ListTasksRequest, dict]] = None, - *, - parent: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> pagers.ListTasksPager: - r"""Lists tasks under the given lake. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import dataplex_v1 - - def sample_list_tasks(): - # Create a client - client = dataplex_v1.DataplexServiceClient() - - # Initialize request argument(s) - request = dataplex_v1.ListTasksRequest( - parent="parent_value", - ) - - # Make the request - page_result = client.list_tasks(request=request) - - # Handle the response - for response in page_result: - print(response) - - Args: - request (Union[google.cloud.dataplex_v1.types.ListTasksRequest, dict]): - The request object. List tasks request. - parent (str): - Required. The resource name of the parent lake: - ``projects/{project_number}/locations/{location_id}/lakes/{lake_id}``. - - This corresponds to the ``parent`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.cloud.dataplex_v1.services.dataplex_service.pagers.ListTasksPager: - List tasks response. - - Iterating over this object will yield - results and resolve additional pages - automatically. - - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([parent]) - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, service.ListTasksRequest): - request = service.ListTasksRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if parent is not None: - request.parent = parent - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.list_tasks] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), - ) - - # Validate the universe domain. - self._validate_universe_domain() - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # This method is paged; wrap the response in a pager, which provides - # an `__iter__` convenience method. 
-        response = pagers.ListTasksPager(
-            method=rpc,
-            request=request,
-            response=response,
-            retry=retry,
-            timeout=timeout,
-            metadata=metadata,
-        )
-
-        # Done; return the response.
-        return response
-
-    def get_task(self,
-            request: Optional[Union[service.GetTaskRequest, dict]] = None,
-            *,
-            name: Optional[str] = None,
-            retry: OptionalRetry = gapic_v1.method.DEFAULT,
-            timeout: Union[float, object] = gapic_v1.method.DEFAULT,
-            metadata: Sequence[Tuple[str, str]] = (),
-            ) -> tasks.Task:
-        r"""Get task resource.
-
-        .. code-block:: python
-
-            # This snippet has been automatically generated and should be regarded as a
-            # code template only.
-            # It will require modifications to work:
-            # - It may require correct/in-range values for request initialization.
-            # - It may require specifying regional endpoints when creating the service
-            #   client as shown in:
-            #   https://googleapis.dev/python/google-api-core/latest/client_options.html
-            from google.cloud import dataplex_v1
-
-            def sample_get_task():
-                # Create a client
-                client = dataplex_v1.DataplexServiceClient()
-
-                # Initialize request argument(s)
-                request = dataplex_v1.GetTaskRequest(
-                    name="name_value",
-                )
-
-                # Make the request
-                response = client.get_task(request=request)
-
-                # Handle the response
-                print(response)
-
-        Args:
-            request (Union[google.cloud.dataplex_v1.types.GetTaskRequest, dict]):
-                The request object. Get task request.
-            name (str):
-                Required. The resource name of the task:
-                ``projects/{project_number}/locations/{location_id}/lakes/{lake_id}/tasks/{task_id}``.
-
-                This corresponds to the ``name`` field
-                on the ``request`` instance; if ``request`` is provided, this
-                should not be set.
-            retry (google.api_core.retry.Retry): Designation of what errors, if any,
-                should be retried.
-            timeout (float): The timeout for this request.
-            metadata (Sequence[Tuple[str, str]]): Strings which should be
-                sent along with the request as metadata.
-
-        Returns:
-            google.cloud.dataplex_v1.types.Task:
-                A task represents a user-visible job.
-        """
-        # Create or coerce a protobuf request object.
-        # - Quick check: If we got a request object, we should *not* have
-        #   gotten any keyword arguments that map to the request.
-        has_flattened_params = any([name])
-        if request is not None and has_flattened_params:
-            raise ValueError('If the `request` argument is set, then none of '
-                             'the individual field arguments should be set.')
-
-        # - Use the request object if provided (there's no risk of modifying the input as
-        #   there are no flattened fields), or create one.
-        if not isinstance(request, service.GetTaskRequest):
-            request = service.GetTaskRequest(request)
-        # If we have keyword arguments corresponding to fields on the
-        # request, apply these.
-        if name is not None:
-            request.name = name
-
-        # Wrap the RPC method; this adds retry and timeout information,
-        # and friendly error handling.
-        rpc = self._transport._wrapped_methods[self._transport.get_task]
-
-        # Certain fields should be provided within the metadata header;
-        # add these here.
-        metadata = tuple(metadata) + (
-            gapic_v1.routing_header.to_grpc_metadata((
-                ("name", request.name),
-            )),
-        )
-
-        # Validate the universe domain.
-        self._validate_universe_domain()
-
-        # Send the request.
-        response = rpc(
-            request,
-            retry=retry,
-            timeout=timeout,
-            metadata=metadata,
-        )
-
-        # Done; return the response.
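All of the ``list_*`` methods here return a pager that transparently fetches subsequent pages. Assuming the standard GAPIC pager surface (per-item iteration via ``__iter__`` as shown above, plus per-page iteration via the ``pages`` property), usage looks like the following sketch; the parent path is a placeholder:

.. code-block:: python

    from google.cloud import dataplex_v1

    client = dataplex_v1.DataplexServiceClient()
    parent = "projects/my-project/locations/us-central1/lakes/my-lake"  # placeholder

    # Item by item: the pager issues additional ListTasksRequests as needed.
    for task in client.list_tasks(parent=parent):
        print(task.name)

    # Page by page, e.g. to inspect the raw ListTasksResponse messages.
    for page in client.list_tasks(parent=parent).pages:
        print(len(page.tasks))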
-        return response
-
-    def list_jobs(self,
-            request: Optional[Union[service.ListJobsRequest, dict]] = None,
-            *,
-            parent: Optional[str] = None,
-            retry: OptionalRetry = gapic_v1.method.DEFAULT,
-            timeout: Union[float, object] = gapic_v1.method.DEFAULT,
-            metadata: Sequence[Tuple[str, str]] = (),
-            ) -> pagers.ListJobsPager:
-        r"""Lists Jobs under the given task.
-
-        .. code-block:: python
-
-            # This snippet has been automatically generated and should be regarded as a
-            # code template only.
-            # It will require modifications to work:
-            # - It may require correct/in-range values for request initialization.
-            # - It may require specifying regional endpoints when creating the service
-            #   client as shown in:
-            #   https://googleapis.dev/python/google-api-core/latest/client_options.html
-            from google.cloud import dataplex_v1
-
-            def sample_list_jobs():
-                # Create a client
-                client = dataplex_v1.DataplexServiceClient()
-
-                # Initialize request argument(s)
-                request = dataplex_v1.ListJobsRequest(
-                    parent="parent_value",
-                )
-
-                # Make the request
-                page_result = client.list_jobs(request=request)
-
-                # Handle the response
-                for response in page_result:
-                    print(response)
-
-        Args:
-            request (Union[google.cloud.dataplex_v1.types.ListJobsRequest, dict]):
-                The request object. List jobs request.
-            parent (str):
-                Required. The resource name of the parent task:
-                ``projects/{project_number}/locations/{location_id}/lakes/{lake_id}/tasks/{task_id}``.
-
-                This corresponds to the ``parent`` field
-                on the ``request`` instance; if ``request`` is provided, this
-                should not be set.
-            retry (google.api_core.retry.Retry): Designation of what errors, if any,
-                should be retried.
-            timeout (float): The timeout for this request.
-            metadata (Sequence[Tuple[str, str]]): Strings which should be
-                sent along with the request as metadata.
-
-        Returns:
-            google.cloud.dataplex_v1.services.dataplex_service.pagers.ListJobsPager:
-                List jobs response.
-
-                Iterating over this object will yield
-                results and resolve additional pages
-                automatically.
-
-        """
-        # Create or coerce a protobuf request object.
-        # - Quick check: If we got a request object, we should *not* have
-        #   gotten any keyword arguments that map to the request.
-        has_flattened_params = any([parent])
-        if request is not None and has_flattened_params:
-            raise ValueError('If the `request` argument is set, then none of '
-                             'the individual field arguments should be set.')
-
-        # - Use the request object if provided (there's no risk of modifying the input as
-        #   there are no flattened fields), or create one.
-        if not isinstance(request, service.ListJobsRequest):
-            request = service.ListJobsRequest(request)
-        # If we have keyword arguments corresponding to fields on the
-        # request, apply these.
-        if parent is not None:
-            request.parent = parent
-
-        # Wrap the RPC method; this adds retry and timeout information,
-        # and friendly error handling.
-        rpc = self._transport._wrapped_methods[self._transport.list_jobs]
-
-        # Certain fields should be provided within the metadata header;
-        # add these here.
-        metadata = tuple(metadata) + (
-            gapic_v1.routing_header.to_grpc_metadata((
-                ("parent", request.parent),
-            )),
-        )
-
-        # Validate the universe domain.
-        self._validate_universe_domain()
-
-        # Send the request.
-        response = rpc(
-            request,
-            retry=retry,
-            timeout=timeout,
-            metadata=metadata,
-        )
-
-        # This method is paged; wrap the response in a pager, which provides
-        # an `__iter__` convenience method.
- response = pagers.ListJobsPager( - method=rpc, - request=request, - response=response, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - def run_task(self, - request: Optional[Union[service.RunTaskRequest, dict]] = None, - *, - name: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> service.RunTaskResponse: - r"""Run an on demand execution of a Task. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import dataplex_v1 - - def sample_run_task(): - # Create a client - client = dataplex_v1.DataplexServiceClient() - - # Initialize request argument(s) - request = dataplex_v1.RunTaskRequest( - name="name_value", - ) - - # Make the request - response = client.run_task(request=request) - - # Handle the response - print(response) - - Args: - request (Union[google.cloud.dataplex_v1.types.RunTaskRequest, dict]): - The request object. - name (str): - Required. The resource name of the task: - ``projects/{project_number}/locations/{location_id}/lakes/{lake_id}/tasks/{task_id}``. - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.cloud.dataplex_v1.types.RunTaskResponse: - - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([name]) - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, service.RunTaskRequest): - request = service.RunTaskRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.run_task] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Validate the universe domain. - self._validate_universe_domain() - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. 
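``run_task`` triggers an immediate, on-demand execution rather than waiting for the task's schedule, and returns a ``RunTaskResponse`` synchronously (no operation future). One way to observe the resulting execution is to list jobs under the same task afterwards, since ``list_jobs`` (above) takes the task as its parent. A sketch with placeholder names:

.. code-block:: python

    from google.cloud import dataplex_v1

    client = dataplex_v1.DataplexServiceClient()
    task_name = (
        "projects/my-project/locations/us-central1"
        "/lakes/my-lake/tasks/my-task"  # placeholder IDs
    )

    response = client.run_task(name=task_name)
    print(response)

    # The triggered execution appears as a Job under the task.
    for job in client.list_jobs(parent=task_name):
        print(job.name)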
- return response - - def get_job(self, - request: Optional[Union[service.GetJobRequest, dict]] = None, - *, - name: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> tasks.Job: - r"""Get job resource. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import dataplex_v1 - - def sample_get_job(): - # Create a client - client = dataplex_v1.DataplexServiceClient() - - # Initialize request argument(s) - request = dataplex_v1.GetJobRequest( - name="name_value", - ) - - # Make the request - response = client.get_job(request=request) - - # Handle the response - print(response) - - Args: - request (Union[google.cloud.dataplex_v1.types.GetJobRequest, dict]): - The request object. Get job request. - name (str): - Required. The resource name of the job: - ``projects/{project_number}/locations/{location_id}/lakes/{lake_id}/tasks/{task_id}/jobs/{job_id}``. - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.cloud.dataplex_v1.types.Job: - A job represents an instance of a - task. - - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([name]) - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, service.GetJobRequest): - request = service.GetJobRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.get_job] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Validate the universe domain. - self._validate_universe_domain() - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. 
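``get_job`` returns the ``Job`` synchronously, and a running job can be stopped with ``cancel_job`` (defined next), which returns ``None`` and signals failure only by raising. A sketch with a placeholder job name:

.. code-block:: python

    from google.cloud import dataplex_v1
    from google.api_core import exceptions

    client = dataplex_v1.DataplexServiceClient()
    job_name = (
        "projects/my-project/locations/us-central1"
        "/lakes/my-lake/tasks/my-task/jobs/my-job"  # placeholder IDs
    )

    job = client.get_job(name=job_name)
    print(job)

    try:
        client.cancel_job(name=job_name)  # returns None on success
    except exceptions.GoogleAPICallError as exc:
        print(f"cancel failed: {exc}")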
-        return response
-
-    def cancel_job(self,
-            request: Optional[Union[service.CancelJobRequest, dict]] = None,
-            *,
-            name: Optional[str] = None,
-            retry: OptionalRetry = gapic_v1.method.DEFAULT,
-            timeout: Union[float, object] = gapic_v1.method.DEFAULT,
-            metadata: Sequence[Tuple[str, str]] = (),
-            ) -> None:
-        r"""Cancel jobs running for the task resource.
-
-        .. code-block:: python
-
-            # This snippet has been automatically generated and should be regarded as a
-            # code template only.
-            # It will require modifications to work:
-            # - It may require correct/in-range values for request initialization.
-            # - It may require specifying regional endpoints when creating the service
-            #   client as shown in:
-            #   https://googleapis.dev/python/google-api-core/latest/client_options.html
-            from google.cloud import dataplex_v1
-
-            def sample_cancel_job():
-                # Create a client
-                client = dataplex_v1.DataplexServiceClient()
-
-                # Initialize request argument(s)
-                request = dataplex_v1.CancelJobRequest(
-                    name="name_value",
-                )
-
-                # Make the request
-                client.cancel_job(request=request)
-
-        Args:
-            request (Union[google.cloud.dataplex_v1.types.CancelJobRequest, dict]):
-                The request object. Cancel task jobs.
-            name (str):
-                Required. The resource name of the job:
-                ``projects/{project_number}/locations/{location_id}/lakes/{lake_id}/tasks/{task_id}/jobs/{job_id}``.
-
-                This corresponds to the ``name`` field
-                on the ``request`` instance; if ``request`` is provided, this
-                should not be set.
-            retry (google.api_core.retry.Retry): Designation of what errors, if any,
-                should be retried.
-            timeout (float): The timeout for this request.
-            metadata (Sequence[Tuple[str, str]]): Strings which should be
-                sent along with the request as metadata.
-        """
-        # Create or coerce a protobuf request object.
-        # - Quick check: If we got a request object, we should *not* have
-        #   gotten any keyword arguments that map to the request.
-        has_flattened_params = any([name])
-        if request is not None and has_flattened_params:
-            raise ValueError('If the `request` argument is set, then none of '
-                             'the individual field arguments should be set.')
-
-        # - Use the request object if provided (there's no risk of modifying the input as
-        #   there are no flattened fields), or create one.
-        if not isinstance(request, service.CancelJobRequest):
-            request = service.CancelJobRequest(request)
-        # If we have keyword arguments corresponding to fields on the
-        # request, apply these.
-        if name is not None:
-            request.name = name
-
-        # Wrap the RPC method; this adds retry and timeout information,
-        # and friendly error handling.
-        rpc = self._transport._wrapped_methods[self._transport.cancel_job]
-
-        # Certain fields should be provided within the metadata header;
-        # add these here.
-        metadata = tuple(metadata) + (
-            gapic_v1.routing_header.to_grpc_metadata((
-                ("name", request.name),
-            )),
-        )
-
-        # Validate the universe domain.
-        self._validate_universe_domain()
-
-        # Send the request.
-        rpc(
-            request,
-            retry=retry,
-            timeout=timeout,
-            metadata=metadata,
-        )
-
-    def create_environment(self,
-            request: Optional[Union[service.CreateEnvironmentRequest, dict]] = None,
-            *,
-            parent: Optional[str] = None,
-            environment: Optional[analyze.Environment] = None,
-            environment_id: Optional[str] = None,
-            retry: OptionalRetry = gapic_v1.method.DEFAULT,
-            timeout: Union[float, object] = gapic_v1.method.DEFAULT,
-            metadata: Sequence[Tuple[str, str]] = (),
-            ) -> operation.Operation:
-        r"""Create an environment resource.
-
-        ..
code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import dataplex_v1 - - def sample_create_environment(): - # Create a client - client = dataplex_v1.DataplexServiceClient() - - # Initialize request argument(s) - environment = dataplex_v1.Environment() - environment.infrastructure_spec.os_image.image_version = "image_version_value" - - request = dataplex_v1.CreateEnvironmentRequest( - parent="parent_value", - environment_id="environment_id_value", - environment=environment, - ) - - # Make the request - operation = client.create_environment(request=request) - - print("Waiting for operation to complete...") - - response = operation.result() - - # Handle the response - print(response) - - Args: - request (Union[google.cloud.dataplex_v1.types.CreateEnvironmentRequest, dict]): - The request object. Create environment request. - parent (str): - Required. The resource name of the parent lake: - ``projects/{project_id}/locations/{location_id}/lakes/{lake_id}``. - - This corresponds to the ``parent`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - environment (google.cloud.dataplex_v1.types.Environment): - Required. Environment resource. - This corresponds to the ``environment`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - environment_id (str): - Required. Environment identifier. - - - Must contain only lowercase letters, numbers and - hyphens. - - Must start with a letter. - - Must be between 1-63 characters. - - Must end with a number or a letter. - - Must be unique within the lake. - - This corresponds to the ``environment_id`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.api_core.operation.Operation: - An object representing a long-running operation. - - The result type for the operation will be :class:`google.cloud.dataplex_v1.types.Environment` Environment represents a user-visible compute infrastructure for analytics - within a lake. - - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([parent, environment, environment_id]) - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, service.CreateEnvironmentRequest): - request = service.CreateEnvironmentRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. 
- if parent is not None: - request.parent = parent - if environment is not None: - request.environment = environment - if environment_id is not None: - request.environment_id = environment_id - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.create_environment] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), - ) - - # Validate the universe domain. - self._validate_universe_domain() - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Wrap the response in an operation future. - response = operation.from_gapic( - response, - self._transport.operations_client, - analyze.Environment, - metadata_type=service.OperationMetadata, - ) - - # Done; return the response. - return response - - def update_environment(self, - request: Optional[Union[service.UpdateEnvironmentRequest, dict]] = None, - *, - environment: Optional[analyze.Environment] = None, - update_mask: Optional[field_mask_pb2.FieldMask] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> operation.Operation: - r"""Update the environment resource. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import dataplex_v1 - - def sample_update_environment(): - # Create a client - client = dataplex_v1.DataplexServiceClient() - - # Initialize request argument(s) - environment = dataplex_v1.Environment() - environment.infrastructure_spec.os_image.image_version = "image_version_value" - - request = dataplex_v1.UpdateEnvironmentRequest( - environment=environment, - ) - - # Make the request - operation = client.update_environment(request=request) - - print("Waiting for operation to complete...") - - response = operation.result() - - # Handle the response - print(response) - - Args: - request (Union[google.cloud.dataplex_v1.types.UpdateEnvironmentRequest, dict]): - The request object. Update environment request. - environment (google.cloud.dataplex_v1.types.Environment): - Required. Update description. Only fields specified in - ``update_mask`` are updated. - - This corresponds to the ``environment`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - update_mask (google.protobuf.field_mask_pb2.FieldMask): - Required. Mask of fields to update. - This corresponds to the ``update_mask`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.api_core.operation.Operation: - An object representing a long-running operation. 
- - The result type for the operation will be :class:`google.cloud.dataplex_v1.types.Environment` Environment represents a user-visible compute infrastructure for analytics - within a lake. - - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([environment, update_mask]) - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, service.UpdateEnvironmentRequest): - request = service.UpdateEnvironmentRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if environment is not None: - request.environment = environment - if update_mask is not None: - request.update_mask = update_mask - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.update_environment] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("environment.name", request.environment.name), - )), - ) - - # Validate the universe domain. - self._validate_universe_domain() - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Wrap the response in an operation future. - response = operation.from_gapic( - response, - self._transport.operations_client, - analyze.Environment, - metadata_type=service.OperationMetadata, - ) - - # Done; return the response. - return response - - def delete_environment(self, - request: Optional[Union[service.DeleteEnvironmentRequest, dict]] = None, - *, - name: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> operation.Operation: - r"""Delete the environment resource. All the child - resources must have been deleted before environment - deletion can be initiated. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import dataplex_v1 - - def sample_delete_environment(): - # Create a client - client = dataplex_v1.DataplexServiceClient() - - # Initialize request argument(s) - request = dataplex_v1.DeleteEnvironmentRequest( - name="name_value", - ) - - # Make the request - operation = client.delete_environment(request=request) - - print("Waiting for operation to complete...") - - response = operation.result() - - # Handle the response - print(response) - - Args: - request (Union[google.cloud.dataplex_v1.types.DeleteEnvironmentRequest, dict]): - The request object. Delete environment request. - name (str): - Required. 
The resource name of the environment: - ``projects/{project_id}/locations/{location_id}/lakes/{lake_id}/environments/{environment_id}``. - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.api_core.operation.Operation: - An object representing a long-running operation. - - The result type for the operation will be :class:`google.protobuf.empty_pb2.Empty` A generic empty message that you can re-use to avoid defining duplicated - empty messages in your APIs. A typical example is to - use it as the request or the response type of an API - method. For instance: - - service Foo { - rpc Bar(google.protobuf.Empty) returns - (google.protobuf.Empty); - - } - - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([name]) - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, service.DeleteEnvironmentRequest): - request = service.DeleteEnvironmentRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.delete_environment] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Validate the universe domain. - self._validate_universe_domain() - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Wrap the response in an operation future. - response = operation.from_gapic( - response, - self._transport.operations_client, - empty_pb2.Empty, - metadata_type=service.OperationMetadata, - ) - - # Done; return the response. - return response - - def list_environments(self, - request: Optional[Union[service.ListEnvironmentsRequest, dict]] = None, - *, - parent: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> pagers.ListEnvironmentsPager: - r"""Lists environments under the given lake. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. 
- # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import dataplex_v1 - - def sample_list_environments(): - # Create a client - client = dataplex_v1.DataplexServiceClient() - - # Initialize request argument(s) - request = dataplex_v1.ListEnvironmentsRequest( - parent="parent_value", - ) - - # Make the request - page_result = client.list_environments(request=request) - - # Handle the response - for response in page_result: - print(response) - - Args: - request (Union[google.cloud.dataplex_v1.types.ListEnvironmentsRequest, dict]): - The request object. List environments request. - parent (str): - Required. The resource name of the parent lake: - ``projects/{project_id}/locations/{location_id}/lakes/{lake_id}``. - - This corresponds to the ``parent`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.cloud.dataplex_v1.services.dataplex_service.pagers.ListEnvironmentsPager: - List environments response. - - Iterating over this object will yield - results and resolve additional pages - automatically. - - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([parent]) - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, service.ListEnvironmentsRequest): - request = service.ListEnvironmentsRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if parent is not None: - request.parent = parent - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.list_environments] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), - ) - - # Validate the universe domain. - self._validate_universe_domain() - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # This method is paged; wrap the response in a pager, which provides - # an `__iter__` convenience method. - response = pagers.ListEnvironmentsPager( - method=rpc, - request=request, - response=response, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - def get_environment(self, - request: Optional[Union[service.GetEnvironmentRequest, dict]] = None, - *, - name: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> analyze.Environment: - r"""Get environment resource. - - .. 
code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import dataplex_v1 - - def sample_get_environment(): - # Create a client - client = dataplex_v1.DataplexServiceClient() - - # Initialize request argument(s) - request = dataplex_v1.GetEnvironmentRequest( - name="name_value", - ) - - # Make the request - response = client.get_environment(request=request) - - # Handle the response - print(response) - - Args: - request (Union[google.cloud.dataplex_v1.types.GetEnvironmentRequest, dict]): - The request object. Get environment request. - name (str): - Required. The resource name of the environment: - ``projects/{project_id}/locations/{location_id}/lakes/{lake_id}/environments/{environment_id}``. - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.cloud.dataplex_v1.types.Environment: - Environment represents a user-visible - compute infrastructure for analytics - within a lake. - - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([name]) - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, service.GetEnvironmentRequest): - request = service.GetEnvironmentRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.get_environment] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Validate the universe domain. - self._validate_universe_domain() - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - def list_sessions(self, - request: Optional[Union[service.ListSessionsRequest, dict]] = None, - *, - parent: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> pagers.ListSessionsPager: - r"""Lists session resources in an environment. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. 
- # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import dataplex_v1 - - def sample_list_sessions(): - # Create a client - client = dataplex_v1.DataplexServiceClient() - - # Initialize request argument(s) - request = dataplex_v1.ListSessionsRequest( - parent="parent_value", - ) - - # Make the request - page_result = client.list_sessions(request=request) - - # Handle the response - for response in page_result: - print(response) - - Args: - request (Union[google.cloud.dataplex_v1.types.ListSessionsRequest, dict]): - The request object. List sessions request. - parent (str): - Required. The resource name of the parent environment: - ``projects/{project_number}/locations/{location_id}/lakes/{lake_id}/environment/{environment_id}``. - - This corresponds to the ``parent`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.cloud.dataplex_v1.services.dataplex_service.pagers.ListSessionsPager: - List sessions response. - - Iterating over this object will yield - results and resolve additional pages - automatically. - - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([parent]) - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, service.ListSessionsRequest): - request = service.ListSessionsRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if parent is not None: - request.parent = parent - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.list_sessions] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), - ) - - # Validate the universe domain. - self._validate_universe_domain() - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # This method is paged; wrap the response in a pager, which provides - # an `__iter__` convenience method. - response = pagers.ListSessionsPager( - method=rpc, - request=request, - response=response, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - def __enter__(self) -> "DataplexServiceClient": - return self - - def __exit__(self, type, value, traceback): - """Releases underlying transport's resources. - - .. warning:: - ONLY use as a context manager if the transport is NOT shared - with other clients! 
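``list_sessions`` above returns a ``ListSessionsPager``, so callers can iterate items directly and let the pager fetch further pages on demand. A sketch (the parent path is a placeholder; ``page_size`` is the standard optional paging field):

.. code-block:: python

    from google.cloud import dataplex_v1

    client = dataplex_v1.DataplexServiceClient()
    request = dataplex_v1.ListSessionsRequest(
        parent=("projects/my-project/locations/us-central1"
                "/lakes/my-lake/environments/my-env"),  # placeholder
        page_size=50,  # optional; additional pages are fetched lazily
    )

    for session in client.list_sessions(request=request):
        print(session.name)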
Exiting the with block will CLOSE the transport - and may cause errors in other clients! - """ - self.transport.close() - - def list_operations( - self, - request: Optional[operations_pb2.ListOperationsRequest] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> operations_pb2.ListOperationsResponse: - r"""Lists operations that match the specified filter in the request. - - Args: - request (:class:`~.operations_pb2.ListOperationsRequest`): - The request object. Request message for - `ListOperations` method. - retry (google.api_core.retry.Retry): Designation of what errors, - if any, should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - Returns: - ~.operations_pb2.ListOperationsResponse: - Response message for ``ListOperations`` method. - """ - # Create or coerce a protobuf request object. - # The request isn't a proto-plus wrapped type, - # so it must be constructed via keyword expansion. - if isinstance(request, dict): - request = operations_pb2.ListOperationsRequest(**request) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.list_operations] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata( - (("name", request.name),)), - ) - - # Validate the universe domain. - self._validate_universe_domain() - - # Send the request. - response = rpc( - request, retry=retry, timeout=timeout, metadata=metadata,) - - # Done; return the response. - return response - - def get_operation( - self, - request: Optional[operations_pb2.GetOperationRequest] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> operations_pb2.Operation: - r"""Gets the latest state of a long-running operation. - - Args: - request (:class:`~.operations_pb2.GetOperationRequest`): - The request object. Request message for - `GetOperation` method. - retry (google.api_core.retry.Retry): Designation of what errors, - if any, should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - Returns: - ~.operations_pb2.Operation: - An ``Operation`` object. - """ - # Create or coerce a protobuf request object. - # The request isn't a proto-plus wrapped type, - # so it must be constructed via keyword expansion. - if isinstance(request, dict): - request = operations_pb2.GetOperationRequest(**request) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.get_operation] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata( - (("name", request.name),)), - ) - - # Validate the universe domain. - self._validate_universe_domain() - - # Send the request. - response = rpc( - request, retry=retry, timeout=timeout, metadata=metadata,) - - # Done; return the response. 
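Per the ``__exit__`` warning above, the context-manager form closes the transport on exit, so it should only wrap a client whose transport is not shared. A sketch using the flattened ``list_environments`` call shown earlier (the parent path is a placeholder):

.. code-block:: python

    from google.cloud import dataplex_v1

    # The transport is closed when the block exits; do not reuse it.
    with dataplex_v1.DataplexServiceClient() as client:
        parent = "projects/my-project/locations/us-central1/lakes/my-lake"  # placeholder
        for env in client.list_environments(parent=parent):
            print(env.name)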
- return response - - def delete_operation( - self, - request: Optional[operations_pb2.DeleteOperationRequest] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> None: - r"""Deletes a long-running operation. - - This method indicates that the client is no longer interested - in the operation result. It does not cancel the operation. - If the server doesn't support this method, it returns - `google.rpc.Code.UNIMPLEMENTED`. - - Args: - request (:class:`~.operations_pb2.DeleteOperationRequest`): - The request object. Request message for - `DeleteOperation` method. - retry (google.api_core.retry.Retry): Designation of what errors, - if any, should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - Returns: - None - """ - # Create or coerce a protobuf request object. - # The request isn't a proto-plus wrapped type, - # so it must be constructed via keyword expansion. - if isinstance(request, dict): - request = operations_pb2.DeleteOperationRequest(**request) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.delete_operation] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata( - (("name", request.name),)), - ) - - # Validate the universe domain. - self._validate_universe_domain() - - # Send the request. - rpc(request, retry=retry, timeout=timeout, metadata=metadata,) - - def cancel_operation( - self, - request: Optional[operations_pb2.CancelOperationRequest] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> None: - r"""Starts asynchronous cancellation on a long-running operation. - - The server makes a best effort to cancel the operation, but success - is not guaranteed. If the server doesn't support this method, it returns - `google.rpc.Code.UNIMPLEMENTED`. - - Args: - request (:class:`~.operations_pb2.CancelOperationRequest`): - The request object. Request message for - `CancelOperation` method. - retry (google.api_core.retry.Retry): Designation of what errors, - if any, should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - Returns: - None - """ - # Create or coerce a protobuf request object. - # The request isn't a proto-plus wrapped type, - # so it must be constructed via keyword expansion. - if isinstance(request, dict): - request = operations_pb2.CancelOperationRequest(**request) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.cancel_operation] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata( - (("name", request.name),)), - ) - - # Validate the universe domain. - self._validate_universe_domain() - - # Send the request. 
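``cancel_operation`` and ``delete_operation`` differ in intent: cancel asks the server to stop the work (best effort), while delete merely drops the client's interest in the result without cancelling anything. Both accept a plain dict, which the methods expand into the proto request via keyword arguments. A sketch with a placeholder operation name:

.. code-block:: python

    from google.cloud import dataplex_v1

    client = dataplex_v1.DataplexServiceClient()
    op_name = "projects/my-project/locations/us-central1/operations/operation-123"  # placeholder

    # Best-effort cancellation; the operation may still complete.
    client.cancel_operation({"name": op_name})

    # Stop tracking the result; the underlying work is not cancelled.
    client.delete_operation({"name": op_name})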
- rpc(request, retry=retry, timeout=timeout, metadata=metadata,) - - def get_location( - self, - request: Optional[locations_pb2.GetLocationRequest] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> locations_pb2.Location: - r"""Gets information about a location. - - Args: - request (:class:`~.location_pb2.GetLocationRequest`): - The request object. Request message for - `GetLocation` method. - retry (google.api_core.retry.Retry): Designation of what errors, - if any, should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - Returns: - ~.location_pb2.Location: - Location object. - """ - # Create or coerce a protobuf request object. - # The request isn't a proto-plus wrapped type, - # so it must be constructed via keyword expansion. - if isinstance(request, dict): - request = locations_pb2.GetLocationRequest(**request) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.get_location] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata( - (("name", request.name),)), - ) - - # Validate the universe domain. - self._validate_universe_domain() - - # Send the request. - response = rpc( - request, retry=retry, timeout=timeout, metadata=metadata,) - - # Done; return the response. - return response - - def list_locations( - self, - request: Optional[locations_pb2.ListLocationsRequest] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> locations_pb2.ListLocationsResponse: - r"""Lists information about the supported locations for this service. - - Args: - request (:class:`~.location_pb2.ListLocationsRequest`): - The request object. Request message for - `ListLocations` method. - retry (google.api_core.retry.Retry): Designation of what errors, - if any, should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - Returns: - ~.location_pb2.ListLocationsResponse: - Response message for ``ListLocations`` method. - """ - # Create or coerce a protobuf request object. - # The request isn't a proto-plus wrapped type, - # so it must be constructed via keyword expansion. - if isinstance(request, dict): - request = locations_pb2.ListLocationsRequest(**request) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.list_locations] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata( - (("name", request.name),)), - ) - - # Validate the universe domain. - self._validate_universe_domain() - - # Send the request. - response = rpc( - request, retry=retry, timeout=timeout, metadata=metadata,) - - # Done; return the response. 
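``get_location`` and ``list_locations`` above follow the same dict-expansion pattern and route on the ``name`` field. A sketch listing the locations the service supports (the project ID is a placeholder):

.. code-block:: python

    from google.cloud import dataplex_v1

    client = dataplex_v1.DataplexServiceClient()
    response = client.list_locations({"name": "projects/my-project"})  # placeholder

    for location in response.locations:
        print(location.location_id)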
- return response - - -DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(gapic_version=package_version.__version__) - - -__all__ = ( - "DataplexServiceClient", -) diff --git a/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/dataplex_service/pagers.py b/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/dataplex_service/pagers.py deleted file mode 100644 index 394c731b72c9..000000000000 --- a/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/dataplex_service/pagers.py +++ /dev/null @@ -1,1380 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -from google.api_core import gapic_v1 -from google.api_core import retry as retries -from google.api_core import retry_async as retries_async -from typing import Any, AsyncIterator, Awaitable, Callable, Sequence, Tuple, Optional, Iterator, Union -try: - OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault, None] - OptionalAsyncRetry = Union[retries_async.AsyncRetry, gapic_v1.method._MethodDefault, None] -except AttributeError: # pragma: NO COVER - OptionalRetry = Union[retries.Retry, object, None] # type: ignore - OptionalAsyncRetry = Union[retries_async.AsyncRetry, object, None] # type: ignore - -from google.cloud.dataplex_v1.types import analyze -from google.cloud.dataplex_v1.types import resources -from google.cloud.dataplex_v1.types import service -from google.cloud.dataplex_v1.types import tasks - - -class ListLakesPager: - """A pager for iterating through ``list_lakes`` requests. - - This class thinly wraps an initial - :class:`google.cloud.dataplex_v1.types.ListLakesResponse` object, and - provides an ``__iter__`` method to iterate through its - ``lakes`` field. - - If there are more pages, the ``__iter__`` method will make additional - ``ListLakes`` requests and continue to iterate - through the ``lakes`` field on the - corresponding responses. - - All the usual :class:`google.cloud.dataplex_v1.types.ListLakesResponse` - attributes are available on the pager. If multiple requests are made, only - the most recent response is retained, and thus used for attribute lookup. - """ - def __init__(self, - method: Callable[..., service.ListLakesResponse], - request: service.ListLakesRequest, - response: service.ListLakesResponse, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = ()): - """Instantiate the pager. - - Args: - method (Callable): The method that was originally called, and - which instantiated this pager. - request (google.cloud.dataplex_v1.types.ListLakesRequest): - The initial request object. - response (google.cloud.dataplex_v1.types.ListLakesResponse): - The initial response object. - retry (google.api_core.retry.Retry): Designation of what errors, - if any, should be retried. - timeout (float): The timeout for this request. 
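The ``try``/``except AttributeError`` at the top of ``pagers.py`` keeps ``OptionalRetry`` importable on older ``google-api-core`` releases that predate ``gapic_v1.method._MethodDefault``. The same shim can be reused in application code that must annotate retries across library versions; a sketch replicating the pattern:

.. code-block:: python

    from typing import Union

    from google.api_core import gapic_v1
    from google.api_core import retry as retries

    try:
        # Newer google-api-core exposes the DEFAULT sentinel's type.
        OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault, None]
    except AttributeError:  # pragma: NO COVER
        # Older releases: fall back to ``object`` so the alias still resolves.
        OptionalRetry = Union[retries.Retry, object, None]  # type: ignore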
- metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - """ - self._method = method - self._request = service.ListLakesRequest(request) - self._response = response - self._retry = retry - self._timeout = timeout - self._metadata = metadata - - def __getattr__(self, name: str) -> Any: - return getattr(self._response, name) - - @property - def pages(self) -> Iterator[service.ListLakesResponse]: - yield self._response - while self._response.next_page_token: - self._request.page_token = self._response.next_page_token - self._response = self._method(self._request, retry=self._retry, timeout=self._timeout, metadata=self._metadata) - yield self._response - - def __iter__(self) -> Iterator[resources.Lake]: - for page in self.pages: - yield from page.lakes - - def __repr__(self) -> str: - return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) - - -class ListLakesAsyncPager: - """A pager for iterating through ``list_lakes`` requests. - - This class thinly wraps an initial - :class:`google.cloud.dataplex_v1.types.ListLakesResponse` object, and - provides an ``__aiter__`` method to iterate through its - ``lakes`` field. - - If there are more pages, the ``__aiter__`` method will make additional - ``ListLakes`` requests and continue to iterate - through the ``lakes`` field on the - corresponding responses. - - All the usual :class:`google.cloud.dataplex_v1.types.ListLakesResponse` - attributes are available on the pager. If multiple requests are made, only - the most recent response is retained, and thus used for attribute lookup. - """ - def __init__(self, - method: Callable[..., Awaitable[service.ListLakesResponse]], - request: service.ListLakesRequest, - response: service.ListLakesResponse, - *, - retry: OptionalAsyncRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = ()): - """Instantiates the pager. - - Args: - method (Callable): The method that was originally called, and - which instantiated this pager. - request (google.cloud.dataplex_v1.types.ListLakesRequest): - The initial request object. - response (google.cloud.dataplex_v1.types.ListLakesResponse): - The initial response object. - retry (google.api_core.retry.AsyncRetry): Designation of what errors, - if any, should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - """ - self._method = method - self._request = service.ListLakesRequest(request) - self._response = response - self._retry = retry - self._timeout = timeout - self._metadata = metadata - - def __getattr__(self, name: str) -> Any: - return getattr(self._response, name) - - @property - async def pages(self) -> AsyncIterator[service.ListLakesResponse]: - yield self._response - while self._response.next_page_token: - self._request.page_token = self._response.next_page_token - self._response = await self._method(self._request, retry=self._retry, timeout=self._timeout, metadata=self._metadata) - yield self._response - def __aiter__(self) -> AsyncIterator[resources.Lake]: - async def async_generator(): - async for page in self.pages: - for response in page.lakes: - yield response - - return async_generator() - - def __repr__(self) -> str: - return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) - - -class ListLakeActionsPager: - """A pager for iterating through ``list_lake_actions`` requests. 
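The ``pages`` property above drives pagination: it yields the initial response, then re-issues the request with the updated ``page_token`` while ``next_page_token`` is non-empty. Iterating pages directly is handy when per-page metadata matters. A sketch (the parent path is a placeholder):

.. code-block:: python

    from google.cloud import dataplex_v1

    client = dataplex_v1.DataplexServiceClient()
    pager = client.list_lakes(parent="projects/my-project/locations/us-central1")  # placeholder

    for page in pager.pages:
        print(f"fetched {len(page.lakes)} lakes; "
              f"next_page_token={page.next_page_token!r}")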
- - This class thinly wraps an initial - :class:`google.cloud.dataplex_v1.types.ListActionsResponse` object, and - provides an ``__iter__`` method to iterate through its - ``actions`` field. - - If there are more pages, the ``__iter__`` method will make additional - ``ListLakeActions`` requests and continue to iterate - through the ``actions`` field on the - corresponding responses. - - All the usual :class:`google.cloud.dataplex_v1.types.ListActionsResponse` - attributes are available on the pager. If multiple requests are made, only - the most recent response is retained, and thus used for attribute lookup. - """ - def __init__(self, - method: Callable[..., service.ListActionsResponse], - request: service.ListLakeActionsRequest, - response: service.ListActionsResponse, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = ()): - """Instantiate the pager. - - Args: - method (Callable): The method that was originally called, and - which instantiated this pager. - request (google.cloud.dataplex_v1.types.ListLakeActionsRequest): - The initial request object. - response (google.cloud.dataplex_v1.types.ListActionsResponse): - The initial response object. - retry (google.api_core.retry.Retry): Designation of what errors, - if any, should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - """ - self._method = method - self._request = service.ListLakeActionsRequest(request) - self._response = response - self._retry = retry - self._timeout = timeout - self._metadata = metadata - - def __getattr__(self, name: str) -> Any: - return getattr(self._response, name) - - @property - def pages(self) -> Iterator[service.ListActionsResponse]: - yield self._response - while self._response.next_page_token: - self._request.page_token = self._response.next_page_token - self._response = self._method(self._request, retry=self._retry, timeout=self._timeout, metadata=self._metadata) - yield self._response - - def __iter__(self) -> Iterator[resources.Action]: - for page in self.pages: - yield from page.actions - - def __repr__(self) -> str: - return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) - - -class ListLakeActionsAsyncPager: - """A pager for iterating through ``list_lake_actions`` requests. - - This class thinly wraps an initial - :class:`google.cloud.dataplex_v1.types.ListActionsResponse` object, and - provides an ``__aiter__`` method to iterate through its - ``actions`` field. - - If there are more pages, the ``__aiter__`` method will make additional - ``ListLakeActions`` requests and continue to iterate - through the ``actions`` field on the - corresponding responses. - - All the usual :class:`google.cloud.dataplex_v1.types.ListActionsResponse` - attributes are available on the pager. If multiple requests are made, only - the most recent response is retained, and thus used for attribute lookup. - """ - def __init__(self, - method: Callable[..., Awaitable[service.ListActionsResponse]], - request: service.ListLakeActionsRequest, - response: service.ListActionsResponse, - *, - retry: OptionalAsyncRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = ()): - """Instantiates the pager. - - Args: - method (Callable): The method that was originally called, and - which instantiated this pager. 
- request (google.cloud.dataplex_v1.types.ListLakeActionsRequest): - The initial request object. - response (google.cloud.dataplex_v1.types.ListActionsResponse): - The initial response object. - retry (google.api_core.retry.AsyncRetry): Designation of what errors, - if any, should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - """ - self._method = method - self._request = service.ListLakeActionsRequest(request) - self._response = response - self._retry = retry - self._timeout = timeout - self._metadata = metadata - - def __getattr__(self, name: str) -> Any: - return getattr(self._response, name) - - @property - async def pages(self) -> AsyncIterator[service.ListActionsResponse]: - yield self._response - while self._response.next_page_token: - self._request.page_token = self._response.next_page_token - self._response = await self._method(self._request, retry=self._retry, timeout=self._timeout, metadata=self._metadata) - yield self._response - def __aiter__(self) -> AsyncIterator[resources.Action]: - async def async_generator(): - async for page in self.pages: - for response in page.actions: - yield response - - return async_generator() - - def __repr__(self) -> str: - return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) - - -class ListZonesPager: - """A pager for iterating through ``list_zones`` requests. - - This class thinly wraps an initial - :class:`google.cloud.dataplex_v1.types.ListZonesResponse` object, and - provides an ``__iter__`` method to iterate through its - ``zones`` field. - - If there are more pages, the ``__iter__`` method will make additional - ``ListZones`` requests and continue to iterate - through the ``zones`` field on the - corresponding responses. - - All the usual :class:`google.cloud.dataplex_v1.types.ListZonesResponse` - attributes are available on the pager. If multiple requests are made, only - the most recent response is retained, and thus used for attribute lookup. - """ - def __init__(self, - method: Callable[..., service.ListZonesResponse], - request: service.ListZonesRequest, - response: service.ListZonesResponse, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = ()): - """Instantiate the pager. - - Args: - method (Callable): The method that was originally called, and - which instantiated this pager. - request (google.cloud.dataplex_v1.types.ListZonesRequest): - The initial request object. - response (google.cloud.dataplex_v1.types.ListZonesResponse): - The initial response object. - retry (google.api_core.retry.Retry): Designation of what errors, - if any, should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. 
- """ - self._method = method - self._request = service.ListZonesRequest(request) - self._response = response - self._retry = retry - self._timeout = timeout - self._metadata = metadata - - def __getattr__(self, name: str) -> Any: - return getattr(self._response, name) - - @property - def pages(self) -> Iterator[service.ListZonesResponse]: - yield self._response - while self._response.next_page_token: - self._request.page_token = self._response.next_page_token - self._response = self._method(self._request, retry=self._retry, timeout=self._timeout, metadata=self._metadata) - yield self._response - - def __iter__(self) -> Iterator[resources.Zone]: - for page in self.pages: - yield from page.zones - - def __repr__(self) -> str: - return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) - - -class ListZonesAsyncPager: - """A pager for iterating through ``list_zones`` requests. - - This class thinly wraps an initial - :class:`google.cloud.dataplex_v1.types.ListZonesResponse` object, and - provides an ``__aiter__`` method to iterate through its - ``zones`` field. - - If there are more pages, the ``__aiter__`` method will make additional - ``ListZones`` requests and continue to iterate - through the ``zones`` field on the - corresponding responses. - - All the usual :class:`google.cloud.dataplex_v1.types.ListZonesResponse` - attributes are available on the pager. If multiple requests are made, only - the most recent response is retained, and thus used for attribute lookup. - """ - def __init__(self, - method: Callable[..., Awaitable[service.ListZonesResponse]], - request: service.ListZonesRequest, - response: service.ListZonesResponse, - *, - retry: OptionalAsyncRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = ()): - """Instantiates the pager. - - Args: - method (Callable): The method that was originally called, and - which instantiated this pager. - request (google.cloud.dataplex_v1.types.ListZonesRequest): - The initial request object. - response (google.cloud.dataplex_v1.types.ListZonesResponse): - The initial response object. - retry (google.api_core.retry.AsyncRetry): Designation of what errors, - if any, should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - """ - self._method = method - self._request = service.ListZonesRequest(request) - self._response = response - self._retry = retry - self._timeout = timeout - self._metadata = metadata - - def __getattr__(self, name: str) -> Any: - return getattr(self._response, name) - - @property - async def pages(self) -> AsyncIterator[service.ListZonesResponse]: - yield self._response - while self._response.next_page_token: - self._request.page_token = self._response.next_page_token - self._response = await self._method(self._request, retry=self._retry, timeout=self._timeout, metadata=self._metadata) - yield self._response - def __aiter__(self) -> AsyncIterator[resources.Zone]: - async def async_generator(): - async for page in self.pages: - for response in page.zones: - yield response - - return async_generator() - - def __repr__(self) -> str: - return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) - - -class ListZoneActionsPager: - """A pager for iterating through ``list_zone_actions`` requests. 
- - This class thinly wraps an initial - :class:`google.cloud.dataplex_v1.types.ListActionsResponse` object, and - provides an ``__iter__`` method to iterate through its - ``actions`` field. - - If there are more pages, the ``__iter__`` method will make additional - ``ListZoneActions`` requests and continue to iterate - through the ``actions`` field on the - corresponding responses. - - All the usual :class:`google.cloud.dataplex_v1.types.ListActionsResponse` - attributes are available on the pager. If multiple requests are made, only - the most recent response is retained, and thus used for attribute lookup. - """ - def __init__(self, - method: Callable[..., service.ListActionsResponse], - request: service.ListZoneActionsRequest, - response: service.ListActionsResponse, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = ()): - """Instantiate the pager. - - Args: - method (Callable): The method that was originally called, and - which instantiated this pager. - request (google.cloud.dataplex_v1.types.ListZoneActionsRequest): - The initial request object. - response (google.cloud.dataplex_v1.types.ListActionsResponse): - The initial response object. - retry (google.api_core.retry.Retry): Designation of what errors, - if any, should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - """ - self._method = method - self._request = service.ListZoneActionsRequest(request) - self._response = response - self._retry = retry - self._timeout = timeout - self._metadata = metadata - - def __getattr__(self, name: str) -> Any: - return getattr(self._response, name) - - @property - def pages(self) -> Iterator[service.ListActionsResponse]: - yield self._response - while self._response.next_page_token: - self._request.page_token = self._response.next_page_token - self._response = self._method(self._request, retry=self._retry, timeout=self._timeout, metadata=self._metadata) - yield self._response - - def __iter__(self) -> Iterator[resources.Action]: - for page in self.pages: - yield from page.actions - - def __repr__(self) -> str: - return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) - - -class ListZoneActionsAsyncPager: - """A pager for iterating through ``list_zone_actions`` requests. - - This class thinly wraps an initial - :class:`google.cloud.dataplex_v1.types.ListActionsResponse` object, and - provides an ``__aiter__`` method to iterate through its - ``actions`` field. - - If there are more pages, the ``__aiter__`` method will make additional - ``ListZoneActions`` requests and continue to iterate - through the ``actions`` field on the - corresponding responses. - - All the usual :class:`google.cloud.dataplex_v1.types.ListActionsResponse` - attributes are available on the pager. If multiple requests are made, only - the most recent response is retained, and thus used for attribute lookup. - """ - def __init__(self, - method: Callable[..., Awaitable[service.ListActionsResponse]], - request: service.ListZoneActionsRequest, - response: service.ListActionsResponse, - *, - retry: OptionalAsyncRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = ()): - """Instantiates the pager. - - Args: - method (Callable): The method that was originally called, and - which instantiated this pager. 
- request (google.cloud.dataplex_v1.types.ListZoneActionsRequest): - The initial request object. - response (google.cloud.dataplex_v1.types.ListActionsResponse): - The initial response object. - retry (google.api_core.retry.AsyncRetry): Designation of what errors, - if any, should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - """ - self._method = method - self._request = service.ListZoneActionsRequest(request) - self._response = response - self._retry = retry - self._timeout = timeout - self._metadata = metadata - - def __getattr__(self, name: str) -> Any: - return getattr(self._response, name) - - @property - async def pages(self) -> AsyncIterator[service.ListActionsResponse]: - yield self._response - while self._response.next_page_token: - self._request.page_token = self._response.next_page_token - self._response = await self._method(self._request, retry=self._retry, timeout=self._timeout, metadata=self._metadata) - yield self._response - def __aiter__(self) -> AsyncIterator[resources.Action]: - async def async_generator(): - async for page in self.pages: - for response in page.actions: - yield response - - return async_generator() - - def __repr__(self) -> str: - return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) - - -class ListAssetsPager: - """A pager for iterating through ``list_assets`` requests. - - This class thinly wraps an initial - :class:`google.cloud.dataplex_v1.types.ListAssetsResponse` object, and - provides an ``__iter__`` method to iterate through its - ``assets`` field. - - If there are more pages, the ``__iter__`` method will make additional - ``ListAssets`` requests and continue to iterate - through the ``assets`` field on the - corresponding responses. - - All the usual :class:`google.cloud.dataplex_v1.types.ListAssetsResponse` - attributes are available on the pager. If multiple requests are made, only - the most recent response is retained, and thus used for attribute lookup. - """ - def __init__(self, - method: Callable[..., service.ListAssetsResponse], - request: service.ListAssetsRequest, - response: service.ListAssetsResponse, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = ()): - """Instantiate the pager. - - Args: - method (Callable): The method that was originally called, and - which instantiated this pager. - request (google.cloud.dataplex_v1.types.ListAssetsRequest): - The initial request object. - response (google.cloud.dataplex_v1.types.ListAssetsResponse): - The initial response object. - retry (google.api_core.retry.Retry): Designation of what errors, - if any, should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. 
- """ - self._method = method - self._request = service.ListAssetsRequest(request) - self._response = response - self._retry = retry - self._timeout = timeout - self._metadata = metadata - - def __getattr__(self, name: str) -> Any: - return getattr(self._response, name) - - @property - def pages(self) -> Iterator[service.ListAssetsResponse]: - yield self._response - while self._response.next_page_token: - self._request.page_token = self._response.next_page_token - self._response = self._method(self._request, retry=self._retry, timeout=self._timeout, metadata=self._metadata) - yield self._response - - def __iter__(self) -> Iterator[resources.Asset]: - for page in self.pages: - yield from page.assets - - def __repr__(self) -> str: - return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) - - -class ListAssetsAsyncPager: - """A pager for iterating through ``list_assets`` requests. - - This class thinly wraps an initial - :class:`google.cloud.dataplex_v1.types.ListAssetsResponse` object, and - provides an ``__aiter__`` method to iterate through its - ``assets`` field. - - If there are more pages, the ``__aiter__`` method will make additional - ``ListAssets`` requests and continue to iterate - through the ``assets`` field on the - corresponding responses. - - All the usual :class:`google.cloud.dataplex_v1.types.ListAssetsResponse` - attributes are available on the pager. If multiple requests are made, only - the most recent response is retained, and thus used for attribute lookup. - """ - def __init__(self, - method: Callable[..., Awaitable[service.ListAssetsResponse]], - request: service.ListAssetsRequest, - response: service.ListAssetsResponse, - *, - retry: OptionalAsyncRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = ()): - """Instantiates the pager. - - Args: - method (Callable): The method that was originally called, and - which instantiated this pager. - request (google.cloud.dataplex_v1.types.ListAssetsRequest): - The initial request object. - response (google.cloud.dataplex_v1.types.ListAssetsResponse): - The initial response object. - retry (google.api_core.retry.AsyncRetry): Designation of what errors, - if any, should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - """ - self._method = method - self._request = service.ListAssetsRequest(request) - self._response = response - self._retry = retry - self._timeout = timeout - self._metadata = metadata - - def __getattr__(self, name: str) -> Any: - return getattr(self._response, name) - - @property - async def pages(self) -> AsyncIterator[service.ListAssetsResponse]: - yield self._response - while self._response.next_page_token: - self._request.page_token = self._response.next_page_token - self._response = await self._method(self._request, retry=self._retry, timeout=self._timeout, metadata=self._metadata) - yield self._response - def __aiter__(self) -> AsyncIterator[resources.Asset]: - async def async_generator(): - async for page in self.pages: - for response in page.assets: - yield response - - return async_generator() - - def __repr__(self) -> str: - return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) - - -class ListAssetActionsPager: - """A pager for iterating through ``list_asset_actions`` requests. 
- - This class thinly wraps an initial - :class:`google.cloud.dataplex_v1.types.ListActionsResponse` object, and - provides an ``__iter__`` method to iterate through its - ``actions`` field. - - If there are more pages, the ``__iter__`` method will make additional - ``ListAssetActions`` requests and continue to iterate - through the ``actions`` field on the - corresponding responses. - - All the usual :class:`google.cloud.dataplex_v1.types.ListActionsResponse` - attributes are available on the pager. If multiple requests are made, only - the most recent response is retained, and thus used for attribute lookup. - """ - def __init__(self, - method: Callable[..., service.ListActionsResponse], - request: service.ListAssetActionsRequest, - response: service.ListActionsResponse, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = ()): - """Instantiate the pager. - - Args: - method (Callable): The method that was originally called, and - which instantiated this pager. - request (google.cloud.dataplex_v1.types.ListAssetActionsRequest): - The initial request object. - response (google.cloud.dataplex_v1.types.ListActionsResponse): - The initial response object. - retry (google.api_core.retry.Retry): Designation of what errors, - if any, should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - """ - self._method = method - self._request = service.ListAssetActionsRequest(request) - self._response = response - self._retry = retry - self._timeout = timeout - self._metadata = metadata - - def __getattr__(self, name: str) -> Any: - return getattr(self._response, name) - - @property - def pages(self) -> Iterator[service.ListActionsResponse]: - yield self._response - while self._response.next_page_token: - self._request.page_token = self._response.next_page_token - self._response = self._method(self._request, retry=self._retry, timeout=self._timeout, metadata=self._metadata) - yield self._response - - def __iter__(self) -> Iterator[resources.Action]: - for page in self.pages: - yield from page.actions - - def __repr__(self) -> str: - return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) - - -class ListAssetActionsAsyncPager: - """A pager for iterating through ``list_asset_actions`` requests. - - This class thinly wraps an initial - :class:`google.cloud.dataplex_v1.types.ListActionsResponse` object, and - provides an ``__aiter__`` method to iterate through its - ``actions`` field. - - If there are more pages, the ``__aiter__`` method will make additional - ``ListAssetActions`` requests and continue to iterate - through the ``actions`` field on the - corresponding responses. - - All the usual :class:`google.cloud.dataplex_v1.types.ListActionsResponse` - attributes are available on the pager. If multiple requests are made, only - the most recent response is retained, and thus used for attribute lookup. - """ - def __init__(self, - method: Callable[..., Awaitable[service.ListActionsResponse]], - request: service.ListAssetActionsRequest, - response: service.ListActionsResponse, - *, - retry: OptionalAsyncRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = ()): - """Instantiates the pager. - - Args: - method (Callable): The method that was originally called, and - which instantiated this pager. 
- request (google.cloud.dataplex_v1.types.ListAssetActionsRequest): - The initial request object. - response (google.cloud.dataplex_v1.types.ListActionsResponse): - The initial response object. - retry (google.api_core.retry.AsyncRetry): Designation of what errors, - if any, should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - """ - self._method = method - self._request = service.ListAssetActionsRequest(request) - self._response = response - self._retry = retry - self._timeout = timeout - self._metadata = metadata - - def __getattr__(self, name: str) -> Any: - return getattr(self._response, name) - - @property - async def pages(self) -> AsyncIterator[service.ListActionsResponse]: - yield self._response - while self._response.next_page_token: - self._request.page_token = self._response.next_page_token - self._response = await self._method(self._request, retry=self._retry, timeout=self._timeout, metadata=self._metadata) - yield self._response - def __aiter__(self) -> AsyncIterator[resources.Action]: - async def async_generator(): - async for page in self.pages: - for response in page.actions: - yield response - - return async_generator() - - def __repr__(self) -> str: - return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) - - -class ListTasksPager: - """A pager for iterating through ``list_tasks`` requests. - - This class thinly wraps an initial - :class:`google.cloud.dataplex_v1.types.ListTasksResponse` object, and - provides an ``__iter__`` method to iterate through its - ``tasks`` field. - - If there are more pages, the ``__iter__`` method will make additional - ``ListTasks`` requests and continue to iterate - through the ``tasks`` field on the - corresponding responses. - - All the usual :class:`google.cloud.dataplex_v1.types.ListTasksResponse` - attributes are available on the pager. If multiple requests are made, only - the most recent response is retained, and thus used for attribute lookup. - """ - def __init__(self, - method: Callable[..., service.ListTasksResponse], - request: service.ListTasksRequest, - response: service.ListTasksResponse, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = ()): - """Instantiate the pager. - - Args: - method (Callable): The method that was originally called, and - which instantiated this pager. - request (google.cloud.dataplex_v1.types.ListTasksRequest): - The initial request object. - response (google.cloud.dataplex_v1.types.ListTasksResponse): - The initial response object. - retry (google.api_core.retry.Retry): Designation of what errors, - if any, should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. 
- """ - self._method = method - self._request = service.ListTasksRequest(request) - self._response = response - self._retry = retry - self._timeout = timeout - self._metadata = metadata - - def __getattr__(self, name: str) -> Any: - return getattr(self._response, name) - - @property - def pages(self) -> Iterator[service.ListTasksResponse]: - yield self._response - while self._response.next_page_token: - self._request.page_token = self._response.next_page_token - self._response = self._method(self._request, retry=self._retry, timeout=self._timeout, metadata=self._metadata) - yield self._response - - def __iter__(self) -> Iterator[tasks.Task]: - for page in self.pages: - yield from page.tasks - - def __repr__(self) -> str: - return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) - - -class ListTasksAsyncPager: - """A pager for iterating through ``list_tasks`` requests. - - This class thinly wraps an initial - :class:`google.cloud.dataplex_v1.types.ListTasksResponse` object, and - provides an ``__aiter__`` method to iterate through its - ``tasks`` field. - - If there are more pages, the ``__aiter__`` method will make additional - ``ListTasks`` requests and continue to iterate - through the ``tasks`` field on the - corresponding responses. - - All the usual :class:`google.cloud.dataplex_v1.types.ListTasksResponse` - attributes are available on the pager. If multiple requests are made, only - the most recent response is retained, and thus used for attribute lookup. - """ - def __init__(self, - method: Callable[..., Awaitable[service.ListTasksResponse]], - request: service.ListTasksRequest, - response: service.ListTasksResponse, - *, - retry: OptionalAsyncRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = ()): - """Instantiates the pager. - - Args: - method (Callable): The method that was originally called, and - which instantiated this pager. - request (google.cloud.dataplex_v1.types.ListTasksRequest): - The initial request object. - response (google.cloud.dataplex_v1.types.ListTasksResponse): - The initial response object. - retry (google.api_core.retry.AsyncRetry): Designation of what errors, - if any, should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - """ - self._method = method - self._request = service.ListTasksRequest(request) - self._response = response - self._retry = retry - self._timeout = timeout - self._metadata = metadata - - def __getattr__(self, name: str) -> Any: - return getattr(self._response, name) - - @property - async def pages(self) -> AsyncIterator[service.ListTasksResponse]: - yield self._response - while self._response.next_page_token: - self._request.page_token = self._response.next_page_token - self._response = await self._method(self._request, retry=self._retry, timeout=self._timeout, metadata=self._metadata) - yield self._response - def __aiter__(self) -> AsyncIterator[tasks.Task]: - async def async_generator(): - async for page in self.pages: - for response in page.tasks: - yield response - - return async_generator() - - def __repr__(self) -> str: - return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) - - -class ListJobsPager: - """A pager for iterating through ``list_jobs`` requests. 
- - This class thinly wraps an initial - :class:`google.cloud.dataplex_v1.types.ListJobsResponse` object, and - provides an ``__iter__`` method to iterate through its - ``jobs`` field. - - If there are more pages, the ``__iter__`` method will make additional - ``ListJobs`` requests and continue to iterate - through the ``jobs`` field on the - corresponding responses. - - All the usual :class:`google.cloud.dataplex_v1.types.ListJobsResponse` - attributes are available on the pager. If multiple requests are made, only - the most recent response is retained, and thus used for attribute lookup. - """ - def __init__(self, - method: Callable[..., service.ListJobsResponse], - request: service.ListJobsRequest, - response: service.ListJobsResponse, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = ()): - """Instantiate the pager. - - Args: - method (Callable): The method that was originally called, and - which instantiated this pager. - request (google.cloud.dataplex_v1.types.ListJobsRequest): - The initial request object. - response (google.cloud.dataplex_v1.types.ListJobsResponse): - The initial response object. - retry (google.api_core.retry.Retry): Designation of what errors, - if any, should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - """ - self._method = method - self._request = service.ListJobsRequest(request) - self._response = response - self._retry = retry - self._timeout = timeout - self._metadata = metadata - - def __getattr__(self, name: str) -> Any: - return getattr(self._response, name) - - @property - def pages(self) -> Iterator[service.ListJobsResponse]: - yield self._response - while self._response.next_page_token: - self._request.page_token = self._response.next_page_token - self._response = self._method(self._request, retry=self._retry, timeout=self._timeout, metadata=self._metadata) - yield self._response - - def __iter__(self) -> Iterator[tasks.Job]: - for page in self.pages: - yield from page.jobs - - def __repr__(self) -> str: - return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) - - -class ListJobsAsyncPager: - """A pager for iterating through ``list_jobs`` requests. - - This class thinly wraps an initial - :class:`google.cloud.dataplex_v1.types.ListJobsResponse` object, and - provides an ``__aiter__`` method to iterate through its - ``jobs`` field. - - If there are more pages, the ``__aiter__`` method will make additional - ``ListJobs`` requests and continue to iterate - through the ``jobs`` field on the - corresponding responses. - - All the usual :class:`google.cloud.dataplex_v1.types.ListJobsResponse` - attributes are available on the pager. If multiple requests are made, only - the most recent response is retained, and thus used for attribute lookup. - """ - def __init__(self, - method: Callable[..., Awaitable[service.ListJobsResponse]], - request: service.ListJobsRequest, - response: service.ListJobsResponse, - *, - retry: OptionalAsyncRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = ()): - """Instantiates the pager. - - Args: - method (Callable): The method that was originally called, and - which instantiated this pager. - request (google.cloud.dataplex_v1.types.ListJobsRequest): - The initial request object. 
- response (google.cloud.dataplex_v1.types.ListJobsResponse): - The initial response object. - retry (google.api_core.retry.AsyncRetry): Designation of what errors, - if any, should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - """ - self._method = method - self._request = service.ListJobsRequest(request) - self._response = response - self._retry = retry - self._timeout = timeout - self._metadata = metadata - - def __getattr__(self, name: str) -> Any: - return getattr(self._response, name) - - @property - async def pages(self) -> AsyncIterator[service.ListJobsResponse]: - yield self._response - while self._response.next_page_token: - self._request.page_token = self._response.next_page_token - self._response = await self._method(self._request, retry=self._retry, timeout=self._timeout, metadata=self._metadata) - yield self._response - def __aiter__(self) -> AsyncIterator[tasks.Job]: - async def async_generator(): - async for page in self.pages: - for response in page.jobs: - yield response - - return async_generator() - - def __repr__(self) -> str: - return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) - - -class ListEnvironmentsPager: - """A pager for iterating through ``list_environments`` requests. - - This class thinly wraps an initial - :class:`google.cloud.dataplex_v1.types.ListEnvironmentsResponse` object, and - provides an ``__iter__`` method to iterate through its - ``environments`` field. - - If there are more pages, the ``__iter__`` method will make additional - ``ListEnvironments`` requests and continue to iterate - through the ``environments`` field on the - corresponding responses. - - All the usual :class:`google.cloud.dataplex_v1.types.ListEnvironmentsResponse` - attributes are available on the pager. If multiple requests are made, only - the most recent response is retained, and thus used for attribute lookup. - """ - def __init__(self, - method: Callable[..., service.ListEnvironmentsResponse], - request: service.ListEnvironmentsRequest, - response: service.ListEnvironmentsResponse, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = ()): - """Instantiate the pager. - - Args: - method (Callable): The method that was originally called, and - which instantiated this pager. - request (google.cloud.dataplex_v1.types.ListEnvironmentsRequest): - The initial request object. - response (google.cloud.dataplex_v1.types.ListEnvironmentsResponse): - The initial response object. - retry (google.api_core.retry.Retry): Designation of what errors, - if any, should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. 
- """ - self._method = method - self._request = service.ListEnvironmentsRequest(request) - self._response = response - self._retry = retry - self._timeout = timeout - self._metadata = metadata - - def __getattr__(self, name: str) -> Any: - return getattr(self._response, name) - - @property - def pages(self) -> Iterator[service.ListEnvironmentsResponse]: - yield self._response - while self._response.next_page_token: - self._request.page_token = self._response.next_page_token - self._response = self._method(self._request, retry=self._retry, timeout=self._timeout, metadata=self._metadata) - yield self._response - - def __iter__(self) -> Iterator[analyze.Environment]: - for page in self.pages: - yield from page.environments - - def __repr__(self) -> str: - return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) - - -class ListEnvironmentsAsyncPager: - """A pager for iterating through ``list_environments`` requests. - - This class thinly wraps an initial - :class:`google.cloud.dataplex_v1.types.ListEnvironmentsResponse` object, and - provides an ``__aiter__`` method to iterate through its - ``environments`` field. - - If there are more pages, the ``__aiter__`` method will make additional - ``ListEnvironments`` requests and continue to iterate - through the ``environments`` field on the - corresponding responses. - - All the usual :class:`google.cloud.dataplex_v1.types.ListEnvironmentsResponse` - attributes are available on the pager. If multiple requests are made, only - the most recent response is retained, and thus used for attribute lookup. - """ - def __init__(self, - method: Callable[..., Awaitable[service.ListEnvironmentsResponse]], - request: service.ListEnvironmentsRequest, - response: service.ListEnvironmentsResponse, - *, - retry: OptionalAsyncRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = ()): - """Instantiates the pager. - - Args: - method (Callable): The method that was originally called, and - which instantiated this pager. - request (google.cloud.dataplex_v1.types.ListEnvironmentsRequest): - The initial request object. - response (google.cloud.dataplex_v1.types.ListEnvironmentsResponse): - The initial response object. - retry (google.api_core.retry.AsyncRetry): Designation of what errors, - if any, should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - """ - self._method = method - self._request = service.ListEnvironmentsRequest(request) - self._response = response - self._retry = retry - self._timeout = timeout - self._metadata = metadata - - def __getattr__(self, name: str) -> Any: - return getattr(self._response, name) - - @property - async def pages(self) -> AsyncIterator[service.ListEnvironmentsResponse]: - yield self._response - while self._response.next_page_token: - self._request.page_token = self._response.next_page_token - self._response = await self._method(self._request, retry=self._retry, timeout=self._timeout, metadata=self._metadata) - yield self._response - def __aiter__(self) -> AsyncIterator[analyze.Environment]: - async def async_generator(): - async for page in self.pages: - for response in page.environments: - yield response - - return async_generator() - - def __repr__(self) -> str: - return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) - - -class ListSessionsPager: - """A pager for iterating through ``list_sessions`` requests. 
- - This class thinly wraps an initial - :class:`google.cloud.dataplex_v1.types.ListSessionsResponse` object, and - provides an ``__iter__`` method to iterate through its - ``sessions`` field. - - If there are more pages, the ``__iter__`` method will make additional - ``ListSessions`` requests and continue to iterate - through the ``sessions`` field on the - corresponding responses. - - All the usual :class:`google.cloud.dataplex_v1.types.ListSessionsResponse` - attributes are available on the pager. If multiple requests are made, only - the most recent response is retained, and thus used for attribute lookup. - """ - def __init__(self, - method: Callable[..., service.ListSessionsResponse], - request: service.ListSessionsRequest, - response: service.ListSessionsResponse, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = ()): - """Instantiate the pager. - - Args: - method (Callable): The method that was originally called, and - which instantiated this pager. - request (google.cloud.dataplex_v1.types.ListSessionsRequest): - The initial request object. - response (google.cloud.dataplex_v1.types.ListSessionsResponse): - The initial response object. - retry (google.api_core.retry.Retry): Designation of what errors, - if any, should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - """ - self._method = method - self._request = service.ListSessionsRequest(request) - self._response = response - self._retry = retry - self._timeout = timeout - self._metadata = metadata - - def __getattr__(self, name: str) -> Any: - return getattr(self._response, name) - - @property - def pages(self) -> Iterator[service.ListSessionsResponse]: - yield self._response - while self._response.next_page_token: - self._request.page_token = self._response.next_page_token - self._response = self._method(self._request, retry=self._retry, timeout=self._timeout, metadata=self._metadata) - yield self._response - - def __iter__(self) -> Iterator[analyze.Session]: - for page in self.pages: - yield from page.sessions - - def __repr__(self) -> str: - return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) - - -class ListSessionsAsyncPager: - """A pager for iterating through ``list_sessions`` requests. - - This class thinly wraps an initial - :class:`google.cloud.dataplex_v1.types.ListSessionsResponse` object, and - provides an ``__aiter__`` method to iterate through its - ``sessions`` field. - - If there are more pages, the ``__aiter__`` method will make additional - ``ListSessions`` requests and continue to iterate - through the ``sessions`` field on the - corresponding responses. - - All the usual :class:`google.cloud.dataplex_v1.types.ListSessionsResponse` - attributes are available on the pager. If multiple requests are made, only - the most recent response is retained, and thus used for attribute lookup. - """ - def __init__(self, - method: Callable[..., Awaitable[service.ListSessionsResponse]], - request: service.ListSessionsRequest, - response: service.ListSessionsResponse, - *, - retry: OptionalAsyncRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = ()): - """Instantiates the pager. - - Args: - method (Callable): The method that was originally called, and - which instantiated this pager. 
- request (google.cloud.dataplex_v1.types.ListSessionsRequest): - The initial request object. - response (google.cloud.dataplex_v1.types.ListSessionsResponse): - The initial response object. - retry (google.api_core.retry.AsyncRetry): Designation of what errors, - if any, should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - """ - self._method = method - self._request = service.ListSessionsRequest(request) - self._response = response - self._retry = retry - self._timeout = timeout - self._metadata = metadata - - def __getattr__(self, name: str) -> Any: - return getattr(self._response, name) - - @property - async def pages(self) -> AsyncIterator[service.ListSessionsResponse]: - yield self._response - while self._response.next_page_token: - self._request.page_token = self._response.next_page_token - self._response = await self._method(self._request, retry=self._retry, timeout=self._timeout, metadata=self._metadata) - yield self._response - def __aiter__(self) -> AsyncIterator[analyze.Session]: - async def async_generator(): - async for page in self.pages: - for response in page.sessions: - yield response - - return async_generator() - - def __repr__(self) -> str: - return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) diff --git a/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/dataplex_service/transports/README.rst b/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/dataplex_service/transports/README.rst deleted file mode 100644 index a70e22115784..000000000000 --- a/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/dataplex_service/transports/README.rst +++ /dev/null @@ -1,9 +0,0 @@ - -transport inheritance structure -_______________________________ - -`DataplexServiceTransport` is the ABC for all transports. -- public child `DataplexServiceGrpcTransport` for sync gRPC transport (defined in `grpc.py`). -- public child `DataplexServiceGrpcAsyncIOTransport` for async gRPC transport (defined in `grpc_asyncio.py`). -- private child `_BaseDataplexServiceRestTransport` for base REST transport with inner classes `_BaseMETHOD` (defined in `rest_base.py`). -- public child `DataplexServiceRestTransport` for sync REST transport with inner classes `METHOD` derived from the parent's corresponding `_BaseMETHOD` classes (defined in `rest.py`). diff --git a/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/dataplex_service/transports/__init__.py b/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/dataplex_service/transports/__init__.py deleted file mode 100644 index 4d9372a1b1ae..000000000000 --- a/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/dataplex_service/transports/__init__.py +++ /dev/null @@ -1,33 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# -from collections import OrderedDict -from typing import Dict, Type - -from .base import DataplexServiceTransport -from .grpc import DataplexServiceGrpcTransport -from .grpc_asyncio import DataplexServiceGrpcAsyncIOTransport - - -# Compile a registry of transports. -_transport_registry = OrderedDict() # type: Dict[str, Type[DataplexServiceTransport]] -_transport_registry['grpc'] = DataplexServiceGrpcTransport -_transport_registry['grpc_asyncio'] = DataplexServiceGrpcAsyncIOTransport - -__all__ = ( - 'DataplexServiceTransport', - 'DataplexServiceGrpcTransport', - 'DataplexServiceGrpcAsyncIOTransport', -) diff --git a/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/dataplex_service/transports/base.py b/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/dataplex_service/transports/base.py deleted file mode 100644 index 4a351d5dd59b..000000000000 --- a/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/dataplex_service/transports/base.py +++ /dev/null @@ -1,833 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -import abc -from typing import Awaitable, Callable, Dict, Optional, Sequence, Union - -from google.cloud.dataplex_v1 import gapic_version as package_version - -import google.auth # type: ignore -import google.api_core -from google.api_core import exceptions as core_exceptions -from google.api_core import gapic_v1 -from google.api_core import retry as retries -from google.api_core import operations_v1 -from google.auth import credentials as ga_credentials # type: ignore -from google.oauth2 import service_account # type: ignore - -from google.cloud.dataplex_v1.types import analyze -from google.cloud.dataplex_v1.types import resources -from google.cloud.dataplex_v1.types import service -from google.cloud.dataplex_v1.types import tasks -from google.cloud.location import locations_pb2 # type: ignore -from google.iam.v1 import iam_policy_pb2 # type: ignore -from google.iam.v1 import policy_pb2 # type: ignore -from google.longrunning import operations_pb2 # type: ignore -from google.protobuf import empty_pb2 # type: ignore - -DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(gapic_version=package_version.__version__) - - -class DataplexServiceTransport(abc.ABC): - """Abstract transport class for DataplexService.""" - - AUTH_SCOPES = ( - 'https://www.googleapis.com/auth/cloud-platform', - ) - - DEFAULT_HOST: str = 'dataplex.googleapis.com' - def __init__( - self, *, - host: str = DEFAULT_HOST, - credentials: Optional[ga_credentials.Credentials] = None, - credentials_file: Optional[str] = None, - scopes: Optional[Sequence[str]] = None, - quota_project_id: Optional[str] = None, - client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, - always_use_jwt_access: Optional[bool] = False, - api_audience: Optional[str] = None, - **kwargs, - ) -> None: - """Instantiate the transport. 
- - Args: - host (Optional[str]): - The hostname to connect to (default: 'dataplex.googleapis.com'). - credentials (Optional[google.auth.credentials.Credentials]): The - authorization credentials to attach to requests. These - credentials identify the application to the service; if none - are specified, the client will attempt to ascertain the - credentials from the environment. - credentials_file (Optional[str]): A file with credentials that can - be loaded with :func:`google.auth.load_credentials_from_file`. - This argument is mutually exclusive with credentials. - scopes (Optional[Sequence[str]]): A list of scopes. - quota_project_id (Optional[str]): An optional project to use for billing - and quota. - client_info (google.api_core.gapic_v1.client_info.ClientInfo): - The client info used to send a user-agent string along with - API requests. If ``None``, then default info will be used. - Generally, you only need to set this if you're developing - your own client library. - always_use_jwt_access (Optional[bool]): Whether self signed JWT should - be used for service account credentials. - """ - - scopes_kwargs = {"scopes": scopes, "default_scopes": self.AUTH_SCOPES} - - # Save the scopes. - self._scopes = scopes - if not hasattr(self, "_ignore_credentials"): - self._ignore_credentials: bool = False - - # If no credentials are provided, then determine the appropriate - # defaults. - if credentials and credentials_file: - raise core_exceptions.DuplicateCredentialArgs("'credentials_file' and 'credentials' are mutually exclusive") - - if credentials_file is not None: - credentials, _ = google.auth.load_credentials_from_file( - credentials_file, - **scopes_kwargs, - quota_project_id=quota_project_id - ) - elif credentials is None and not self._ignore_credentials: - credentials, _ = google.auth.default(**scopes_kwargs, quota_project_id=quota_project_id) - # Don't apply the audience if the credentials were passed in by the user. - if hasattr(credentials, "with_gdch_audience"): - credentials = credentials.with_gdch_audience(api_audience if api_audience else host) - - # If the credentials are service account credentials, then always try to use self signed JWT. - if always_use_jwt_access and isinstance(credentials, service_account.Credentials) and hasattr(service_account.Credentials, "with_always_use_jwt_access"): - credentials = credentials.with_always_use_jwt_access(True) - - # Save the credentials. - self._credentials = credentials - - # Save the hostname. Default to port 443 (HTTPS) if none is specified. - if ':' not in host: - host += ':443' - self._host = host - - @property - def host(self): - return self._host - - def _prep_wrapped_messages(self, client_info): - # Precompute the wrapped methods.
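- # Each entry below wraps an RPC with a default timeout and, for the - # idempotent read methods (the list_* and get_* calls), a retry policy - # that retries only ServiceUnavailable with exponential backoff: delays - # start at 1.0s, grow by a factor of 1.3, and are capped at 10.0s, under - # an overall 60.0s deadline. A rough standalone equivalent of one entry - # (illustrative sketch only; ``wrapped_list_lakes`` is a hypothetical name): - # - # retry = retries.Retry( - # initial=1.0, maximum=10.0, multiplier=1.3, - # predicate=retries.if_exception_type( - # core_exceptions.ServiceUnavailable), - # deadline=60.0) - # wrapped_list_lakes = gapic_v1.method.wrap_method( - # self.list_lakes, default_retry=retry, - # default_timeout=60.0, client_info=client_info)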
- self._wrapped_methods = { - self.create_lake: gapic_v1.method.wrap_method( - self.create_lake, - default_timeout=60.0, - client_info=client_info, - ), - self.update_lake: gapic_v1.method.wrap_method( - self.update_lake, - default_timeout=60.0, - client_info=client_info, - ), - self.delete_lake: gapic_v1.method.wrap_method( - self.delete_lake, - default_timeout=60.0, - client_info=client_info, - ), - self.list_lakes: gapic_v1.method.wrap_method( - self.list_lakes, - default_retry=retries.Retry( - initial=1.0, - maximum=10.0, - multiplier=1.3, - predicate=retries.if_exception_type( - core_exceptions.ServiceUnavailable, - ), - deadline=60.0, - ), - default_timeout=60.0, - client_info=client_info, - ), - self.get_lake: gapic_v1.method.wrap_method( - self.get_lake, - default_retry=retries.Retry( - initial=1.0, - maximum=10.0, - multiplier=1.3, - predicate=retries.if_exception_type( - core_exceptions.ServiceUnavailable, - ), - deadline=60.0, - ), - default_timeout=60.0, - client_info=client_info, - ), - self.list_lake_actions: gapic_v1.method.wrap_method( - self.list_lake_actions, - default_retry=retries.Retry( - initial=1.0, - maximum=10.0, - multiplier=1.3, - predicate=retries.if_exception_type( - core_exceptions.ServiceUnavailable, - ), - deadline=60.0, - ), - default_timeout=60.0, - client_info=client_info, - ), - self.create_zone: gapic_v1.method.wrap_method( - self.create_zone, - default_timeout=60.0, - client_info=client_info, - ), - self.update_zone: gapic_v1.method.wrap_method( - self.update_zone, - default_timeout=60.0, - client_info=client_info, - ), - self.delete_zone: gapic_v1.method.wrap_method( - self.delete_zone, - default_timeout=60.0, - client_info=client_info, - ), - self.list_zones: gapic_v1.method.wrap_method( - self.list_zones, - default_retry=retries.Retry( - initial=1.0, - maximum=10.0, - multiplier=1.3, - predicate=retries.if_exception_type( - core_exceptions.ServiceUnavailable, - ), - deadline=60.0, - ), - default_timeout=60.0, - client_info=client_info, - ), - self.get_zone: gapic_v1.method.wrap_method( - self.get_zone, - default_retry=retries.Retry( - initial=1.0, - maximum=10.0, - multiplier=1.3, - predicate=retries.if_exception_type( - core_exceptions.ServiceUnavailable, - ), - deadline=60.0, - ), - default_timeout=60.0, - client_info=client_info, - ), - self.list_zone_actions: gapic_v1.method.wrap_method( - self.list_zone_actions, - default_retry=retries.Retry( - initial=1.0, - maximum=10.0, - multiplier=1.3, - predicate=retries.if_exception_type( - core_exceptions.ServiceUnavailable, - ), - deadline=60.0, - ), - default_timeout=60.0, - client_info=client_info, - ), - self.create_asset: gapic_v1.method.wrap_method( - self.create_asset, - default_timeout=60.0, - client_info=client_info, - ), - self.update_asset: gapic_v1.method.wrap_method( - self.update_asset, - default_timeout=60.0, - client_info=client_info, - ), - self.delete_asset: gapic_v1.method.wrap_method( - self.delete_asset, - default_timeout=60.0, - client_info=client_info, - ), - self.list_assets: gapic_v1.method.wrap_method( - self.list_assets, - default_retry=retries.Retry( - initial=1.0, - maximum=10.0, - multiplier=1.3, - predicate=retries.if_exception_type( - core_exceptions.ServiceUnavailable, - ), - deadline=60.0, - ), - default_timeout=60.0, - client_info=client_info, - ), - self.get_asset: gapic_v1.method.wrap_method( - self.get_asset, - default_retry=retries.Retry( - initial=1.0, - maximum=10.0, - multiplier=1.3, - predicate=retries.if_exception_type( - core_exceptions.ServiceUnavailable, - 
), - deadline=60.0, - ), - default_timeout=60.0, - client_info=client_info, - ), - self.list_asset_actions: gapic_v1.method.wrap_method( - self.list_asset_actions, - default_retry=retries.Retry( - initial=1.0, - maximum=10.0, - multiplier=1.3, - predicate=retries.if_exception_type( - core_exceptions.ServiceUnavailable, - ), - deadline=60.0, - ), - default_timeout=60.0, - client_info=client_info, - ), - self.create_task: gapic_v1.method.wrap_method( - self.create_task, - default_timeout=60.0, - client_info=client_info, - ), - self.update_task: gapic_v1.method.wrap_method( - self.update_task, - default_timeout=60.0, - client_info=client_info, - ), - self.delete_task: gapic_v1.method.wrap_method( - self.delete_task, - default_timeout=60.0, - client_info=client_info, - ), - self.list_tasks: gapic_v1.method.wrap_method( - self.list_tasks, - default_retry=retries.Retry( - initial=1.0, - maximum=10.0, - multiplier=1.3, - predicate=retries.if_exception_type( - core_exceptions.ServiceUnavailable, - ), - deadline=60.0, - ), - default_timeout=60.0, - client_info=client_info, - ), - self.get_task: gapic_v1.method.wrap_method( - self.get_task, - default_retry=retries.Retry( - initial=1.0, - maximum=10.0, - multiplier=1.3, - predicate=retries.if_exception_type( - core_exceptions.ServiceUnavailable, - ), - deadline=60.0, - ), - default_timeout=60.0, - client_info=client_info, - ), - self.list_jobs: gapic_v1.method.wrap_method( - self.list_jobs, - default_retry=retries.Retry( - initial=1.0, - maximum=10.0, - multiplier=1.3, - predicate=retries.if_exception_type( - core_exceptions.ServiceUnavailable, - ), - deadline=60.0, - ), - default_timeout=60.0, - client_info=client_info, - ), - self.run_task: gapic_v1.method.wrap_method( - self.run_task, - default_timeout=None, - client_info=client_info, - ), - self.get_job: gapic_v1.method.wrap_method( - self.get_job, - default_retry=retries.Retry( - initial=1.0, - maximum=10.0, - multiplier=1.3, - predicate=retries.if_exception_type( - core_exceptions.ServiceUnavailable, - ), - deadline=60.0, - ), - default_timeout=60.0, - client_info=client_info, - ), - self.cancel_job: gapic_v1.method.wrap_method( - self.cancel_job, - default_timeout=60.0, - client_info=client_info, - ), - self.create_environment: gapic_v1.method.wrap_method( - self.create_environment, - default_timeout=60.0, - client_info=client_info, - ), - self.update_environment: gapic_v1.method.wrap_method( - self.update_environment, - default_timeout=60.0, - client_info=client_info, - ), - self.delete_environment: gapic_v1.method.wrap_method( - self.delete_environment, - default_timeout=60.0, - client_info=client_info, - ), - self.list_environments: gapic_v1.method.wrap_method( - self.list_environments, - default_retry=retries.Retry( - initial=1.0, - maximum=10.0, - multiplier=1.3, - predicate=retries.if_exception_type( - core_exceptions.ServiceUnavailable, - ), - deadline=60.0, - ), - default_timeout=60.0, - client_info=client_info, - ), - self.get_environment: gapic_v1.method.wrap_method( - self.get_environment, - default_retry=retries.Retry( - initial=1.0, - maximum=10.0, - multiplier=1.3, - predicate=retries.if_exception_type( - core_exceptions.ServiceUnavailable, - ), - deadline=60.0, - ), - default_timeout=60.0, - client_info=client_info, - ), - self.list_sessions: gapic_v1.method.wrap_method( - self.list_sessions, - default_timeout=None, - client_info=client_info, - ), - self.get_location: gapic_v1.method.wrap_method( - self.get_location, - default_timeout=None, - client_info=client_info, - ), - 
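- # Like get_location above, the remaining location and operations mixin - # methods are wrapped with default_timeout=None and no default retry, so - # deadlines and retries apply only when the caller supplies them per call.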
self.list_locations: gapic_v1.method.wrap_method( - self.list_locations, - default_timeout=None, - client_info=client_info, - ), - self.cancel_operation: gapic_v1.method.wrap_method( - self.cancel_operation, - default_timeout=None, - client_info=client_info, - ), - self.delete_operation: gapic_v1.method.wrap_method( - self.delete_operation, - default_timeout=None, - client_info=client_info, - ), - self.get_operation: gapic_v1.method.wrap_method( - self.get_operation, - default_timeout=None, - client_info=client_info, - ), - self.list_operations: gapic_v1.method.wrap_method( - self.list_operations, - default_timeout=None, - client_info=client_info, - ), - } - - def close(self): - """Closes resources associated with the transport. - - .. warning:: - Only call this method if the transport is NOT shared - with other clients - this may cause errors in other clients! - """ - raise NotImplementedError() - - @property - def operations_client(self): - """Return the client designed to process long-running operations.""" - raise NotImplementedError() - - @property - def create_lake(self) -> Callable[ - [service.CreateLakeRequest], - Union[ - operations_pb2.Operation, - Awaitable[operations_pb2.Operation] - ]]: - raise NotImplementedError() - - @property - def update_lake(self) -> Callable[ - [service.UpdateLakeRequest], - Union[ - operations_pb2.Operation, - Awaitable[operations_pb2.Operation] - ]]: - raise NotImplementedError() - - @property - def delete_lake(self) -> Callable[ - [service.DeleteLakeRequest], - Union[ - operations_pb2.Operation, - Awaitable[operations_pb2.Operation] - ]]: - raise NotImplementedError() - - @property - def list_lakes(self) -> Callable[ - [service.ListLakesRequest], - Union[ - service.ListLakesResponse, - Awaitable[service.ListLakesResponse] - ]]: - raise NotImplementedError() - - @property - def get_lake(self) -> Callable[ - [service.GetLakeRequest], - Union[ - resources.Lake, - Awaitable[resources.Lake] - ]]: - raise NotImplementedError() - - @property - def list_lake_actions(self) -> Callable[ - [service.ListLakeActionsRequest], - Union[ - service.ListActionsResponse, - Awaitable[service.ListActionsResponse] - ]]: - raise NotImplementedError() - - @property - def create_zone(self) -> Callable[ - [service.CreateZoneRequest], - Union[ - operations_pb2.Operation, - Awaitable[operations_pb2.Operation] - ]]: - raise NotImplementedError() - - @property - def update_zone(self) -> Callable[ - [service.UpdateZoneRequest], - Union[ - operations_pb2.Operation, - Awaitable[operations_pb2.Operation] - ]]: - raise NotImplementedError() - - @property - def delete_zone(self) -> Callable[ - [service.DeleteZoneRequest], - Union[ - operations_pb2.Operation, - Awaitable[operations_pb2.Operation] - ]]: - raise NotImplementedError() - - @property - def list_zones(self) -> Callable[ - [service.ListZonesRequest], - Union[ - service.ListZonesResponse, - Awaitable[service.ListZonesResponse] - ]]: - raise NotImplementedError() - - @property - def get_zone(self) -> Callable[ - [service.GetZoneRequest], - Union[ - resources.Zone, - Awaitable[resources.Zone] - ]]: - raise NotImplementedError() - - @property - def list_zone_actions(self) -> Callable[ - [service.ListZoneActionsRequest], - Union[ - service.ListActionsResponse, - Awaitable[service.ListActionsResponse] - ]]: - raise NotImplementedError() - - @property - def create_asset(self) -> Callable[ - [service.CreateAssetRequest], - Union[ - operations_pb2.Operation, - Awaitable[operations_pb2.Operation] - ]]: - raise NotImplementedError() - - 
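- # Every RPC property on this ABC has the same shape: the return type is - # a Union of the plain response and an Awaitable of it, so the sync and - # async gRPC transports can both satisfy one interface. A minimal sketch - # of a concrete override (illustrative only; the real implementations - # live in grpc.py and grpc_asyncio.py): - # - # @property - # def update_asset(self): - # return self._stubs['update_asset']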
@property - def update_asset(self) -> Callable[ - [service.UpdateAssetRequest], - Union[ - operations_pb2.Operation, - Awaitable[operations_pb2.Operation] - ]]: - raise NotImplementedError() - - @property - def delete_asset(self) -> Callable[ - [service.DeleteAssetRequest], - Union[ - operations_pb2.Operation, - Awaitable[operations_pb2.Operation] - ]]: - raise NotImplementedError() - - @property - def list_assets(self) -> Callable[ - [service.ListAssetsRequest], - Union[ - service.ListAssetsResponse, - Awaitable[service.ListAssetsResponse] - ]]: - raise NotImplementedError() - - @property - def get_asset(self) -> Callable[ - [service.GetAssetRequest], - Union[ - resources.Asset, - Awaitable[resources.Asset] - ]]: - raise NotImplementedError() - - @property - def list_asset_actions(self) -> Callable[ - [service.ListAssetActionsRequest], - Union[ - service.ListActionsResponse, - Awaitable[service.ListActionsResponse] - ]]: - raise NotImplementedError() - - @property - def create_task(self) -> Callable[ - [service.CreateTaskRequest], - Union[ - operations_pb2.Operation, - Awaitable[operations_pb2.Operation] - ]]: - raise NotImplementedError() - - @property - def update_task(self) -> Callable[ - [service.UpdateTaskRequest], - Union[ - operations_pb2.Operation, - Awaitable[operations_pb2.Operation] - ]]: - raise NotImplementedError() - - @property - def delete_task(self) -> Callable[ - [service.DeleteTaskRequest], - Union[ - operations_pb2.Operation, - Awaitable[operations_pb2.Operation] - ]]: - raise NotImplementedError() - - @property - def list_tasks(self) -> Callable[ - [service.ListTasksRequest], - Union[ - service.ListTasksResponse, - Awaitable[service.ListTasksResponse] - ]]: - raise NotImplementedError() - - @property - def get_task(self) -> Callable[ - [service.GetTaskRequest], - Union[ - tasks.Task, - Awaitable[tasks.Task] - ]]: - raise NotImplementedError() - - @property - def list_jobs(self) -> Callable[ - [service.ListJobsRequest], - Union[ - service.ListJobsResponse, - Awaitable[service.ListJobsResponse] - ]]: - raise NotImplementedError() - - @property - def run_task(self) -> Callable[ - [service.RunTaskRequest], - Union[ - service.RunTaskResponse, - Awaitable[service.RunTaskResponse] - ]]: - raise NotImplementedError() - - @property - def get_job(self) -> Callable[ - [service.GetJobRequest], - Union[ - tasks.Job, - Awaitable[tasks.Job] - ]]: - raise NotImplementedError() - - @property - def cancel_job(self) -> Callable[ - [service.CancelJobRequest], - Union[ - empty_pb2.Empty, - Awaitable[empty_pb2.Empty] - ]]: - raise NotImplementedError() - - @property - def create_environment(self) -> Callable[ - [service.CreateEnvironmentRequest], - Union[ - operations_pb2.Operation, - Awaitable[operations_pb2.Operation] - ]]: - raise NotImplementedError() - - @property - def update_environment(self) -> Callable[ - [service.UpdateEnvironmentRequest], - Union[ - operations_pb2.Operation, - Awaitable[operations_pb2.Operation] - ]]: - raise NotImplementedError() - - @property - def delete_environment(self) -> Callable[ - [service.DeleteEnvironmentRequest], - Union[ - operations_pb2.Operation, - Awaitable[operations_pb2.Operation] - ]]: - raise NotImplementedError() - - @property - def list_environments(self) -> Callable[ - [service.ListEnvironmentsRequest], - Union[ - service.ListEnvironmentsResponse, - Awaitable[service.ListEnvironmentsResponse] - ]]: - raise NotImplementedError() - - @property - def get_environment(self) -> Callable[ - [service.GetEnvironmentRequest], - Union[ - 
analyze.Environment, - Awaitable[analyze.Environment] - ]]: - raise NotImplementedError() - - @property - def list_sessions(self) -> Callable[ - [service.ListSessionsRequest], - Union[ - service.ListSessionsResponse, - Awaitable[service.ListSessionsResponse] - ]]: - raise NotImplementedError() - - @property - def list_operations( - self, - ) -> Callable[ - [operations_pb2.ListOperationsRequest], - Union[operations_pb2.ListOperationsResponse, Awaitable[operations_pb2.ListOperationsResponse]], - ]: - raise NotImplementedError() - - @property - def get_operation( - self, - ) -> Callable[ - [operations_pb2.GetOperationRequest], - Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], - ]: - raise NotImplementedError() - - @property - def cancel_operation( - self, - ) -> Callable[ - [operations_pb2.CancelOperationRequest], - None, - ]: - raise NotImplementedError() - - @property - def delete_operation( - self, - ) -> Callable[ - [operations_pb2.DeleteOperationRequest], - None, - ]: - raise NotImplementedError() - - @property - def get_location(self, - ) -> Callable[ - [locations_pb2.GetLocationRequest], - Union[locations_pb2.Location, Awaitable[locations_pb2.Location]], - ]: - raise NotImplementedError() - - @property - def list_locations(self, - ) -> Callable[ - [locations_pb2.ListLocationsRequest], - Union[locations_pb2.ListLocationsResponse, Awaitable[locations_pb2.ListLocationsResponse]], - ]: - raise NotImplementedError() - - @property - def kind(self) -> str: - raise NotImplementedError() - - -__all__ = ( - 'DataplexServiceTransport', -) diff --git a/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/dataplex_service/transports/grpc.py b/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/dataplex_service/transports/grpc.py deleted file mode 100644 index 8760eb831d23..000000000000 --- a/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/dataplex_service/transports/grpc.py +++ /dev/null @@ -1,1247 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# -import warnings -from typing import Callable, Dict, Optional, Sequence, Tuple, Union - -from google.api_core import grpc_helpers -from google.api_core import operations_v1 -from google.api_core import gapic_v1 -import google.auth # type: ignore -from google.auth import credentials as ga_credentials # type: ignore -from google.auth.transport.grpc import SslCredentials # type: ignore - -import grpc # type: ignore - -from google.cloud.dataplex_v1.types import analyze -from google.cloud.dataplex_v1.types import resources -from google.cloud.dataplex_v1.types import service -from google.cloud.dataplex_v1.types import tasks -from google.cloud.location import locations_pb2 # type: ignore -from google.iam.v1 import iam_policy_pb2 # type: ignore -from google.iam.v1 import policy_pb2 # type: ignore -from google.longrunning import operations_pb2 # type: ignore -from google.protobuf import empty_pb2 # type: ignore -from .base import DataplexServiceTransport, DEFAULT_CLIENT_INFO - - -class DataplexServiceGrpcTransport(DataplexServiceTransport): - """gRPC backend transport for DataplexService. - - Dataplex service provides data lakes as a service. The - primary resources offered by this service are Lakes, Zones and - Assets which collectively allow a data administrator to - organize, manage, secure and catalog data across their - organization located across cloud projects in a variety of - storage systems including Cloud Storage and BigQuery. - - This class defines the same methods as the primary client, so the - primary client can load the underlying transport implementation - and call it. - - It sends protocol buffers over the wire using gRPC (which is built on - top of HTTP/2); the ``grpcio`` package must be installed. - """ - _stubs: Dict[str, Callable] - - def __init__(self, *, - host: str = 'dataplex.googleapis.com', - credentials: Optional[ga_credentials.Credentials] = None, - credentials_file: Optional[str] = None, - scopes: Optional[Sequence[str]] = None, - channel: Optional[Union[grpc.Channel, Callable[..., grpc.Channel]]] = None, - api_mtls_endpoint: Optional[str] = None, - client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None, - ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None, - client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, - quota_project_id: Optional[str] = None, - client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, - always_use_jwt_access: Optional[bool] = False, - api_audience: Optional[str] = None, - ) -> None: - """Instantiate the transport. - - Args: - host (Optional[str]): - The hostname to connect to (default: 'dataplex.googleapis.com'). - credentials (Optional[google.auth.credentials.Credentials]): The - authorization credentials to attach to requests. These - credentials identify the application to the service; if none - are specified, the client will attempt to ascertain the - credentials from the environment. - This argument is ignored if a ``channel`` instance is provided. - credentials_file (Optional[str]): A file with credentials that can - be loaded with :func:`google.auth.load_credentials_from_file`. - This argument is ignored if a ``channel`` instance is provided. - scopes (Optional[Sequence[str]]): A list of scopes. This argument is - ignored if a ``channel`` instance is provided. - channel (Optional[Union[grpc.Channel, Callable[..., grpc.Channel]]]): - A ``Channel`` instance through which to make calls, or a Callable - that constructs and returns one.
If set to None, ``self.create_channel`` - is used to create the channel. If a Callable is given, it will be called - with the same arguments as used in ``self.create_channel``. - api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint. - If provided, it overrides the ``host`` argument and tries to create - a mutual TLS channel with client SSL credentials from - ``client_cert_source`` or application default SSL credentials. - client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]): - Deprecated. A callback to provide client SSL certificate bytes and - private key bytes, both in PEM format. It is ignored if - ``api_mtls_endpoint`` is None. - ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials - for the grpc channel. It is ignored if a ``channel`` instance is provided. - client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]): - A callback to provide client certificate bytes and private key bytes, - both in PEM format. It is used to configure a mutual TLS channel. It is - ignored if a ``channel`` instance or ``ssl_channel_credentials`` is provided. - quota_project_id (Optional[str]): An optional project to use for billing - and quota. - client_info (google.api_core.gapic_v1.client_info.ClientInfo): - The client info used to send a user-agent string along with - API requests. If ``None``, then default info will be used. - Generally, you only need to set this if you're developing - your own client library. - always_use_jwt_access (Optional[bool]): Whether self signed JWT should - be used for service account credentials. - - Raises: - google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport - creation failed for any reason. - google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` - and ``credentials_file`` are passed. - """ - self._grpc_channel = None - self._ssl_channel_credentials = ssl_channel_credentials - self._stubs: Dict[str, Callable] = {} - self._operations_client: Optional[operations_v1.OperationsClient] = None - - if api_mtls_endpoint: - warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning) - if client_cert_source: - warnings.warn("client_cert_source is deprecated", DeprecationWarning) - - if isinstance(channel, grpc.Channel): - # Ignore credentials if a channel was passed. - credentials = None - self._ignore_credentials = True - # If a channel was explicitly provided, set it. - self._grpc_channel = channel - self._ssl_channel_credentials = None - - else: - if api_mtls_endpoint: - host = api_mtls_endpoint - - # Create SSL credentials with client_cert_source or application - # default SSL credentials. 
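- # A caller-supplied client_cert_source takes precedence; otherwise the - # channel falls back to the application default SSL credentials resolved - # by google.auth.transport.grpc.SslCredentials().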
- if client_cert_source: - cert, key = client_cert_source() - self._ssl_channel_credentials = grpc.ssl_channel_credentials( - certificate_chain=cert, private_key=key - ) - else: - self._ssl_channel_credentials = SslCredentials().ssl_credentials - - else: - if client_cert_source_for_mtls and not ssl_channel_credentials: - cert, key = client_cert_source_for_mtls() - self._ssl_channel_credentials = grpc.ssl_channel_credentials( - certificate_chain=cert, private_key=key - ) - - # The base transport sets the host, credentials and scopes - super().__init__( - host=host, - credentials=credentials, - credentials_file=credentials_file, - scopes=scopes, - quota_project_id=quota_project_id, - client_info=client_info, - always_use_jwt_access=always_use_jwt_access, - api_audience=api_audience, - ) - - if not self._grpc_channel: - # initialize with the provided callable or the default channel - channel_init = channel or type(self).create_channel - self._grpc_channel = channel_init( - self._host, - # use the credentials which are saved - credentials=self._credentials, - # Set ``credentials_file`` to ``None`` here as - # the credentials that we saved earlier should be used. - credentials_file=None, - scopes=self._scopes, - ssl_credentials=self._ssl_channel_credentials, - quota_project_id=quota_project_id, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ], - ) - - # Wrap messages. This must be done after self._grpc_channel exists - self._prep_wrapped_messages(client_info) - - @classmethod - def create_channel(cls, - host: str = 'dataplex.googleapis.com', - credentials: Optional[ga_credentials.Credentials] = None, - credentials_file: Optional[str] = None, - scopes: Optional[Sequence[str]] = None, - quota_project_id: Optional[str] = None, - **kwargs) -> grpc.Channel: - """Create and return a gRPC channel object. - Args: - host (Optional[str]): The host for the channel to use. - credentials (Optional[~.Credentials]): The - authorization credentials to attach to requests. These - credentials identify this application to the service. If - none are specified, the client will attempt to ascertain - the credentials from the environment. - credentials_file (Optional[str]): A file with credentials that can - be loaded with :func:`google.auth.load_credentials_from_file`. - This argument is mutually exclusive with credentials. - scopes (Optional[Sequence[str]]): An optional list of scopes needed for this - service. These are only used when credentials are not specified and - are passed to :func:`google.auth.default`. - quota_project_id (Optional[str]): An optional project to use for billing - and quota. - kwargs (Optional[dict]): Keyword arguments, which are passed to the - channel creation. - Returns: - grpc.Channel: A gRPC channel object. - - Raises: - google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` - and ``credentials_file`` are passed. - """ - - return grpc_helpers.create_channel( - host, - credentials=credentials, - credentials_file=credentials_file, - quota_project_id=quota_project_id, - default_scopes=cls.AUTH_SCOPES, - scopes=scopes, - default_host=cls.DEFAULT_HOST, - **kwargs - ) - - @property - def grpc_channel(self) -> grpc.Channel: - """Return the channel designed to connect to this service. - """ - return self._grpc_channel - - @property - def operations_client(self) -> operations_v1.OperationsClient: - """Create the client designed to process long-running operations.
- - This property caches on the instance; repeated calls return the same - client. - """ - # Quick check: Only create a new client if we do not already have one. - if self._operations_client is None: - self._operations_client = operations_v1.OperationsClient( - self.grpc_channel - ) - - # Return the client from cache. - return self._operations_client - - @property - def create_lake(self) -> Callable[ - [service.CreateLakeRequest], - operations_pb2.Operation]: - r"""Return a callable for the create lake method over gRPC. - - Creates a lake resource. - - Returns: - Callable[[~.CreateLakeRequest], - ~.Operation]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'create_lake' not in self._stubs: - self._stubs['create_lake'] = self.grpc_channel.unary_unary( - '/google.cloud.dataplex.v1.DataplexService/CreateLake', - request_serializer=service.CreateLakeRequest.serialize, - response_deserializer=operations_pb2.Operation.FromString, - ) - return self._stubs['create_lake'] - - @property - def update_lake(self) -> Callable[ - [service.UpdateLakeRequest], - operations_pb2.Operation]: - r"""Return a callable for the update lake method over gRPC. - - Updates a lake resource. - - Returns: - Callable[[~.UpdateLakeRequest], - ~.Operation]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'update_lake' not in self._stubs: - self._stubs['update_lake'] = self.grpc_channel.unary_unary( - '/google.cloud.dataplex.v1.DataplexService/UpdateLake', - request_serializer=service.UpdateLakeRequest.serialize, - response_deserializer=operations_pb2.Operation.FromString, - ) - return self._stubs['update_lake'] - - @property - def delete_lake(self) -> Callable[ - [service.DeleteLakeRequest], - operations_pb2.Operation]: - r"""Return a callable for the delete lake method over gRPC. - - Deletes a lake resource. All zones within the lake - must be deleted before the lake can be deleted. - - Returns: - Callable[[~.DeleteLakeRequest], - ~.Operation]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'delete_lake' not in self._stubs: - self._stubs['delete_lake'] = self.grpc_channel.unary_unary( - '/google.cloud.dataplex.v1.DataplexService/DeleteLake', - request_serializer=service.DeleteLakeRequest.serialize, - response_deserializer=operations_pb2.Operation.FromString, - ) - return self._stubs['delete_lake'] - - @property - def list_lakes(self) -> Callable[ - [service.ListLakesRequest], - service.ListLakesResponse]: - r"""Return a callable for the list lakes method over gRPC. - - Lists lake resources in a project and location. - - Returns: - Callable[[~.ListLakesRequest], - ~.ListLakesResponse]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. 
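- # The stub is created on first access and cached in self._stubs, so - # repeated property lookups reuse the same UnaryUnaryMultiCallable - # instead of re-registering the method on the channel.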
- # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'list_lakes' not in self._stubs: - self._stubs['list_lakes'] = self.grpc_channel.unary_unary( - '/google.cloud.dataplex.v1.DataplexService/ListLakes', - request_serializer=service.ListLakesRequest.serialize, - response_deserializer=service.ListLakesResponse.deserialize, - ) - return self._stubs['list_lakes'] - - @property - def get_lake(self) -> Callable[ - [service.GetLakeRequest], - resources.Lake]: - r"""Return a callable for the get lake method over gRPC. - - Retrieves a lake resource. - - Returns: - Callable[[~.GetLakeRequest], - ~.Lake]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'get_lake' not in self._stubs: - self._stubs['get_lake'] = self.grpc_channel.unary_unary( - '/google.cloud.dataplex.v1.DataplexService/GetLake', - request_serializer=service.GetLakeRequest.serialize, - response_deserializer=resources.Lake.deserialize, - ) - return self._stubs['get_lake'] - - @property - def list_lake_actions(self) -> Callable[ - [service.ListLakeActionsRequest], - service.ListActionsResponse]: - r"""Return a callable for the list lake actions method over gRPC. - - Lists action resources in a lake. - - Returns: - Callable[[~.ListLakeActionsRequest], - ~.ListActionsResponse]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'list_lake_actions' not in self._stubs: - self._stubs['list_lake_actions'] = self.grpc_channel.unary_unary( - '/google.cloud.dataplex.v1.DataplexService/ListLakeActions', - request_serializer=service.ListLakeActionsRequest.serialize, - response_deserializer=service.ListActionsResponse.deserialize, - ) - return self._stubs['list_lake_actions'] - - @property - def create_zone(self) -> Callable[ - [service.CreateZoneRequest], - operations_pb2.Operation]: - r"""Return a callable for the create zone method over gRPC. - - Creates a zone resource within a lake. - - Returns: - Callable[[~.CreateZoneRequest], - ~.Operation]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'create_zone' not in self._stubs: - self._stubs['create_zone'] = self.grpc_channel.unary_unary( - '/google.cloud.dataplex.v1.DataplexService/CreateZone', - request_serializer=service.CreateZoneRequest.serialize, - response_deserializer=operations_pb2.Operation.FromString, - ) - return self._stubs['create_zone'] - - @property - def update_zone(self) -> Callable[ - [service.UpdateZoneRequest], - operations_pb2.Operation]: - r"""Return a callable for the update zone method over gRPC. - - Updates a zone resource. - - Returns: - Callable[[~.UpdateZoneRequest], - ~.Operation]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. 
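- # Because UpdateZone is a long-running operation, the response is - # deserialized into operations_pb2.Operation; callers poll it through - # the operations_client property until the updated Zone is available.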
- # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'update_zone' not in self._stubs: - self._stubs['update_zone'] = self.grpc_channel.unary_unary( - '/google.cloud.dataplex.v1.DataplexService/UpdateZone', - request_serializer=service.UpdateZoneRequest.serialize, - response_deserializer=operations_pb2.Operation.FromString, - ) - return self._stubs['update_zone'] - - @property - def delete_zone(self) -> Callable[ - [service.DeleteZoneRequest], - operations_pb2.Operation]: - r"""Return a callable for the delete zone method over gRPC. - - Deletes a zone resource. All assets within a zone - must be deleted before the zone can be deleted. - - Returns: - Callable[[~.DeleteZoneRequest], - ~.Operation]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'delete_zone' not in self._stubs: - self._stubs['delete_zone'] = self.grpc_channel.unary_unary( - '/google.cloud.dataplex.v1.DataplexService/DeleteZone', - request_serializer=service.DeleteZoneRequest.serialize, - response_deserializer=operations_pb2.Operation.FromString, - ) - return self._stubs['delete_zone'] - - @property - def list_zones(self) -> Callable[ - [service.ListZonesRequest], - service.ListZonesResponse]: - r"""Return a callable for the list zones method over gRPC. - - Lists zone resources in a lake. - - Returns: - Callable[[~.ListZonesRequest], - ~.ListZonesResponse]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'list_zones' not in self._stubs: - self._stubs['list_zones'] = self.grpc_channel.unary_unary( - '/google.cloud.dataplex.v1.DataplexService/ListZones', - request_serializer=service.ListZonesRequest.serialize, - response_deserializer=service.ListZonesResponse.deserialize, - ) - return self._stubs['list_zones'] - - @property - def get_zone(self) -> Callable[ - [service.GetZoneRequest], - resources.Zone]: - r"""Return a callable for the get zone method over gRPC. - - Retrieves a zone resource. - - Returns: - Callable[[~.GetZoneRequest], - ~.Zone]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'get_zone' not in self._stubs: - self._stubs['get_zone'] = self.grpc_channel.unary_unary( - '/google.cloud.dataplex.v1.DataplexService/GetZone', - request_serializer=service.GetZoneRequest.serialize, - response_deserializer=resources.Zone.deserialize, - ) - return self._stubs['get_zone'] - - @property - def list_zone_actions(self) -> Callable[ - [service.ListZoneActionsRequest], - service.ListActionsResponse]: - r"""Return a callable for the list zone actions method over gRPC. - - Lists action resources in a zone. - - Returns: - Callable[[~.ListZoneActionsRequest], - ~.ListActionsResponse]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. 
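- # Lake, zone, and asset action listings all deserialize into the same - # service.ListActionsResponse message; only the request type differs - # across the three RPCs.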
- # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'list_zone_actions' not in self._stubs: - self._stubs['list_zone_actions'] = self.grpc_channel.unary_unary( - '/google.cloud.dataplex.v1.DataplexService/ListZoneActions', - request_serializer=service.ListZoneActionsRequest.serialize, - response_deserializer=service.ListActionsResponse.deserialize, - ) - return self._stubs['list_zone_actions'] - - @property - def create_asset(self) -> Callable[ - [service.CreateAssetRequest], - operations_pb2.Operation]: - r"""Return a callable for the create asset method over gRPC. - - Creates an asset resource. - - Returns: - Callable[[~.CreateAssetRequest], - ~.Operation]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'create_asset' not in self._stubs: - self._stubs['create_asset'] = self.grpc_channel.unary_unary( - '/google.cloud.dataplex.v1.DataplexService/CreateAsset', - request_serializer=service.CreateAssetRequest.serialize, - response_deserializer=operations_pb2.Operation.FromString, - ) - return self._stubs['create_asset'] - - @property - def update_asset(self) -> Callable[ - [service.UpdateAssetRequest], - operations_pb2.Operation]: - r"""Return a callable for the update asset method over gRPC. - - Updates an asset resource. - - Returns: - Callable[[~.UpdateAssetRequest], - ~.Operation]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'update_asset' not in self._stubs: - self._stubs['update_asset'] = self.grpc_channel.unary_unary( - '/google.cloud.dataplex.v1.DataplexService/UpdateAsset', - request_serializer=service.UpdateAssetRequest.serialize, - response_deserializer=operations_pb2.Operation.FromString, - ) - return self._stubs['update_asset'] - - @property - def delete_asset(self) -> Callable[ - [service.DeleteAssetRequest], - operations_pb2.Operation]: - r"""Return a callable for the delete asset method over gRPC. - - Deletes an asset resource. The referenced storage - resource is detached (default) or deleted based on the - associated Lifecycle policy. - - Returns: - Callable[[~.DeleteAssetRequest], - ~.Operation]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'delete_asset' not in self._stubs: - self._stubs['delete_asset'] = self.grpc_channel.unary_unary( - '/google.cloud.dataplex.v1.DataplexService/DeleteAsset', - request_serializer=service.DeleteAssetRequest.serialize, - response_deserializer=operations_pb2.Operation.FromString, - ) - return self._stubs['delete_asset'] - - @property - def list_assets(self) -> Callable[ - [service.ListAssetsRequest], - service.ListAssetsResponse]: - r"""Return a callable for the list assets method over gRPC. - - Lists asset resources in a zone. - - Returns: - Callable[[~.ListAssetsRequest], - ~.ListAssetsResponse]: - A function that, when called, will call the underlying RPC - on the server. 
- """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'list_assets' not in self._stubs: - self._stubs['list_assets'] = self.grpc_channel.unary_unary( - '/google.cloud.dataplex.v1.DataplexService/ListAssets', - request_serializer=service.ListAssetsRequest.serialize, - response_deserializer=service.ListAssetsResponse.deserialize, - ) - return self._stubs['list_assets'] - - @property - def get_asset(self) -> Callable[ - [service.GetAssetRequest], - resources.Asset]: - r"""Return a callable for the get asset method over gRPC. - - Retrieves an asset resource. - - Returns: - Callable[[~.GetAssetRequest], - ~.Asset]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'get_asset' not in self._stubs: - self._stubs['get_asset'] = self.grpc_channel.unary_unary( - '/google.cloud.dataplex.v1.DataplexService/GetAsset', - request_serializer=service.GetAssetRequest.serialize, - response_deserializer=resources.Asset.deserialize, - ) - return self._stubs['get_asset'] - - @property - def list_asset_actions(self) -> Callable[ - [service.ListAssetActionsRequest], - service.ListActionsResponse]: - r"""Return a callable for the list asset actions method over gRPC. - - Lists action resources in an asset. - - Returns: - Callable[[~.ListAssetActionsRequest], - ~.ListActionsResponse]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'list_asset_actions' not in self._stubs: - self._stubs['list_asset_actions'] = self.grpc_channel.unary_unary( - '/google.cloud.dataplex.v1.DataplexService/ListAssetActions', - request_serializer=service.ListAssetActionsRequest.serialize, - response_deserializer=service.ListActionsResponse.deserialize, - ) - return self._stubs['list_asset_actions'] - - @property - def create_task(self) -> Callable[ - [service.CreateTaskRequest], - operations_pb2.Operation]: - r"""Return a callable for the create task method over gRPC. - - Creates a task resource within a lake. - - Returns: - Callable[[~.CreateTaskRequest], - ~.Operation]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'create_task' not in self._stubs: - self._stubs['create_task'] = self.grpc_channel.unary_unary( - '/google.cloud.dataplex.v1.DataplexService/CreateTask', - request_serializer=service.CreateTaskRequest.serialize, - response_deserializer=operations_pb2.Operation.FromString, - ) - return self._stubs['create_task'] - - @property - def update_task(self) -> Callable[ - [service.UpdateTaskRequest], - operations_pb2.Operation]: - r"""Return a callable for the update task method over gRPC. - - Update the task resource. - - Returns: - Callable[[~.UpdateTaskRequest], - ~.Operation]: - A function that, when called, will call the underlying RPC - on the server. 
- """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'update_task' not in self._stubs: - self._stubs['update_task'] = self.grpc_channel.unary_unary( - '/google.cloud.dataplex.v1.DataplexService/UpdateTask', - request_serializer=service.UpdateTaskRequest.serialize, - response_deserializer=operations_pb2.Operation.FromString, - ) - return self._stubs['update_task'] - - @property - def delete_task(self) -> Callable[ - [service.DeleteTaskRequest], - operations_pb2.Operation]: - r"""Return a callable for the delete task method over gRPC. - - Delete the task resource. - - Returns: - Callable[[~.DeleteTaskRequest], - ~.Operation]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'delete_task' not in self._stubs: - self._stubs['delete_task'] = self.grpc_channel.unary_unary( - '/google.cloud.dataplex.v1.DataplexService/DeleteTask', - request_serializer=service.DeleteTaskRequest.serialize, - response_deserializer=operations_pb2.Operation.FromString, - ) - return self._stubs['delete_task'] - - @property - def list_tasks(self) -> Callable[ - [service.ListTasksRequest], - service.ListTasksResponse]: - r"""Return a callable for the list tasks method over gRPC. - - Lists tasks under the given lake. - - Returns: - Callable[[~.ListTasksRequest], - ~.ListTasksResponse]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'list_tasks' not in self._stubs: - self._stubs['list_tasks'] = self.grpc_channel.unary_unary( - '/google.cloud.dataplex.v1.DataplexService/ListTasks', - request_serializer=service.ListTasksRequest.serialize, - response_deserializer=service.ListTasksResponse.deserialize, - ) - return self._stubs['list_tasks'] - - @property - def get_task(self) -> Callable[ - [service.GetTaskRequest], - tasks.Task]: - r"""Return a callable for the get task method over gRPC. - - Get task resource. - - Returns: - Callable[[~.GetTaskRequest], - ~.Task]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'get_task' not in self._stubs: - self._stubs['get_task'] = self.grpc_channel.unary_unary( - '/google.cloud.dataplex.v1.DataplexService/GetTask', - request_serializer=service.GetTaskRequest.serialize, - response_deserializer=tasks.Task.deserialize, - ) - return self._stubs['get_task'] - - @property - def list_jobs(self) -> Callable[ - [service.ListJobsRequest], - service.ListJobsResponse]: - r"""Return a callable for the list jobs method over gRPC. - - Lists Jobs under the given task. - - Returns: - Callable[[~.ListJobsRequest], - ~.ListJobsResponse]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. 
- # gRPC handles serialization and deserialization, so we just need
- # to pass in the functions for each.
- if 'list_jobs' not in self._stubs:
- self._stubs['list_jobs'] = self.grpc_channel.unary_unary(
- '/google.cloud.dataplex.v1.DataplexService/ListJobs',
- request_serializer=service.ListJobsRequest.serialize,
- response_deserializer=service.ListJobsResponse.deserialize,
- )
- return self._stubs['list_jobs']
-
- @property
- def run_task(self) -> Callable[
- [service.RunTaskRequest],
- service.RunTaskResponse]:
- r"""Return a callable for the run task method over gRPC.
-
- Run an on-demand execution of a Task.
-
- Returns:
- Callable[[~.RunTaskRequest],
- ~.RunTaskResponse]:
- A function that, when called, will call the underlying RPC
- on the server.
- """
- # Generate a "stub function" on-the-fly which will actually make
- # the request.
- # gRPC handles serialization and deserialization, so we just need
- # to pass in the functions for each.
- if 'run_task' not in self._stubs:
- self._stubs['run_task'] = self.grpc_channel.unary_unary(
- '/google.cloud.dataplex.v1.DataplexService/RunTask',
- request_serializer=service.RunTaskRequest.serialize,
- response_deserializer=service.RunTaskResponse.deserialize,
- )
- return self._stubs['run_task']
-
- @property
- def get_job(self) -> Callable[
- [service.GetJobRequest],
- tasks.Job]:
- r"""Return a callable for the get job method over gRPC.
-
- Get job resource.
-
- Returns:
- Callable[[~.GetJobRequest],
- ~.Job]:
- A function that, when called, will call the underlying RPC
- on the server.
- """
- # Generate a "stub function" on-the-fly which will actually make
- # the request.
- # gRPC handles serialization and deserialization, so we just need
- # to pass in the functions for each.
- if 'get_job' not in self._stubs:
- self._stubs['get_job'] = self.grpc_channel.unary_unary(
- '/google.cloud.dataplex.v1.DataplexService/GetJob',
- request_serializer=service.GetJobRequest.serialize,
- response_deserializer=tasks.Job.deserialize,
- )
- return self._stubs['get_job']
-
- @property
- def cancel_job(self) -> Callable[
- [service.CancelJobRequest],
- empty_pb2.Empty]:
- r"""Return a callable for the cancel job method over gRPC.
-
- Cancel jobs running for the task resource.
-
- Returns:
- Callable[[~.CancelJobRequest],
- ~.Empty]:
- A function that, when called, will call the underlying RPC
- on the server.
- """
- # Generate a "stub function" on-the-fly which will actually make
- # the request.
- # gRPC handles serialization and deserialization, so we just need
- # to pass in the functions for each.
- if 'cancel_job' not in self._stubs:
- self._stubs['cancel_job'] = self.grpc_channel.unary_unary(
- '/google.cloud.dataplex.v1.DataplexService/CancelJob',
- request_serializer=service.CancelJobRequest.serialize,
- response_deserializer=empty_pb2.Empty.FromString,
- )
- return self._stubs['cancel_job']
-
- @property
- def create_environment(self) -> Callable[
- [service.CreateEnvironmentRequest],
- operations_pb2.Operation]:
- r"""Return a callable for the create environment method over gRPC.
-
- Create an environment resource.
-
- Returns:
- Callable[[~.CreateEnvironmentRequest],
- ~.Operation]:
- A function that, when called, will call the underlying RPC
- on the server.
- """
- # Generate a "stub function" on-the-fly which will actually make
- # the request.
- # gRPC handles serialization and deserialization, so we just need
- # to pass in the functions for each.
- if 'create_environment' not in self._stubs: - self._stubs['create_environment'] = self.grpc_channel.unary_unary( - '/google.cloud.dataplex.v1.DataplexService/CreateEnvironment', - request_serializer=service.CreateEnvironmentRequest.serialize, - response_deserializer=operations_pb2.Operation.FromString, - ) - return self._stubs['create_environment'] - - @property - def update_environment(self) -> Callable[ - [service.UpdateEnvironmentRequest], - operations_pb2.Operation]: - r"""Return a callable for the update environment method over gRPC. - - Update the environment resource. - - Returns: - Callable[[~.UpdateEnvironmentRequest], - ~.Operation]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'update_environment' not in self._stubs: - self._stubs['update_environment'] = self.grpc_channel.unary_unary( - '/google.cloud.dataplex.v1.DataplexService/UpdateEnvironment', - request_serializer=service.UpdateEnvironmentRequest.serialize, - response_deserializer=operations_pb2.Operation.FromString, - ) - return self._stubs['update_environment'] - - @property - def delete_environment(self) -> Callable[ - [service.DeleteEnvironmentRequest], - operations_pb2.Operation]: - r"""Return a callable for the delete environment method over gRPC. - - Delete the environment resource. All the child - resources must have been deleted before environment - deletion can be initiated. - - Returns: - Callable[[~.DeleteEnvironmentRequest], - ~.Operation]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'delete_environment' not in self._stubs: - self._stubs['delete_environment'] = self.grpc_channel.unary_unary( - '/google.cloud.dataplex.v1.DataplexService/DeleteEnvironment', - request_serializer=service.DeleteEnvironmentRequest.serialize, - response_deserializer=operations_pb2.Operation.FromString, - ) - return self._stubs['delete_environment'] - - @property - def list_environments(self) -> Callable[ - [service.ListEnvironmentsRequest], - service.ListEnvironmentsResponse]: - r"""Return a callable for the list environments method over gRPC. - - Lists environments under the given lake. - - Returns: - Callable[[~.ListEnvironmentsRequest], - ~.ListEnvironmentsResponse]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'list_environments' not in self._stubs: - self._stubs['list_environments'] = self.grpc_channel.unary_unary( - '/google.cloud.dataplex.v1.DataplexService/ListEnvironments', - request_serializer=service.ListEnvironmentsRequest.serialize, - response_deserializer=service.ListEnvironmentsResponse.deserialize, - ) - return self._stubs['list_environments'] - - @property - def get_environment(self) -> Callable[ - [service.GetEnvironmentRequest], - analyze.Environment]: - r"""Return a callable for the get environment method over gRPC. - - Get environment resource. 
- - Returns: - Callable[[~.GetEnvironmentRequest], - ~.Environment]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'get_environment' not in self._stubs: - self._stubs['get_environment'] = self.grpc_channel.unary_unary( - '/google.cloud.dataplex.v1.DataplexService/GetEnvironment', - request_serializer=service.GetEnvironmentRequest.serialize, - response_deserializer=analyze.Environment.deserialize, - ) - return self._stubs['get_environment'] - - @property - def list_sessions(self) -> Callable[ - [service.ListSessionsRequest], - service.ListSessionsResponse]: - r"""Return a callable for the list sessions method over gRPC. - - Lists session resources in an environment. - - Returns: - Callable[[~.ListSessionsRequest], - ~.ListSessionsResponse]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'list_sessions' not in self._stubs: - self._stubs['list_sessions'] = self.grpc_channel.unary_unary( - '/google.cloud.dataplex.v1.DataplexService/ListSessions', - request_serializer=service.ListSessionsRequest.serialize, - response_deserializer=service.ListSessionsResponse.deserialize, - ) - return self._stubs['list_sessions'] - - def close(self): - self.grpc_channel.close() - - @property - def delete_operation( - self, - ) -> Callable[[operations_pb2.DeleteOperationRequest], None]: - r"""Return a callable for the delete_operation method over gRPC. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if "delete_operation" not in self._stubs: - self._stubs["delete_operation"] = self.grpc_channel.unary_unary( - "/google.longrunning.Operations/DeleteOperation", - request_serializer=operations_pb2.DeleteOperationRequest.SerializeToString, - response_deserializer=None, - ) - return self._stubs["delete_operation"] - - @property - def cancel_operation( - self, - ) -> Callable[[operations_pb2.CancelOperationRequest], None]: - r"""Return a callable for the cancel_operation method over gRPC. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if "cancel_operation" not in self._stubs: - self._stubs["cancel_operation"] = self.grpc_channel.unary_unary( - "/google.longrunning.Operations/CancelOperation", - request_serializer=operations_pb2.CancelOperationRequest.SerializeToString, - response_deserializer=None, - ) - return self._stubs["cancel_operation"] - - @property - def get_operation( - self, - ) -> Callable[[operations_pb2.GetOperationRequest], operations_pb2.Operation]: - r"""Return a callable for the get_operation method over gRPC. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. 
- if "get_operation" not in self._stubs: - self._stubs["get_operation"] = self.grpc_channel.unary_unary( - "/google.longrunning.Operations/GetOperation", - request_serializer=operations_pb2.GetOperationRequest.SerializeToString, - response_deserializer=operations_pb2.Operation.FromString, - ) - return self._stubs["get_operation"] - - @property - def list_operations( - self, - ) -> Callable[[operations_pb2.ListOperationsRequest], operations_pb2.ListOperationsResponse]: - r"""Return a callable for the list_operations method over gRPC. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if "list_operations" not in self._stubs: - self._stubs["list_operations"] = self.grpc_channel.unary_unary( - "/google.longrunning.Operations/ListOperations", - request_serializer=operations_pb2.ListOperationsRequest.SerializeToString, - response_deserializer=operations_pb2.ListOperationsResponse.FromString, - ) - return self._stubs["list_operations"] - - @property - def list_locations( - self, - ) -> Callable[[locations_pb2.ListLocationsRequest], locations_pb2.ListLocationsResponse]: - r"""Return a callable for the list locations method over gRPC. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if "list_locations" not in self._stubs: - self._stubs["list_locations"] = self.grpc_channel.unary_unary( - "/google.cloud.location.Locations/ListLocations", - request_serializer=locations_pb2.ListLocationsRequest.SerializeToString, - response_deserializer=locations_pb2.ListLocationsResponse.FromString, - ) - return self._stubs["list_locations"] - - @property - def get_location( - self, - ) -> Callable[[locations_pb2.GetLocationRequest], locations_pb2.Location]: - r"""Return a callable for the list locations method over gRPC. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if "get_location" not in self._stubs: - self._stubs["get_location"] = self.grpc_channel.unary_unary( - "/google.cloud.location.Locations/GetLocation", - request_serializer=locations_pb2.GetLocationRequest.SerializeToString, - response_deserializer=locations_pb2.Location.FromString, - ) - return self._stubs["get_location"] - - @property - def kind(self) -> str: - return "grpc" - - -__all__ = ( - 'DataplexServiceGrpcTransport', -) diff --git a/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/dataplex_service/transports/grpc_asyncio.py b/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/dataplex_service/transports/grpc_asyncio.py deleted file mode 100644 index d9f2b367ca58..000000000000 --- a/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/dataplex_service/transports/grpc_asyncio.py +++ /dev/null @@ -1,1593 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-import inspect
-import warnings
-from typing import Awaitable, Callable, Dict, Optional, Sequence, Tuple, Union
-
-from google.api_core import gapic_v1
-from google.api_core import grpc_helpers_async
-from google.api_core import exceptions as core_exceptions
-from google.api_core import retry_async as retries
-from google.api_core import operations_v1
-from google.auth import credentials as ga_credentials # type: ignore
-from google.auth.transport.grpc import SslCredentials # type: ignore
-
-import grpc # type: ignore
-from grpc.experimental import aio # type: ignore
-
-from google.cloud.dataplex_v1.types import analyze
-from google.cloud.dataplex_v1.types import resources
-from google.cloud.dataplex_v1.types import service
-from google.cloud.dataplex_v1.types import tasks
-from google.cloud.location import locations_pb2 # type: ignore
-from google.iam.v1 import iam_policy_pb2 # type: ignore
-from google.iam.v1 import policy_pb2 # type: ignore
-from google.longrunning import operations_pb2 # type: ignore
-from google.protobuf import empty_pb2 # type: ignore
-from .base import DataplexServiceTransport, DEFAULT_CLIENT_INFO
-from .grpc import DataplexServiceGrpcTransport
-
-
-class DataplexServiceGrpcAsyncIOTransport(DataplexServiceTransport):
- """gRPC AsyncIO backend transport for DataplexService.
-
- Dataplex service provides data lakes as a service. The
- primary resources offered by this service are Lakes, Zones and
- Assets which collectively allow a data administrator to
- organize, manage, secure and catalog data across their
- organization located across cloud projects in a variety of
- storage systems including Cloud Storage and BigQuery.
-
- This class defines the same methods as the primary client, so the
- primary client can load the underlying transport implementation
- and call it.
-
- It sends protocol buffers over the wire using gRPC (which is built on
- top of HTTP/2); the ``grpcio`` package must be installed.
- """
-
- _grpc_channel: aio.Channel
- _stubs: Dict[str, Callable] = {}
-
- @classmethod
- def create_channel(cls,
- host: str = 'dataplex.googleapis.com',
- credentials: Optional[ga_credentials.Credentials] = None,
- credentials_file: Optional[str] = None,
- scopes: Optional[Sequence[str]] = None,
- quota_project_id: Optional[str] = None,
- **kwargs) -> aio.Channel:
- """Create and return a gRPC AsyncIO channel object.
- Args:
- host (Optional[str]): The host for the channel to use.
- credentials (Optional[~.Credentials]): The
- authorization credentials to attach to requests. These
- credentials identify this application to the service. If
- none are specified, the client will attempt to ascertain
- the credentials from the environment.
- credentials_file (Optional[str]): A file with credentials that can
- be loaded with :func:`google.auth.load_credentials_from_file`.
- scopes (Optional[Sequence[str]]): An optional list of scopes needed for this
- service. These are only used when credentials are not specified and
- are passed to :func:`google.auth.default`.
- quota_project_id (Optional[str]): An optional project to use for billing
- and quota.
- kwargs (Optional[dict]): Keyword arguments, which are passed to the
- channel creation.
- Returns:
- aio.Channel: A gRPC AsyncIO channel object.
- """
-
- return grpc_helpers_async.create_channel(
- host,
- credentials=credentials,
- credentials_file=credentials_file,
- quota_project_id=quota_project_id,
- default_scopes=cls.AUTH_SCOPES,
- scopes=scopes,
- default_host=cls.DEFAULT_HOST,
- **kwargs
- )
-
- def __init__(self, *,
- host: str = 'dataplex.googleapis.com',
- credentials: Optional[ga_credentials.Credentials] = None,
- credentials_file: Optional[str] = None,
- scopes: Optional[Sequence[str]] = None,
- channel: Optional[Union[aio.Channel, Callable[..., aio.Channel]]] = None,
- api_mtls_endpoint: Optional[str] = None,
- client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None,
- ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None,
- client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None,
- quota_project_id: Optional[str] = None,
- client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO,
- always_use_jwt_access: Optional[bool] = False,
- api_audience: Optional[str] = None,
- ) -> None:
- """Instantiate the transport.
-
- Args:
- host (Optional[str]):
- The hostname to connect to (default: 'dataplex.googleapis.com').
- credentials (Optional[google.auth.credentials.Credentials]): The
- authorization credentials to attach to requests. These
- credentials identify the application to the service; if none
- are specified, the client will attempt to ascertain the
- credentials from the environment.
- This argument is ignored if a ``channel`` instance is provided.
- credentials_file (Optional[str]): A file with credentials that can
- be loaded with :func:`google.auth.load_credentials_from_file`.
- This argument is ignored if a ``channel`` instance is provided.
- scopes (Optional[Sequence[str]]): An optional list of scopes needed for this
- service. These are only used when credentials are not specified and
- are passed to :func:`google.auth.default`.
- channel (Optional[Union[aio.Channel, Callable[..., aio.Channel]]]):
- A ``Channel`` instance through which to make calls, or a Callable
- that constructs and returns one. If set to None, ``self.create_channel``
- is used to create the channel. If a Callable is given, it will be called
- with the same arguments as used in ``self.create_channel``.
- api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint.
- If provided, it overrides the ``host`` argument and tries to create
- a mutual TLS channel with client SSL credentials from
- ``client_cert_source`` or application default SSL credentials.
- client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]):
- Deprecated. A callback to provide client SSL certificate bytes and
- private key bytes, both in PEM format. It is ignored if
- ``api_mtls_endpoint`` is None.
- ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials
- for the grpc channel. It is ignored if a ``channel`` instance is provided.
- client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]):
- A callback to provide client certificate bytes and private key bytes,
- both in PEM format. It is used to configure a mutual TLS channel. It is
- ignored if a ``channel`` instance or ``ssl_channel_credentials`` is provided.
- quota_project_id (Optional[str]): An optional project to use for billing
- and quota.
- client_info (google.api_core.gapic_v1.client_info.ClientInfo):
- The client info used to send a user-agent string along with
- API requests. If ``None``, then default info will be used.
- Generally, you only need to set this if you're developing
- your own client library.
- always_use_jwt_access (Optional[bool]): Whether self-signed JWT should
- be used for service account credentials.
-
- Raises:
- google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport
- creation failed for any reason.
- google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials``
- and ``credentials_file`` are passed.
- """
- self._grpc_channel = None
- self._ssl_channel_credentials = ssl_channel_credentials
- self._stubs: Dict[str, Callable] = {}
- self._operations_client: Optional[operations_v1.OperationsAsyncClient] = None
-
- if api_mtls_endpoint:
- warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning)
- if client_cert_source:
- warnings.warn("client_cert_source is deprecated", DeprecationWarning)
-
- if isinstance(channel, aio.Channel):
- # Ignore credentials if a channel was passed.
- credentials = None
- self._ignore_credentials = True
- # If a channel was explicitly provided, set it.
- self._grpc_channel = channel
- self._ssl_channel_credentials = None
- else:
- if api_mtls_endpoint:
- host = api_mtls_endpoint
-
- # Create SSL credentials with client_cert_source or application
- # default SSL credentials.
- if client_cert_source:
- cert, key = client_cert_source()
- self._ssl_channel_credentials = grpc.ssl_channel_credentials(
- certificate_chain=cert, private_key=key
- )
- else:
- self._ssl_channel_credentials = SslCredentials().ssl_credentials
-
- else:
- if client_cert_source_for_mtls and not ssl_channel_credentials:
- cert, key = client_cert_source_for_mtls()
- self._ssl_channel_credentials = grpc.ssl_channel_credentials(
- certificate_chain=cert, private_key=key
- )
-
- # The base transport sets the host, credentials and scopes
- super().__init__(
- host=host,
- credentials=credentials,
- credentials_file=credentials_file,
- scopes=scopes,
- quota_project_id=quota_project_id,
- client_info=client_info,
- always_use_jwt_access=always_use_jwt_access,
- api_audience=api_audience,
- )
-
- if not self._grpc_channel:
- # initialize with the provided callable or the default channel
- channel_init = channel or type(self).create_channel
- self._grpc_channel = channel_init(
- self._host,
- # use the credentials which are saved
- credentials=self._credentials,
- # Set ``credentials_file`` to ``None`` here as
- # the credentials that we saved earlier should be used.
- credentials_file=None,
- scopes=self._scopes,
- ssl_credentials=self._ssl_channel_credentials,
- quota_project_id=quota_project_id,
- options=[
- ("grpc.max_send_message_length", -1),
- ("grpc.max_receive_message_length", -1),
- ],
- )
-
- # Wrap messages. This must be done after self._grpc_channel exists
- self._wrap_with_kind = "kind" in inspect.signature(gapic_v1.method_async.wrap_method).parameters
- self._prep_wrapped_messages(client_info)
-
- @property
- def grpc_channel(self) -> aio.Channel:
- """Create the channel designed to connect to this service.
-
- This property caches on the instance; repeated calls return
- the same channel.
- """
- # Return the channel from cache.
- return self._grpc_channel
-
- @property
- def operations_client(self) -> operations_v1.OperationsAsyncClient:
- """Create the client designed to process long-running operations.
- - This property caches on the instance; repeated calls return the same - client. - """ - # Quick check: Only create a new client if we do not already have one. - if self._operations_client is None: - self._operations_client = operations_v1.OperationsAsyncClient( - self.grpc_channel - ) - - # Return the client from cache. - return self._operations_client - - @property - def create_lake(self) -> Callable[ - [service.CreateLakeRequest], - Awaitable[operations_pb2.Operation]]: - r"""Return a callable for the create lake method over gRPC. - - Creates a lake resource. - - Returns: - Callable[[~.CreateLakeRequest], - Awaitable[~.Operation]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'create_lake' not in self._stubs: - self._stubs['create_lake'] = self.grpc_channel.unary_unary( - '/google.cloud.dataplex.v1.DataplexService/CreateLake', - request_serializer=service.CreateLakeRequest.serialize, - response_deserializer=operations_pb2.Operation.FromString, - ) - return self._stubs['create_lake'] - - @property - def update_lake(self) -> Callable[ - [service.UpdateLakeRequest], - Awaitable[operations_pb2.Operation]]: - r"""Return a callable for the update lake method over gRPC. - - Updates a lake resource. - - Returns: - Callable[[~.UpdateLakeRequest], - Awaitable[~.Operation]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'update_lake' not in self._stubs: - self._stubs['update_lake'] = self.grpc_channel.unary_unary( - '/google.cloud.dataplex.v1.DataplexService/UpdateLake', - request_serializer=service.UpdateLakeRequest.serialize, - response_deserializer=operations_pb2.Operation.FromString, - ) - return self._stubs['update_lake'] - - @property - def delete_lake(self) -> Callable[ - [service.DeleteLakeRequest], - Awaitable[operations_pb2.Operation]]: - r"""Return a callable for the delete lake method over gRPC. - - Deletes a lake resource. All zones within the lake - must be deleted before the lake can be deleted. - - Returns: - Callable[[~.DeleteLakeRequest], - Awaitable[~.Operation]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'delete_lake' not in self._stubs: - self._stubs['delete_lake'] = self.grpc_channel.unary_unary( - '/google.cloud.dataplex.v1.DataplexService/DeleteLake', - request_serializer=service.DeleteLakeRequest.serialize, - response_deserializer=operations_pb2.Operation.FromString, - ) - return self._stubs['delete_lake'] - - @property - def list_lakes(self) -> Callable[ - [service.ListLakesRequest], - Awaitable[service.ListLakesResponse]]: - r"""Return a callable for the list lakes method over gRPC. - - Lists lake resources in a project and location. - - Returns: - Callable[[~.ListLakesRequest], - Awaitable[~.ListLakesResponse]]: - A function that, when called, will call the underlying RPC - on the server. 
- """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'list_lakes' not in self._stubs: - self._stubs['list_lakes'] = self.grpc_channel.unary_unary( - '/google.cloud.dataplex.v1.DataplexService/ListLakes', - request_serializer=service.ListLakesRequest.serialize, - response_deserializer=service.ListLakesResponse.deserialize, - ) - return self._stubs['list_lakes'] - - @property - def get_lake(self) -> Callable[ - [service.GetLakeRequest], - Awaitable[resources.Lake]]: - r"""Return a callable for the get lake method over gRPC. - - Retrieves a lake resource. - - Returns: - Callable[[~.GetLakeRequest], - Awaitable[~.Lake]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'get_lake' not in self._stubs: - self._stubs['get_lake'] = self.grpc_channel.unary_unary( - '/google.cloud.dataplex.v1.DataplexService/GetLake', - request_serializer=service.GetLakeRequest.serialize, - response_deserializer=resources.Lake.deserialize, - ) - return self._stubs['get_lake'] - - @property - def list_lake_actions(self) -> Callable[ - [service.ListLakeActionsRequest], - Awaitable[service.ListActionsResponse]]: - r"""Return a callable for the list lake actions method over gRPC. - - Lists action resources in a lake. - - Returns: - Callable[[~.ListLakeActionsRequest], - Awaitable[~.ListActionsResponse]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'list_lake_actions' not in self._stubs: - self._stubs['list_lake_actions'] = self.grpc_channel.unary_unary( - '/google.cloud.dataplex.v1.DataplexService/ListLakeActions', - request_serializer=service.ListLakeActionsRequest.serialize, - response_deserializer=service.ListActionsResponse.deserialize, - ) - return self._stubs['list_lake_actions'] - - @property - def create_zone(self) -> Callable[ - [service.CreateZoneRequest], - Awaitable[operations_pb2.Operation]]: - r"""Return a callable for the create zone method over gRPC. - - Creates a zone resource within a lake. - - Returns: - Callable[[~.CreateZoneRequest], - Awaitable[~.Operation]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'create_zone' not in self._stubs: - self._stubs['create_zone'] = self.grpc_channel.unary_unary( - '/google.cloud.dataplex.v1.DataplexService/CreateZone', - request_serializer=service.CreateZoneRequest.serialize, - response_deserializer=operations_pb2.Operation.FromString, - ) - return self._stubs['create_zone'] - - @property - def update_zone(self) -> Callable[ - [service.UpdateZoneRequest], - Awaitable[operations_pb2.Operation]]: - r"""Return a callable for the update zone method over gRPC. - - Updates a zone resource. 
- - Returns: - Callable[[~.UpdateZoneRequest], - Awaitable[~.Operation]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'update_zone' not in self._stubs: - self._stubs['update_zone'] = self.grpc_channel.unary_unary( - '/google.cloud.dataplex.v1.DataplexService/UpdateZone', - request_serializer=service.UpdateZoneRequest.serialize, - response_deserializer=operations_pb2.Operation.FromString, - ) - return self._stubs['update_zone'] - - @property - def delete_zone(self) -> Callable[ - [service.DeleteZoneRequest], - Awaitable[operations_pb2.Operation]]: - r"""Return a callable for the delete zone method over gRPC. - - Deletes a zone resource. All assets within a zone - must be deleted before the zone can be deleted. - - Returns: - Callable[[~.DeleteZoneRequest], - Awaitable[~.Operation]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'delete_zone' not in self._stubs: - self._stubs['delete_zone'] = self.grpc_channel.unary_unary( - '/google.cloud.dataplex.v1.DataplexService/DeleteZone', - request_serializer=service.DeleteZoneRequest.serialize, - response_deserializer=operations_pb2.Operation.FromString, - ) - return self._stubs['delete_zone'] - - @property - def list_zones(self) -> Callable[ - [service.ListZonesRequest], - Awaitable[service.ListZonesResponse]]: - r"""Return a callable for the list zones method over gRPC. - - Lists zone resources in a lake. - - Returns: - Callable[[~.ListZonesRequest], - Awaitable[~.ListZonesResponse]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'list_zones' not in self._stubs: - self._stubs['list_zones'] = self.grpc_channel.unary_unary( - '/google.cloud.dataplex.v1.DataplexService/ListZones', - request_serializer=service.ListZonesRequest.serialize, - response_deserializer=service.ListZonesResponse.deserialize, - ) - return self._stubs['list_zones'] - - @property - def get_zone(self) -> Callable[ - [service.GetZoneRequest], - Awaitable[resources.Zone]]: - r"""Return a callable for the get zone method over gRPC. - - Retrieves a zone resource. - - Returns: - Callable[[~.GetZoneRequest], - Awaitable[~.Zone]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'get_zone' not in self._stubs: - self._stubs['get_zone'] = self.grpc_channel.unary_unary( - '/google.cloud.dataplex.v1.DataplexService/GetZone', - request_serializer=service.GetZoneRequest.serialize, - response_deserializer=resources.Zone.deserialize, - ) - return self._stubs['get_zone'] - - @property - def list_zone_actions(self) -> Callable[ - [service.ListZoneActionsRequest], - Awaitable[service.ListActionsResponse]]: - r"""Return a callable for the list zone actions method over gRPC. 
- - Lists action resources in a zone. - - Returns: - Callable[[~.ListZoneActionsRequest], - Awaitable[~.ListActionsResponse]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'list_zone_actions' not in self._stubs: - self._stubs['list_zone_actions'] = self.grpc_channel.unary_unary( - '/google.cloud.dataplex.v1.DataplexService/ListZoneActions', - request_serializer=service.ListZoneActionsRequest.serialize, - response_deserializer=service.ListActionsResponse.deserialize, - ) - return self._stubs['list_zone_actions'] - - @property - def create_asset(self) -> Callable[ - [service.CreateAssetRequest], - Awaitable[operations_pb2.Operation]]: - r"""Return a callable for the create asset method over gRPC. - - Creates an asset resource. - - Returns: - Callable[[~.CreateAssetRequest], - Awaitable[~.Operation]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'create_asset' not in self._stubs: - self._stubs['create_asset'] = self.grpc_channel.unary_unary( - '/google.cloud.dataplex.v1.DataplexService/CreateAsset', - request_serializer=service.CreateAssetRequest.serialize, - response_deserializer=operations_pb2.Operation.FromString, - ) - return self._stubs['create_asset'] - - @property - def update_asset(self) -> Callable[ - [service.UpdateAssetRequest], - Awaitable[operations_pb2.Operation]]: - r"""Return a callable for the update asset method over gRPC. - - Updates an asset resource. - - Returns: - Callable[[~.UpdateAssetRequest], - Awaitable[~.Operation]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'update_asset' not in self._stubs: - self._stubs['update_asset'] = self.grpc_channel.unary_unary( - '/google.cloud.dataplex.v1.DataplexService/UpdateAsset', - request_serializer=service.UpdateAssetRequest.serialize, - response_deserializer=operations_pb2.Operation.FromString, - ) - return self._stubs['update_asset'] - - @property - def delete_asset(self) -> Callable[ - [service.DeleteAssetRequest], - Awaitable[operations_pb2.Operation]]: - r"""Return a callable for the delete asset method over gRPC. - - Deletes an asset resource. The referenced storage - resource is detached (default) or deleted based on the - associated Lifecycle policy. - - Returns: - Callable[[~.DeleteAssetRequest], - Awaitable[~.Operation]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. 
- if 'delete_asset' not in self._stubs: - self._stubs['delete_asset'] = self.grpc_channel.unary_unary( - '/google.cloud.dataplex.v1.DataplexService/DeleteAsset', - request_serializer=service.DeleteAssetRequest.serialize, - response_deserializer=operations_pb2.Operation.FromString, - ) - return self._stubs['delete_asset'] - - @property - def list_assets(self) -> Callable[ - [service.ListAssetsRequest], - Awaitable[service.ListAssetsResponse]]: - r"""Return a callable for the list assets method over gRPC. - - Lists asset resources in a zone. - - Returns: - Callable[[~.ListAssetsRequest], - Awaitable[~.ListAssetsResponse]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'list_assets' not in self._stubs: - self._stubs['list_assets'] = self.grpc_channel.unary_unary( - '/google.cloud.dataplex.v1.DataplexService/ListAssets', - request_serializer=service.ListAssetsRequest.serialize, - response_deserializer=service.ListAssetsResponse.deserialize, - ) - return self._stubs['list_assets'] - - @property - def get_asset(self) -> Callable[ - [service.GetAssetRequest], - Awaitable[resources.Asset]]: - r"""Return a callable for the get asset method over gRPC. - - Retrieves an asset resource. - - Returns: - Callable[[~.GetAssetRequest], - Awaitable[~.Asset]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'get_asset' not in self._stubs: - self._stubs['get_asset'] = self.grpc_channel.unary_unary( - '/google.cloud.dataplex.v1.DataplexService/GetAsset', - request_serializer=service.GetAssetRequest.serialize, - response_deserializer=resources.Asset.deserialize, - ) - return self._stubs['get_asset'] - - @property - def list_asset_actions(self) -> Callable[ - [service.ListAssetActionsRequest], - Awaitable[service.ListActionsResponse]]: - r"""Return a callable for the list asset actions method over gRPC. - - Lists action resources in an asset. - - Returns: - Callable[[~.ListAssetActionsRequest], - Awaitable[~.ListActionsResponse]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'list_asset_actions' not in self._stubs: - self._stubs['list_asset_actions'] = self.grpc_channel.unary_unary( - '/google.cloud.dataplex.v1.DataplexService/ListAssetActions', - request_serializer=service.ListAssetActionsRequest.serialize, - response_deserializer=service.ListActionsResponse.deserialize, - ) - return self._stubs['list_asset_actions'] - - @property - def create_task(self) -> Callable[ - [service.CreateTaskRequest], - Awaitable[operations_pb2.Operation]]: - r"""Return a callable for the create task method over gRPC. - - Creates a task resource within a lake. - - Returns: - Callable[[~.CreateTaskRequest], - Awaitable[~.Operation]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. 
- # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'create_task' not in self._stubs: - self._stubs['create_task'] = self.grpc_channel.unary_unary( - '/google.cloud.dataplex.v1.DataplexService/CreateTask', - request_serializer=service.CreateTaskRequest.serialize, - response_deserializer=operations_pb2.Operation.FromString, - ) - return self._stubs['create_task'] - - @property - def update_task(self) -> Callable[ - [service.UpdateTaskRequest], - Awaitable[operations_pb2.Operation]]: - r"""Return a callable for the update task method over gRPC. - - Update the task resource. - - Returns: - Callable[[~.UpdateTaskRequest], - Awaitable[~.Operation]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'update_task' not in self._stubs: - self._stubs['update_task'] = self.grpc_channel.unary_unary( - '/google.cloud.dataplex.v1.DataplexService/UpdateTask', - request_serializer=service.UpdateTaskRequest.serialize, - response_deserializer=operations_pb2.Operation.FromString, - ) - return self._stubs['update_task'] - - @property - def delete_task(self) -> Callable[ - [service.DeleteTaskRequest], - Awaitable[operations_pb2.Operation]]: - r"""Return a callable for the delete task method over gRPC. - - Delete the task resource. - - Returns: - Callable[[~.DeleteTaskRequest], - Awaitable[~.Operation]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'delete_task' not in self._stubs: - self._stubs['delete_task'] = self.grpc_channel.unary_unary( - '/google.cloud.dataplex.v1.DataplexService/DeleteTask', - request_serializer=service.DeleteTaskRequest.serialize, - response_deserializer=operations_pb2.Operation.FromString, - ) - return self._stubs['delete_task'] - - @property - def list_tasks(self) -> Callable[ - [service.ListTasksRequest], - Awaitable[service.ListTasksResponse]]: - r"""Return a callable for the list tasks method over gRPC. - - Lists tasks under the given lake. - - Returns: - Callable[[~.ListTasksRequest], - Awaitable[~.ListTasksResponse]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'list_tasks' not in self._stubs: - self._stubs['list_tasks'] = self.grpc_channel.unary_unary( - '/google.cloud.dataplex.v1.DataplexService/ListTasks', - request_serializer=service.ListTasksRequest.serialize, - response_deserializer=service.ListTasksResponse.deserialize, - ) - return self._stubs['list_tasks'] - - @property - def get_task(self) -> Callable[ - [service.GetTaskRequest], - Awaitable[tasks.Task]]: - r"""Return a callable for the get task method over gRPC. - - Get task resource. - - Returns: - Callable[[~.GetTaskRequest], - Awaitable[~.Task]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. 
- # gRPC handles serialization and deserialization, so we just need
- # to pass in the functions for each.
- if 'get_task' not in self._stubs:
- self._stubs['get_task'] = self.grpc_channel.unary_unary(
- '/google.cloud.dataplex.v1.DataplexService/GetTask',
- request_serializer=service.GetTaskRequest.serialize,
- response_deserializer=tasks.Task.deserialize,
- )
- return self._stubs['get_task']
-
- @property
- def list_jobs(self) -> Callable[
- [service.ListJobsRequest],
- Awaitable[service.ListJobsResponse]]:
- r"""Return a callable for the list jobs method over gRPC.
-
- Lists Jobs under the given task.
-
- Returns:
- Callable[[~.ListJobsRequest],
- Awaitable[~.ListJobsResponse]]:
- A function that, when called, will call the underlying RPC
- on the server.
- """
- # Generate a "stub function" on-the-fly which will actually make
- # the request.
- # gRPC handles serialization and deserialization, so we just need
- # to pass in the functions for each.
- if 'list_jobs' not in self._stubs:
- self._stubs['list_jobs'] = self.grpc_channel.unary_unary(
- '/google.cloud.dataplex.v1.DataplexService/ListJobs',
- request_serializer=service.ListJobsRequest.serialize,
- response_deserializer=service.ListJobsResponse.deserialize,
- )
- return self._stubs['list_jobs']
-
- @property
- def run_task(self) -> Callable[
- [service.RunTaskRequest],
- Awaitable[service.RunTaskResponse]]:
- r"""Return a callable for the run task method over gRPC.
-
- Run an on-demand execution of a Task.
-
- Returns:
- Callable[[~.RunTaskRequest],
- Awaitable[~.RunTaskResponse]]:
- A function that, when called, will call the underlying RPC
- on the server.
- """
- # Generate a "stub function" on-the-fly which will actually make
- # the request.
- # gRPC handles serialization and deserialization, so we just need
- # to pass in the functions for each.
- if 'run_task' not in self._stubs:
- self._stubs['run_task'] = self.grpc_channel.unary_unary(
- '/google.cloud.dataplex.v1.DataplexService/RunTask',
- request_serializer=service.RunTaskRequest.serialize,
- response_deserializer=service.RunTaskResponse.deserialize,
- )
- return self._stubs['run_task']
-
- @property
- def get_job(self) -> Callable[
- [service.GetJobRequest],
- Awaitable[tasks.Job]]:
- r"""Return a callable for the get job method over gRPC.
-
- Get job resource.
-
- Returns:
- Callable[[~.GetJobRequest],
- Awaitable[~.Job]]:
- A function that, when called, will call the underlying RPC
- on the server.
- """
- # Generate a "stub function" on-the-fly which will actually make
- # the request.
- # gRPC handles serialization and deserialization, so we just need
- # to pass in the functions for each.
- if 'get_job' not in self._stubs:
- self._stubs['get_job'] = self.grpc_channel.unary_unary(
- '/google.cloud.dataplex.v1.DataplexService/GetJob',
- request_serializer=service.GetJobRequest.serialize,
- response_deserializer=tasks.Job.deserialize,
- )
- return self._stubs['get_job']
-
- @property
- def cancel_job(self) -> Callable[
- [service.CancelJobRequest],
- Awaitable[empty_pb2.Empty]]:
- r"""Return a callable for the cancel job method over gRPC.
-
- Cancel jobs running for the task resource.
-
- Returns:
- Callable[[~.CancelJobRequest],
- Awaitable[~.Empty]]:
- A function that, when called, will call the underlying RPC
- on the server.
- """
- # Generate a "stub function" on-the-fly which will actually make
- # the request.
- # gRPC handles serialization and deserialization, so we just need
- # to pass in the functions for each.
- if 'cancel_job' not in self._stubs: - self._stubs['cancel_job'] = self.grpc_channel.unary_unary( - '/google.cloud.dataplex.v1.DataplexService/CancelJob', - request_serializer=service.CancelJobRequest.serialize, - response_deserializer=empty_pb2.Empty.FromString, - ) - return self._stubs['cancel_job'] - - @property - def create_environment(self) -> Callable[ - [service.CreateEnvironmentRequest], - Awaitable[operations_pb2.Operation]]: - r"""Return a callable for the create environment method over gRPC. - - Create an environment resource. - - Returns: - Callable[[~.CreateEnvironmentRequest], - Awaitable[~.Operation]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'create_environment' not in self._stubs: - self._stubs['create_environment'] = self.grpc_channel.unary_unary( - '/google.cloud.dataplex.v1.DataplexService/CreateEnvironment', - request_serializer=service.CreateEnvironmentRequest.serialize, - response_deserializer=operations_pb2.Operation.FromString, - ) - return self._stubs['create_environment'] - - @property - def update_environment(self) -> Callable[ - [service.UpdateEnvironmentRequest], - Awaitable[operations_pb2.Operation]]: - r"""Return a callable for the update environment method over gRPC. - - Update the environment resource. - - Returns: - Callable[[~.UpdateEnvironmentRequest], - Awaitable[~.Operation]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'update_environment' not in self._stubs: - self._stubs['update_environment'] = self.grpc_channel.unary_unary( - '/google.cloud.dataplex.v1.DataplexService/UpdateEnvironment', - request_serializer=service.UpdateEnvironmentRequest.serialize, - response_deserializer=operations_pb2.Operation.FromString, - ) - return self._stubs['update_environment'] - - @property - def delete_environment(self) -> Callable[ - [service.DeleteEnvironmentRequest], - Awaitable[operations_pb2.Operation]]: - r"""Return a callable for the delete environment method over gRPC. - - Delete the environment resource. All the child - resources must have been deleted before environment - deletion can be initiated. - - Returns: - Callable[[~.DeleteEnvironmentRequest], - Awaitable[~.Operation]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'delete_environment' not in self._stubs: - self._stubs['delete_environment'] = self.grpc_channel.unary_unary( - '/google.cloud.dataplex.v1.DataplexService/DeleteEnvironment', - request_serializer=service.DeleteEnvironmentRequest.serialize, - response_deserializer=operations_pb2.Operation.FromString, - ) - return self._stubs['delete_environment'] - - @property - def list_environments(self) -> Callable[ - [service.ListEnvironmentsRequest], - Awaitable[service.ListEnvironmentsResponse]]: - r"""Return a callable for the list environments method over gRPC. - - Lists environments under the given lake. 
- - Returns: - Callable[[~.ListEnvironmentsRequest], - Awaitable[~.ListEnvironmentsResponse]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'list_environments' not in self._stubs: - self._stubs['list_environments'] = self.grpc_channel.unary_unary( - '/google.cloud.dataplex.v1.DataplexService/ListEnvironments', - request_serializer=service.ListEnvironmentsRequest.serialize, - response_deserializer=service.ListEnvironmentsResponse.deserialize, - ) - return self._stubs['list_environments'] - - @property - def get_environment(self) -> Callable[ - [service.GetEnvironmentRequest], - Awaitable[analyze.Environment]]: - r"""Return a callable for the get environment method over gRPC. - - Get environment resource. - - Returns: - Callable[[~.GetEnvironmentRequest], - Awaitable[~.Environment]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'get_environment' not in self._stubs: - self._stubs['get_environment'] = self.grpc_channel.unary_unary( - '/google.cloud.dataplex.v1.DataplexService/GetEnvironment', - request_serializer=service.GetEnvironmentRequest.serialize, - response_deserializer=analyze.Environment.deserialize, - ) - return self._stubs['get_environment'] - - @property - def list_sessions(self) -> Callable[ - [service.ListSessionsRequest], - Awaitable[service.ListSessionsResponse]]: - r"""Return a callable for the list sessions method over gRPC. - - Lists session resources in an environment. - - Returns: - Callable[[~.ListSessionsRequest], - Awaitable[~.ListSessionsResponse]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. 
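# [Editor's note] The stub paths above all follow gRPC's
# "/<package>.<Service>/<Method>" convention. A tiny helper showing how such a
# path is assembled (the helper name is illustrative):

def grpc_method_path(service: str, method: str) -> str:
    # grpc_method_path("google.cloud.dataplex.v1.DataplexService", "ListSessions")
    # -> "/google.cloud.dataplex.v1.DataplexService/ListSessions"
    return f"/{service}/{method}"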
- if 'list_sessions' not in self._stubs: - self._stubs['list_sessions'] = self.grpc_channel.unary_unary( - '/google.cloud.dataplex.v1.DataplexService/ListSessions', - request_serializer=service.ListSessionsRequest.serialize, - response_deserializer=service.ListSessionsResponse.deserialize, - ) - return self._stubs['list_sessions'] - - def _prep_wrapped_messages(self, client_info): - """ Precompute the wrapped methods, overriding the base class method to use async wrappers.""" - self._wrapped_methods = { - self.create_lake: self._wrap_method( - self.create_lake, - default_timeout=60.0, - client_info=client_info, - ), - self.update_lake: self._wrap_method( - self.update_lake, - default_timeout=60.0, - client_info=client_info, - ), - self.delete_lake: self._wrap_method( - self.delete_lake, - default_timeout=60.0, - client_info=client_info, - ), - self.list_lakes: self._wrap_method( - self.list_lakes, - default_retry=retries.AsyncRetry( - initial=1.0, - maximum=10.0, - multiplier=1.3, - predicate=retries.if_exception_type( - core_exceptions.ServiceUnavailable, - ), - deadline=60.0, - ), - default_timeout=60.0, - client_info=client_info, - ), - self.get_lake: self._wrap_method( - self.get_lake, - default_retry=retries.AsyncRetry( - initial=1.0, - maximum=10.0, - multiplier=1.3, - predicate=retries.if_exception_type( - core_exceptions.ServiceUnavailable, - ), - deadline=60.0, - ), - default_timeout=60.0, - client_info=client_info, - ), - self.list_lake_actions: self._wrap_method( - self.list_lake_actions, - default_retry=retries.AsyncRetry( - initial=1.0, - maximum=10.0, - multiplier=1.3, - predicate=retries.if_exception_type( - core_exceptions.ServiceUnavailable, - ), - deadline=60.0, - ), - default_timeout=60.0, - client_info=client_info, - ), - self.create_zone: self._wrap_method( - self.create_zone, - default_timeout=60.0, - client_info=client_info, - ), - self.update_zone: self._wrap_method( - self.update_zone, - default_timeout=60.0, - client_info=client_info, - ), - self.delete_zone: self._wrap_method( - self.delete_zone, - default_timeout=60.0, - client_info=client_info, - ), - self.list_zones: self._wrap_method( - self.list_zones, - default_retry=retries.AsyncRetry( - initial=1.0, - maximum=10.0, - multiplier=1.3, - predicate=retries.if_exception_type( - core_exceptions.ServiceUnavailable, - ), - deadline=60.0, - ), - default_timeout=60.0, - client_info=client_info, - ), - self.get_zone: self._wrap_method( - self.get_zone, - default_retry=retries.AsyncRetry( - initial=1.0, - maximum=10.0, - multiplier=1.3, - predicate=retries.if_exception_type( - core_exceptions.ServiceUnavailable, - ), - deadline=60.0, - ), - default_timeout=60.0, - client_info=client_info, - ), - self.list_zone_actions: self._wrap_method( - self.list_zone_actions, - default_retry=retries.AsyncRetry( - initial=1.0, - maximum=10.0, - multiplier=1.3, - predicate=retries.if_exception_type( - core_exceptions.ServiceUnavailable, - ), - deadline=60.0, - ), - default_timeout=60.0, - client_info=client_info, - ), - self.create_asset: self._wrap_method( - self.create_asset, - default_timeout=60.0, - client_info=client_info, - ), - self.update_asset: self._wrap_method( - self.update_asset, - default_timeout=60.0, - client_info=client_info, - ), - self.delete_asset: self._wrap_method( - self.delete_asset, - default_timeout=60.0, - client_info=client_info, - ), - self.list_assets: self._wrap_method( - self.list_assets, - default_retry=retries.AsyncRetry( - initial=1.0, - maximum=10.0, - multiplier=1.3, - 
predicate=retries.if_exception_type( - core_exceptions.ServiceUnavailable, - ), - deadline=60.0, - ), - default_timeout=60.0, - client_info=client_info, - ), - self.get_asset: self._wrap_method( - self.get_asset, - default_retry=retries.AsyncRetry( - initial=1.0, - maximum=10.0, - multiplier=1.3, - predicate=retries.if_exception_type( - core_exceptions.ServiceUnavailable, - ), - deadline=60.0, - ), - default_timeout=60.0, - client_info=client_info, - ), - self.list_asset_actions: self._wrap_method( - self.list_asset_actions, - default_retry=retries.AsyncRetry( - initial=1.0, - maximum=10.0, - multiplier=1.3, - predicate=retries.if_exception_type( - core_exceptions.ServiceUnavailable, - ), - deadline=60.0, - ), - default_timeout=60.0, - client_info=client_info, - ), - self.create_task: self._wrap_method( - self.create_task, - default_timeout=60.0, - client_info=client_info, - ), - self.update_task: self._wrap_method( - self.update_task, - default_timeout=60.0, - client_info=client_info, - ), - self.delete_task: self._wrap_method( - self.delete_task, - default_timeout=60.0, - client_info=client_info, - ), - self.list_tasks: self._wrap_method( - self.list_tasks, - default_retry=retries.AsyncRetry( - initial=1.0, - maximum=10.0, - multiplier=1.3, - predicate=retries.if_exception_type( - core_exceptions.ServiceUnavailable, - ), - deadline=60.0, - ), - default_timeout=60.0, - client_info=client_info, - ), - self.get_task: self._wrap_method( - self.get_task, - default_retry=retries.AsyncRetry( - initial=1.0, - maximum=10.0, - multiplier=1.3, - predicate=retries.if_exception_type( - core_exceptions.ServiceUnavailable, - ), - deadline=60.0, - ), - default_timeout=60.0, - client_info=client_info, - ), - self.list_jobs: self._wrap_method( - self.list_jobs, - default_retry=retries.AsyncRetry( - initial=1.0, - maximum=10.0, - multiplier=1.3, - predicate=retries.if_exception_type( - core_exceptions.ServiceUnavailable, - ), - deadline=60.0, - ), - default_timeout=60.0, - client_info=client_info, - ), - self.run_task: self._wrap_method( - self.run_task, - default_timeout=None, - client_info=client_info, - ), - self.get_job: self._wrap_method( - self.get_job, - default_retry=retries.AsyncRetry( - initial=1.0, - maximum=10.0, - multiplier=1.3, - predicate=retries.if_exception_type( - core_exceptions.ServiceUnavailable, - ), - deadline=60.0, - ), - default_timeout=60.0, - client_info=client_info, - ), - self.cancel_job: self._wrap_method( - self.cancel_job, - default_timeout=60.0, - client_info=client_info, - ), - self.create_environment: self._wrap_method( - self.create_environment, - default_timeout=60.0, - client_info=client_info, - ), - self.update_environment: self._wrap_method( - self.update_environment, - default_timeout=60.0, - client_info=client_info, - ), - self.delete_environment: self._wrap_method( - self.delete_environment, - default_timeout=60.0, - client_info=client_info, - ), - self.list_environments: self._wrap_method( - self.list_environments, - default_retry=retries.AsyncRetry( - initial=1.0, - maximum=10.0, - multiplier=1.3, - predicate=retries.if_exception_type( - core_exceptions.ServiceUnavailable, - ), - deadline=60.0, - ), - default_timeout=60.0, - client_info=client_info, - ), - self.get_environment: self._wrap_method( - self.get_environment, - default_retry=retries.AsyncRetry( - initial=1.0, - maximum=10.0, - multiplier=1.3, - predicate=retries.if_exception_type( - core_exceptions.ServiceUnavailable, - ), - deadline=60.0, - ), - default_timeout=60.0, - client_info=client_info, - ), 
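# [Editor's note] The retryable entries in this table share one policy:
# exponential backoff starting at 1.0s, growing 1.3x per attempt, capped at
# 10.0s per sleep, retrying only ServiceUnavailable, within a 60.0s deadline.
# A sketch of the sleep sequence that policy implies (the real AsyncRetry also
# applies random jitter and counts time spent inside the attempts):

def backoff_sleeps(initial=1.0, multiplier=1.3, maximum=10.0, deadline=60.0):
    sleeps, delay, elapsed = [], initial, 0.0
    while elapsed + delay <= deadline:
        sleeps.append(round(delay, 2))
        elapsed += delay
        delay = min(delay * multiplier, maximum)
    return sleeps

# backoff_sleeps() -> [1.0, 1.3, 1.69, 2.2, 2.86, 3.71, 4.83, 6.27, 8.16, 10.0, 10.0]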
- self.list_sessions: self._wrap_method( - self.list_sessions, - default_timeout=None, - client_info=client_info, - ), - self.get_location: self._wrap_method( - self.get_location, - default_timeout=None, - client_info=client_info, - ), - self.list_locations: self._wrap_method( - self.list_locations, - default_timeout=None, - client_info=client_info, - ), - self.cancel_operation: self._wrap_method( - self.cancel_operation, - default_timeout=None, - client_info=client_info, - ), - self.delete_operation: self._wrap_method( - self.delete_operation, - default_timeout=None, - client_info=client_info, - ), - self.get_operation: self._wrap_method( - self.get_operation, - default_timeout=None, - client_info=client_info, - ), - self.list_operations: self._wrap_method( - self.list_operations, - default_timeout=None, - client_info=client_info, - ), - } - - def _wrap_method(self, func, *args, **kwargs): - if self._wrap_with_kind: # pragma: NO COVER - kwargs["kind"] = self.kind - return gapic_v1.method_async.wrap_method(func, *args, **kwargs) - - def close(self): - return self.grpc_channel.close() - - @property - def kind(self) -> str: - return "grpc_asyncio" - - @property - def delete_operation( - self, - ) -> Callable[[operations_pb2.DeleteOperationRequest], None]: - r"""Return a callable for the delete_operation method over gRPC. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if "delete_operation" not in self._stubs: - self._stubs["delete_operation"] = self.grpc_channel.unary_unary( - "/google.longrunning.Operations/DeleteOperation", - request_serializer=operations_pb2.DeleteOperationRequest.SerializeToString, - response_deserializer=None, - ) - return self._stubs["delete_operation"] - - @property - def cancel_operation( - self, - ) -> Callable[[operations_pb2.CancelOperationRequest], None]: - r"""Return a callable for the cancel_operation method over gRPC. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if "cancel_operation" not in self._stubs: - self._stubs["cancel_operation"] = self.grpc_channel.unary_unary( - "/google.longrunning.Operations/CancelOperation", - request_serializer=operations_pb2.CancelOperationRequest.SerializeToString, - response_deserializer=None, - ) - return self._stubs["cancel_operation"] - - @property - def get_operation( - self, - ) -> Callable[[operations_pb2.GetOperationRequest], operations_pb2.Operation]: - r"""Return a callable for the get_operation method over gRPC. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if "get_operation" not in self._stubs: - self._stubs["get_operation"] = self.grpc_channel.unary_unary( - "/google.longrunning.Operations/GetOperation", - request_serializer=operations_pb2.GetOperationRequest.SerializeToString, - response_deserializer=operations_pb2.Operation.FromString, - ) - return self._stubs["get_operation"] - - @property - def list_operations( - self, - ) -> Callable[[operations_pb2.ListOperationsRequest], operations_pb2.ListOperationsResponse]: - r"""Return a callable for the list_operations method over gRPC. 
- """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if "list_operations" not in self._stubs: - self._stubs["list_operations"] = self.grpc_channel.unary_unary( - "/google.longrunning.Operations/ListOperations", - request_serializer=operations_pb2.ListOperationsRequest.SerializeToString, - response_deserializer=operations_pb2.ListOperationsResponse.FromString, - ) - return self._stubs["list_operations"] - - @property - def list_locations( - self, - ) -> Callable[[locations_pb2.ListLocationsRequest], locations_pb2.ListLocationsResponse]: - r"""Return a callable for the list locations method over gRPC. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if "list_locations" not in self._stubs: - self._stubs["list_locations"] = self.grpc_channel.unary_unary( - "/google.cloud.location.Locations/ListLocations", - request_serializer=locations_pb2.ListLocationsRequest.SerializeToString, - response_deserializer=locations_pb2.ListLocationsResponse.FromString, - ) - return self._stubs["list_locations"] - - @property - def get_location( - self, - ) -> Callable[[locations_pb2.GetLocationRequest], locations_pb2.Location]: - r"""Return a callable for the list locations method over gRPC. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if "get_location" not in self._stubs: - self._stubs["get_location"] = self.grpc_channel.unary_unary( - "/google.cloud.location.Locations/GetLocation", - request_serializer=locations_pb2.GetLocationRequest.SerializeToString, - response_deserializer=locations_pb2.Location.FromString, - ) - return self._stubs["get_location"] - - -__all__ = ( - 'DataplexServiceGrpcAsyncIOTransport', -) diff --git a/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/metadata_service/__init__.py b/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/metadata_service/__init__.py deleted file mode 100644 index 8645ebc67c27..000000000000 --- a/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/metadata_service/__init__.py +++ /dev/null @@ -1,22 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# -from .client import MetadataServiceClient -from .async_client import MetadataServiceAsyncClient - -__all__ = ( - 'MetadataServiceClient', - 'MetadataServiceAsyncClient', -) diff --git a/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/metadata_service/async_client.py b/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/metadata_service/async_client.py deleted file mode 100644 index 5ff1ef4a566b..000000000000 --- a/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/metadata_service/async_client.py +++ /dev/null @@ -1,1507 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -from collections import OrderedDict -import re -from typing import Dict, Callable, Mapping, MutableMapping, MutableSequence, Optional, Sequence, Tuple, Type, Union - -from google.cloud.dataplex_v1 import gapic_version as package_version - -from google.api_core.client_options import ClientOptions -from google.api_core import exceptions as core_exceptions -from google.api_core import gapic_v1 -from google.api_core import retry_async as retries -from google.auth import credentials as ga_credentials # type: ignore -from google.oauth2 import service_account # type: ignore - - -try: - OptionalRetry = Union[retries.AsyncRetry, gapic_v1.method._MethodDefault, None] -except AttributeError: # pragma: NO COVER - OptionalRetry = Union[retries.AsyncRetry, object, None] # type: ignore - -from google.cloud.dataplex_v1.services.metadata_service import pagers -from google.cloud.dataplex_v1.types import metadata_ -from google.cloud.location import locations_pb2 # type: ignore -from google.iam.v1 import iam_policy_pb2 # type: ignore -from google.iam.v1 import policy_pb2 # type: ignore -from google.longrunning import operations_pb2 # type: ignore -from google.protobuf import timestamp_pb2 # type: ignore -from .transports.base import MetadataServiceTransport, DEFAULT_CLIENT_INFO -from .transports.grpc_asyncio import MetadataServiceGrpcAsyncIOTransport -from .client import MetadataServiceClient - - -class MetadataServiceAsyncClient: - """Metadata service manages metadata resources such as tables, - filesets and partitions. - """ - - _client: MetadataServiceClient - - # Copy defaults from the synchronous client for use here. - # Note: DEFAULT_ENDPOINT is deprecated. Use _DEFAULT_ENDPOINT_TEMPLATE instead. 
- DEFAULT_ENDPOINT = MetadataServiceClient.DEFAULT_ENDPOINT - DEFAULT_MTLS_ENDPOINT = MetadataServiceClient.DEFAULT_MTLS_ENDPOINT - _DEFAULT_ENDPOINT_TEMPLATE = MetadataServiceClient._DEFAULT_ENDPOINT_TEMPLATE - _DEFAULT_UNIVERSE = MetadataServiceClient._DEFAULT_UNIVERSE - - entity_path = staticmethod(MetadataServiceClient.entity_path) - parse_entity_path = staticmethod(MetadataServiceClient.parse_entity_path) - partition_path = staticmethod(MetadataServiceClient.partition_path) - parse_partition_path = staticmethod(MetadataServiceClient.parse_partition_path) - zone_path = staticmethod(MetadataServiceClient.zone_path) - parse_zone_path = staticmethod(MetadataServiceClient.parse_zone_path) - common_billing_account_path = staticmethod(MetadataServiceClient.common_billing_account_path) - parse_common_billing_account_path = staticmethod(MetadataServiceClient.parse_common_billing_account_path) - common_folder_path = staticmethod(MetadataServiceClient.common_folder_path) - parse_common_folder_path = staticmethod(MetadataServiceClient.parse_common_folder_path) - common_organization_path = staticmethod(MetadataServiceClient.common_organization_path) - parse_common_organization_path = staticmethod(MetadataServiceClient.parse_common_organization_path) - common_project_path = staticmethod(MetadataServiceClient.common_project_path) - parse_common_project_path = staticmethod(MetadataServiceClient.parse_common_project_path) - common_location_path = staticmethod(MetadataServiceClient.common_location_path) - parse_common_location_path = staticmethod(MetadataServiceClient.parse_common_location_path) - - @classmethod - def from_service_account_info(cls, info: dict, *args, **kwargs): - """Creates an instance of this client using the provided credentials - info. - - Args: - info (dict): The service account private key info. - args: Additional arguments to pass to the constructor. - kwargs: Additional arguments to pass to the constructor. - - Returns: - MetadataServiceAsyncClient: The constructed client. - """ - return MetadataServiceClient.from_service_account_info.__func__(MetadataServiceAsyncClient, info, *args, **kwargs) # type: ignore - - @classmethod - def from_service_account_file(cls, filename: str, *args, **kwargs): - """Creates an instance of this client using the provided credentials - file. - - Args: - filename (str): The path to the service account private key json - file. - args: Additional arguments to pass to the constructor. - kwargs: Additional arguments to pass to the constructor. - - Returns: - MetadataServiceAsyncClient: The constructed client. - """ - return MetadataServiceClient.from_service_account_file.__func__(MetadataServiceAsyncClient, filename, *args, **kwargs) # type: ignore - - from_service_account_json = from_service_account_file - - @classmethod - def get_mtls_endpoint_and_cert_source(cls, client_options: Optional[ClientOptions] = None): - """Return the API endpoint and client cert source for mutual TLS. - - The client cert source is determined in the following order: - (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the - client cert source is None. - (2) if `client_options.client_cert_source` is provided, use the provided one; if the - default client cert source exists, use the default one; otherwise the client cert - source is None. - - The API endpoint is determined in the following order: - (1) if `client_options.api_endpoint` is provided, use the provided one. 
- (2) if `GOOGLE_API_USE_MTLS_ENDPOINT` environment variable is "always", use the - default mTLS endpoint; if the environment variable is "never", use the default API - endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise - use the default API endpoint. - - More details can be found at https://google.aip.dev/auth/4114. - - Args: - client_options (google.api_core.client_options.ClientOptions): Custom options for the - client. Only the `api_endpoint` and `client_cert_source` properties may be used - in this method. - - Returns: - Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the - client cert source to use. - - Raises: - google.auth.exceptions.MutualTLSChannelError: If any errors happen. - """ - return MetadataServiceClient.get_mtls_endpoint_and_cert_source(client_options) # type: ignore - - @property - def transport(self) -> MetadataServiceTransport: - """Returns the transport used by the client instance. - - Returns: - MetadataServiceTransport: The transport used by the client instance. - """ - return self._client.transport - - @property - def api_endpoint(self): - """Return the API endpoint used by the client instance. - - Returns: - str: The API endpoint used by the client instance. - """ - return self._client._api_endpoint - - @property - def universe_domain(self) -> str: - """Return the universe domain used by the client instance. - - Returns: - str: The universe domain used - by the client instance. - """ - return self._client._universe_domain - - get_transport_class = MetadataServiceClient.get_transport_class - - def __init__(self, *, - credentials: Optional[ga_credentials.Credentials] = None, - transport: Optional[Union[str, MetadataServiceTransport, Callable[..., MetadataServiceTransport]]] = "grpc_asyncio", - client_options: Optional[ClientOptions] = None, - client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, - ) -> None: - """Instantiates the metadata service async client. - - Args: - credentials (Optional[google.auth.credentials.Credentials]): The - authorization credentials to attach to requests. These - credentials identify the application to the service; if none - are specified, the client will attempt to ascertain the - credentials from the environment. - transport (Optional[Union[str,MetadataServiceTransport,Callable[..., MetadataServiceTransport]]]): - The transport to use, or a Callable that constructs and returns a new transport to use. - If a Callable is given, it will be called with the same set of initialization - arguments as used in the MetadataServiceTransport constructor. - If set to None, a transport is chosen automatically. - client_options (Optional[Union[google.api_core.client_options.ClientOptions, dict]]): - Custom options for the client. - - 1. The ``api_endpoint`` property can be used to override the - default endpoint provided by the client when ``transport`` is - not explicitly provided. Only if this property is not set and - ``transport`` was not explicitly provided, the endpoint is - determined by the GOOGLE_API_USE_MTLS_ENDPOINT environment - variable, which has one of the following values: - "always" (always use the default mTLS endpoint), "never" (always - use the default regular endpoint) and "auto" (auto-switch to the - default mTLS endpoint if client certificate is present; this is - the default value). - - 2. 
If the GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable - is "true", then the ``client_cert_source`` property can be used - to provide a client certificate for mTLS transport. If - not provided, the default SSL client certificate will be used if - present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not - set, no client certificate will be used. - - 3. The ``universe_domain`` property can be used to override the - default "googleapis.com" universe. Note that ``api_endpoint`` - property still takes precedence; and ``universe_domain`` is - currently not supported for mTLS. - - client_info (google.api_core.gapic_v1.client_info.ClientInfo): - The client info used to send a user-agent string along with - API requests. If ``None``, then default info will be used. - Generally, you only need to set this if you're developing - your own client library. - - Raises: - google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport - creation failed for any reason. - """ - self._client = MetadataServiceClient( - credentials=credentials, - transport=transport, - client_options=client_options, - client_info=client_info, - - ) - - async def create_entity(self, - request: Optional[Union[metadata_.CreateEntityRequest, dict]] = None, - *, - parent: Optional[str] = None, - entity: Optional[metadata_.Entity] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> metadata_.Entity: - r"""Create a metadata entity. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import dataplex_v1 - - async def sample_create_entity(): - # Create a client - client = dataplex_v1.MetadataServiceAsyncClient() - - # Initialize request argument(s) - entity = dataplex_v1.Entity() - entity.id = "id_value" - entity.type_ = "FILESET" - entity.asset = "asset_value" - entity.data_path = "data_path_value" - entity.system = "BIGQUERY" - entity.format_.mime_type = "mime_type_value" - entity.schema.user_managed = True - - request = dataplex_v1.CreateEntityRequest( - parent="parent_value", - entity=entity, - ) - - # Make the request - response = await client.create_entity(request=request) - - # Handle the response - print(response) - - Args: - request (Optional[Union[google.cloud.dataplex_v1.types.CreateEntityRequest, dict]]): - The request object. Create a metadata entity request. - parent (:class:`str`): - Required. The resource name of the parent zone: - ``projects/{project_number}/locations/{location_id}/lakes/{lake_id}/zones/{zone_id}``. - - This corresponds to the ``parent`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - entity (:class:`google.cloud.dataplex_v1.types.Entity`): - Required. Entity resource. - This corresponds to the ``entity`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. 
- metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.cloud.dataplex_v1.types.Entity: - Represents tables and fileset - metadata contained within a zone. - - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([parent, entity]) - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, metadata_.CreateEntityRequest): - request = metadata_.CreateEntityRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if parent is not None: - request.parent = parent - if entity is not None: - request.entity = entity - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._client._transport._wrapped_methods[self._client._transport.create_entity] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - async def update_entity(self, - request: Optional[Union[metadata_.UpdateEntityRequest, dict]] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> metadata_.Entity: - r"""Update a metadata entity. Only supports full resource - update. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import dataplex_v1 - - async def sample_update_entity(): - # Create a client - client = dataplex_v1.MetadataServiceAsyncClient() - - # Initialize request argument(s) - entity = dataplex_v1.Entity() - entity.id = "id_value" - entity.type_ = "FILESET" - entity.asset = "asset_value" - entity.data_path = "data_path_value" - entity.system = "BIGQUERY" - entity.format_.mime_type = "mime_type_value" - entity.schema.user_managed = True - - request = dataplex_v1.UpdateEntityRequest( - entity=entity, - ) - - # Make the request - response = await client.update_entity(request=request) - - # Handle the response - print(response) - - Args: - request (Optional[Union[google.cloud.dataplex_v1.types.UpdateEntityRequest, dict]]): - The request object. Update a metadata entity request. - The existing entity will be fully - replaced by the entity in the request. - The entity ID is mutable. To modify the - ID, use the current entity ID in the - request URL and specify the new ID in - the request body. 
- retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.cloud.dataplex_v1.types.Entity: - Represents tables and fileset - metadata contained within a zone. - - """ - # Create or coerce a protobuf request object. - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, metadata_.UpdateEntityRequest): - request = metadata_.UpdateEntityRequest(request) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._client._transport._wrapped_methods[self._client._transport.update_entity] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("entity.name", request.entity.name), - )), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - async def delete_entity(self, - request: Optional[Union[metadata_.DeleteEntityRequest, dict]] = None, - *, - name: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> None: - r"""Delete a metadata entity. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import dataplex_v1 - - async def sample_delete_entity(): - # Create a client - client = dataplex_v1.MetadataServiceAsyncClient() - - # Initialize request argument(s) - request = dataplex_v1.DeleteEntityRequest( - name="name_value", - etag="etag_value", - ) - - # Make the request - await client.delete_entity(request=request) - - Args: - request (Optional[Union[google.cloud.dataplex_v1.types.DeleteEntityRequest, dict]]): - The request object. Delete a metadata entity request. - name (:class:`str`): - Required. The resource name of the entity: - ``projects/{project_number}/locations/{location_id}/lakes/{lake_id}/zones/{zone_id}/entities/{entity_id}``. - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. 
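# [Editor's note] The "quick check" just below enforces a convention shared by
# every flattened method in these clients: pass either a request object or the
# flattened fields, never both. A hedged sketch of the two equivalent call
# styles (resource name and etag values are illustrative):

from google.cloud import dataplex_v1

async def delete_entity_two_ways(client: dataplex_v1.MetadataServiceAsyncClient,
                                 name: str, etag: str) -> None:
    # Style 1: explicit request object; any request field (e.g. etag) can be set.
    await client.delete_entity(
        request=dataplex_v1.DeleteEntityRequest(name=name, etag=etag),
    )
    # Style 2: flattened keyword argument, limited to the fields the signature
    # exposes (just `name` here). Combining the two styles raises ValueError
    # before any RPC is sent:
    #     await client.delete_entity(request=req, name=name)  # ValueError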
- has_flattened_params = any([name]) - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, metadata_.DeleteEntityRequest): - request = metadata_.DeleteEntityRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._client._transport._wrapped_methods[self._client._transport.delete_entity] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. - await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - async def get_entity(self, - request: Optional[Union[metadata_.GetEntityRequest, dict]] = None, - *, - name: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> metadata_.Entity: - r"""Get a metadata entity. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import dataplex_v1 - - async def sample_get_entity(): - # Create a client - client = dataplex_v1.MetadataServiceAsyncClient() - - # Initialize request argument(s) - request = dataplex_v1.GetEntityRequest( - name="name_value", - ) - - # Make the request - response = await client.get_entity(request=request) - - # Handle the response - print(response) - - Args: - request (Optional[Union[google.cloud.dataplex_v1.types.GetEntityRequest, dict]]): - The request object. Get metadata entity request. - name (:class:`str`): - Required. The resource name of the entity: - ``projects/{project_number}/locations/{location_id}/lakes/{lake_id}/zones/{zone_id}/entities/{entity_id}``. - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.cloud.dataplex_v1.types.Entity: - Represents tables and fileset - metadata contained within a zone. - - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. 
- has_flattened_params = any([name]) - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, metadata_.GetEntityRequest): - request = metadata_.GetEntityRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._client._transport._wrapped_methods[self._client._transport.get_entity] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - async def list_entities(self, - request: Optional[Union[metadata_.ListEntitiesRequest, dict]] = None, - *, - parent: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> pagers.ListEntitiesAsyncPager: - r"""List metadata entities in a zone. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import dataplex_v1 - - async def sample_list_entities(): - # Create a client - client = dataplex_v1.MetadataServiceAsyncClient() - - # Initialize request argument(s) - request = dataplex_v1.ListEntitiesRequest( - parent="parent_value", - view="FILESETS", - ) - - # Make the request - page_result = client.list_entities(request=request) - - # Handle the response - async for response in page_result: - print(response) - - Args: - request (Optional[Union[google.cloud.dataplex_v1.types.ListEntitiesRequest, dict]]): - The request object. List metadata entities request. - parent (:class:`str`): - Required. The resource name of the parent zone: - ``projects/{project_number}/locations/{location_id}/lakes/{lake_id}/zones/{zone_id}``. - - This corresponds to the ``parent`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.cloud.dataplex_v1.services.metadata_service.pagers.ListEntitiesAsyncPager: - List metadata entities response. - - Iterating over this object will yield - results and resolve additional pages - automatically. - - """ - # Create or coerce a protobuf request object. 
- # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([parent]) - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, metadata_.ListEntitiesRequest): - request = metadata_.ListEntitiesRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if parent is not None: - request.parent = parent - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._client._transport._wrapped_methods[self._client._transport.list_entities] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # This method is paged; wrap the response in a pager, which provides - # an `__aiter__` convenience method. - response = pagers.ListEntitiesAsyncPager( - method=rpc, - request=request, - response=response, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - async def create_partition(self, - request: Optional[Union[metadata_.CreatePartitionRequest, dict]] = None, - *, - parent: Optional[str] = None, - partition: Optional[metadata_.Partition] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> metadata_.Partition: - r"""Create a metadata partition. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import dataplex_v1 - - async def sample_create_partition(): - # Create a client - client = dataplex_v1.MetadataServiceAsyncClient() - - # Initialize request argument(s) - partition = dataplex_v1.Partition() - partition.values = ['values_value1', 'values_value2'] - partition.location = "location_value" - - request = dataplex_v1.CreatePartitionRequest( - parent="parent_value", - partition=partition, - ) - - # Make the request - response = await client.create_partition(request=request) - - # Handle the response - print(response) - - Args: - request (Optional[Union[google.cloud.dataplex_v1.types.CreatePartitionRequest, dict]]): - The request object. Create metadata partition request. - parent (:class:`str`): - Required. The resource name of the parent zone: - ``projects/{project_number}/locations/{location_id}/lakes/{lake_id}/zones/{zone_id}/entities/{entity_id}``. - - This corresponds to the ``parent`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. 
- partition (:class:`google.cloud.dataplex_v1.types.Partition`): - Required. Partition resource. - This corresponds to the ``partition`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.cloud.dataplex_v1.types.Partition: - Represents partition metadata - contained within entity instances. - - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([parent, partition]) - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, metadata_.CreatePartitionRequest): - request = metadata_.CreatePartitionRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if parent is not None: - request.parent = parent - if partition is not None: - request.partition = partition - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._client._transport._wrapped_methods[self._client._transport.create_partition] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - async def delete_partition(self, - request: Optional[Union[metadata_.DeletePartitionRequest, dict]] = None, - *, - name: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> None: - r"""Delete a metadata partition. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import dataplex_v1 - - async def sample_delete_partition(): - # Create a client - client = dataplex_v1.MetadataServiceAsyncClient() - - # Initialize request argument(s) - request = dataplex_v1.DeletePartitionRequest( - name="name_value", - ) - - # Make the request - await client.delete_partition(request=request) - - Args: - request (Optional[Union[google.cloud.dataplex_v1.types.DeletePartitionRequest, dict]]): - The request object. Delete metadata partition request. - name (:class:`str`): - Required. The resource name of the partition. 
format: - ``projects/{project_number}/locations/{location_id}/lakes/{lake_id}/zones/{zone_id}/entities/{entity_id}/partitions/{partition_value_path}``. - The {partition_value_path} segment consists of an - ordered sequence of partition values separated by "/". - All values must be provided. - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([name]) - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, metadata_.DeletePartitionRequest): - request = metadata_.DeletePartitionRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._client._transport._wrapped_methods[self._client._transport.delete_partition] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. - await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - async def get_partition(self, - request: Optional[Union[metadata_.GetPartitionRequest, dict]] = None, - *, - name: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> metadata_.Partition: - r"""Get a metadata partition of an entity. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import dataplex_v1 - - async def sample_get_partition(): - # Create a client - client = dataplex_v1.MetadataServiceAsyncClient() - - # Initialize request argument(s) - request = dataplex_v1.GetPartitionRequest( - name="name_value", - ) - - # Make the request - response = await client.get_partition(request=request) - - # Handle the response - print(response) - - Args: - request (Optional[Union[google.cloud.dataplex_v1.types.GetPartitionRequest, dict]]): - The request object. Get metadata partition request. - name (:class:`str`): - Required. 
The resource name of the partition: - ``projects/{project_number}/locations/{location_id}/lakes/{lake_id}/zones/{zone_id}/entities/{entity_id}/partitions/{partition_value_path}``. - The {partition_value_path} segment consists of an - ordered sequence of partition values separated by "/". - All values must be provided. - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.cloud.dataplex_v1.types.Partition: - Represents partition metadata - contained within entity instances. - - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([name]) - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, metadata_.GetPartitionRequest): - request = metadata_.GetPartitionRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._client._transport._wrapped_methods[self._client._transport.get_partition] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - async def list_partitions(self, - request: Optional[Union[metadata_.ListPartitionsRequest, dict]] = None, - *, - parent: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> pagers.ListPartitionsAsyncPager: - r"""List metadata partitions of an entity. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. 
- # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import dataplex_v1 - - async def sample_list_partitions(): - # Create a client - client = dataplex_v1.MetadataServiceAsyncClient() - - # Initialize request argument(s) - request = dataplex_v1.ListPartitionsRequest( - parent="parent_value", - ) - - # Make the request - page_result = client.list_partitions(request=request) - - # Handle the response - async for response in page_result: - print(response) - - Args: - request (Optional[Union[google.cloud.dataplex_v1.types.ListPartitionsRequest, dict]]): - The request object. List metadata partitions request. - parent (:class:`str`): - Required. The resource name of the parent entity: - ``projects/{project_number}/locations/{location_id}/lakes/{lake_id}/zones/{zone_id}/entities/{entity_id}``. - - This corresponds to the ``parent`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.cloud.dataplex_v1.services.metadata_service.pagers.ListPartitionsAsyncPager: - List metadata partitions response. - - Iterating over this object will yield - results and resolve additional pages - automatically. - - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([parent]) - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, metadata_.ListPartitionsRequest): - request = metadata_.ListPartitionsRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if parent is not None: - request.parent = parent - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._client._transport._wrapped_methods[self._client._transport.list_partitions] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # This method is paged; wrap the response in a pager, which provides - # an `__aiter__` convenience method. - response = pagers.ListPartitionsAsyncPager( - method=rpc, - request=request, - response=response, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. 
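# [Editor's note] Because the response is wrapped in an AsyncPager below,
# callers never touch page tokens directly. A hedged usage sketch (the parent
# value and function name are illustrative; awaiting the method returns the
# pager, which then fetches further pages on demand):

from google.cloud import dataplex_v1

async def count_partitions(parent: str) -> int:
    client = dataplex_v1.MetadataServiceAsyncClient()
    pager = await client.list_partitions(parent=parent)
    total = 0
    async for _partition in pager:  # issues extra ListPartitions RPCs as needed
        total += 1
    return total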
- return response - - async def list_operations( - self, - request: Optional[operations_pb2.ListOperationsRequest] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> operations_pb2.ListOperationsResponse: - r"""Lists operations that match the specified filter in the request. - - Args: - request (:class:`~.operations_pb2.ListOperationsRequest`): - The request object. Request message for - `ListOperations` method. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, - if any, should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - Returns: - ~.operations_pb2.ListOperationsResponse: - Response message for ``ListOperations`` method. - """ - # Create or coerce a protobuf request object. - # The request isn't a proto-plus wrapped type, - # so it must be constructed via keyword expansion. - if isinstance(request, dict): - request = operations_pb2.ListOperationsRequest(**request) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self.transport._wrapped_methods[self._client._transport.list_operations] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata( - (("name", request.name),)), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. - response = await rpc( - request, retry=retry, timeout=timeout, metadata=metadata,) - - # Done; return the response. - return response - - async def get_operation( - self, - request: Optional[operations_pb2.GetOperationRequest] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> operations_pb2.Operation: - r"""Gets the latest state of a long-running operation. - - Args: - request (:class:`~.operations_pb2.GetOperationRequest`): - The request object. Request message for - `GetOperation` method. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, - if any, should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - Returns: - ~.operations_pb2.Operation: - An ``Operation`` object. - """ - # Create or coerce a protobuf request object. - # The request isn't a proto-plus wrapped type, - # so it must be constructed via keyword expansion. - if isinstance(request, dict): - request = operations_pb2.GetOperationRequest(**request) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self.transport._wrapped_methods[self._client._transport.get_operation] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata( - (("name", request.name),)), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. - response = await rpc( - request, retry=retry, timeout=timeout, metadata=metadata,) - - # Done; return the response. 
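-        # Editor's sketch (not generated code): the operations mixin methods
-        # here wrap the standard google.longrunning surface. Assumes
-        # `from google.cloud import dataplex_v1`; resource names are
-        # placeholders.
-        #
-        #     from google.longrunning import operations_pb2
-        #
-        #     async def sample_operations():
-        #         client = dataplex_v1.MetadataServiceAsyncClient()
-        #         # Enumerate operations under a location, then poll each one.
-        #         ops = await client.list_operations(
-        #             operations_pb2.ListOperationsRequest(
-        #                 name="projects/my-project/locations/us-central1"))
-        #         for op in ops.operations:
-        #             print(await client.get_operation(
-        #                 operations_pb2.GetOperationRequest(name=op.name)))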
- return response - - async def delete_operation( - self, - request: Optional[operations_pb2.DeleteOperationRequest] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> None: - r"""Deletes a long-running operation. - - This method indicates that the client is no longer interested - in the operation result. It does not cancel the operation. - If the server doesn't support this method, it returns - `google.rpc.Code.UNIMPLEMENTED`. - - Args: - request (:class:`~.operations_pb2.DeleteOperationRequest`): - The request object. Request message for - `DeleteOperation` method. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, - if any, should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - Returns: - None - """ - # Create or coerce a protobuf request object. - # The request isn't a proto-plus wrapped type, - # so it must be constructed via keyword expansion. - if isinstance(request, dict): - request = operations_pb2.DeleteOperationRequest(**request) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self.transport._wrapped_methods[self._client._transport.delete_operation] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata( - (("name", request.name),)), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. - await rpc(request, retry=retry, timeout=timeout, metadata=metadata,) - - async def cancel_operation( - self, - request: Optional[operations_pb2.CancelOperationRequest] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> None: - r"""Starts asynchronous cancellation on a long-running operation. - - The server makes a best effort to cancel the operation, but success - is not guaranteed. If the server doesn't support this method, it returns - `google.rpc.Code.UNIMPLEMENTED`. - - Args: - request (:class:`~.operations_pb2.CancelOperationRequest`): - The request object. Request message for - `CancelOperation` method. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, - if any, should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - Returns: - None - """ - # Create or coerce a protobuf request object. - # The request isn't a proto-plus wrapped type, - # so it must be constructed via keyword expansion. - if isinstance(request, dict): - request = operations_pb2.CancelOperationRequest(**request) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self.transport._wrapped_methods[self._client._transport.cancel_operation] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata( - (("name", request.name),)), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. 
- await rpc(request, retry=retry, timeout=timeout, metadata=metadata,) - - async def get_location( - self, - request: Optional[locations_pb2.GetLocationRequest] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> locations_pb2.Location: - r"""Gets information about a location. - - Args: - request (:class:`~.location_pb2.GetLocationRequest`): - The request object. Request message for - `GetLocation` method. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, - if any, should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - Returns: - ~.location_pb2.Location: - Location object. - """ - # Create or coerce a protobuf request object. - # The request isn't a proto-plus wrapped type, - # so it must be constructed via keyword expansion. - if isinstance(request, dict): - request = locations_pb2.GetLocationRequest(**request) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self.transport._wrapped_methods[self._client._transport.get_location] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata( - (("name", request.name),)), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. - response = await rpc( - request, retry=retry, timeout=timeout, metadata=metadata,) - - # Done; return the response. - return response - - async def list_locations( - self, - request: Optional[locations_pb2.ListLocationsRequest] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> locations_pb2.ListLocationsResponse: - r"""Lists information about the supported locations for this service. - - Args: - request (:class:`~.location_pb2.ListLocationsRequest`): - The request object. Request message for - `ListLocations` method. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, - if any, should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - Returns: - ~.location_pb2.ListLocationsResponse: - Response message for ``ListLocations`` method. - """ - # Create or coerce a protobuf request object. - # The request isn't a proto-plus wrapped type, - # so it must be constructed via keyword expansion. - if isinstance(request, dict): - request = locations_pb2.ListLocationsRequest(**request) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self.transport._wrapped_methods[self._client._transport.list_locations] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata( - (("name", request.name),)), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. - response = await rpc( - request, retry=retry, timeout=timeout, metadata=metadata,) - - # Done; return the response. 
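-        # Editor's sketch (not generated code): using the locations mixin.
-        # Assumes `from google.cloud import dataplex_v1`; the project ID is a
-        # placeholder.
-        #
-        #     from google.cloud.location import locations_pb2
-        #
-        #     async def sample_locations():
-        #         client = dataplex_v1.MetadataServiceAsyncClient()
-        #         response = await client.list_locations(
-        #             locations_pb2.ListLocationsRequest(name="projects/my-project"))
-        #         for location in response.locations:
-        #             print(location.location_id)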
- return response - - async def __aenter__(self) -> "MetadataServiceAsyncClient": - return self - - async def __aexit__(self, exc_type, exc, tb): - await self.transport.close() - -DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(gapic_version=package_version.__version__) - - -__all__ = ( - "MetadataServiceAsyncClient", -) diff --git a/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/metadata_service/client.py b/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/metadata_service/client.py deleted file mode 100644 index 59ba45b296de..000000000000 --- a/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/metadata_service/client.py +++ /dev/null @@ -1,1840 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -from collections import OrderedDict -import os -import re -from typing import Dict, Callable, Mapping, MutableMapping, MutableSequence, Optional, Sequence, Tuple, Type, Union, cast -import warnings - -from google.cloud.dataplex_v1 import gapic_version as package_version - -from google.api_core import client_options as client_options_lib -from google.api_core import exceptions as core_exceptions -from google.api_core import gapic_v1 -from google.api_core import retry as retries -from google.auth import credentials as ga_credentials # type: ignore -from google.auth.transport import mtls # type: ignore -from google.auth.transport.grpc import SslCredentials # type: ignore -from google.auth.exceptions import MutualTLSChannelError # type: ignore -from google.oauth2 import service_account # type: ignore - -try: - OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault, None] -except AttributeError: # pragma: NO COVER - OptionalRetry = Union[retries.Retry, object, None] # type: ignore - -from google.cloud.dataplex_v1.services.metadata_service import pagers -from google.cloud.dataplex_v1.types import metadata_ -from google.cloud.location import locations_pb2 # type: ignore -from google.iam.v1 import iam_policy_pb2 # type: ignore -from google.iam.v1 import policy_pb2 # type: ignore -from google.longrunning import operations_pb2 # type: ignore -from google.protobuf import timestamp_pb2 # type: ignore -from .transports.base import MetadataServiceTransport, DEFAULT_CLIENT_INFO -from .transports.grpc import MetadataServiceGrpcTransport -from .transports.grpc_asyncio import MetadataServiceGrpcAsyncIOTransport - - -class MetadataServiceClientMeta(type): - """Metaclass for the MetadataService client. - - This provides class-level methods for building and retrieving - support objects (e.g. transport) without polluting the client instance - objects. 
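-
-    For example (an editor's sketch), one of the registered transports can
-    be selected by label:
-
-    .. code-block:: python
-
-        transport_cls = MetadataServiceClient.get_transport_class("grpc_asyncio")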
- """ - _transport_registry = OrderedDict() # type: Dict[str, Type[MetadataServiceTransport]] - _transport_registry["grpc"] = MetadataServiceGrpcTransport - _transport_registry["grpc_asyncio"] = MetadataServiceGrpcAsyncIOTransport - - def get_transport_class(cls, - label: Optional[str] = None, - ) -> Type[MetadataServiceTransport]: - """Returns an appropriate transport class. - - Args: - label: The name of the desired transport. If none is - provided, then the first transport in the registry is used. - - Returns: - The transport class to use. - """ - # If a specific transport is requested, return that one. - if label: - return cls._transport_registry[label] - - # No transport is requested; return the default (that is, the first one - # in the dictionary). - return next(iter(cls._transport_registry.values())) - - -class MetadataServiceClient(metaclass=MetadataServiceClientMeta): - """Metadata service manages metadata resources such as tables, - filesets and partitions. - """ - - @staticmethod - def _get_default_mtls_endpoint(api_endpoint): - """Converts api endpoint to mTLS endpoint. - - Convert "*.sandbox.googleapis.com" and "*.googleapis.com" to - "*.mtls.sandbox.googleapis.com" and "*.mtls.googleapis.com" respectively. - Args: - api_endpoint (Optional[str]): the api endpoint to convert. - Returns: - str: converted mTLS api endpoint. - """ - if not api_endpoint: - return api_endpoint - - mtls_endpoint_re = re.compile( - r"(?P[^.]+)(?P\.mtls)?(?P\.sandbox)?(?P\.googleapis\.com)?" - ) - - m = mtls_endpoint_re.match(api_endpoint) - name, mtls, sandbox, googledomain = m.groups() - if mtls or not googledomain: - return api_endpoint - - if sandbox: - return api_endpoint.replace( - "sandbox.googleapis.com", "mtls.sandbox.googleapis.com" - ) - - return api_endpoint.replace(".googleapis.com", ".mtls.googleapis.com") - - # Note: DEFAULT_ENDPOINT is deprecated. Use _DEFAULT_ENDPOINT_TEMPLATE instead. - DEFAULT_ENDPOINT = "dataplex.googleapis.com" - DEFAULT_MTLS_ENDPOINT = _get_default_mtls_endpoint.__func__( # type: ignore - DEFAULT_ENDPOINT - ) - - _DEFAULT_ENDPOINT_TEMPLATE = "dataplex.{UNIVERSE_DOMAIN}" - _DEFAULT_UNIVERSE = "googleapis.com" - - @classmethod - def from_service_account_info(cls, info: dict, *args, **kwargs): - """Creates an instance of this client using the provided credentials - info. - - Args: - info (dict): The service account private key info. - args: Additional arguments to pass to the constructor. - kwargs: Additional arguments to pass to the constructor. - - Returns: - MetadataServiceClient: The constructed client. - """ - credentials = service_account.Credentials.from_service_account_info(info) - kwargs["credentials"] = credentials - return cls(*args, **kwargs) - - @classmethod - def from_service_account_file(cls, filename: str, *args, **kwargs): - """Creates an instance of this client using the provided credentials - file. - - Args: - filename (str): The path to the service account private key json - file. - args: Additional arguments to pass to the constructor. - kwargs: Additional arguments to pass to the constructor. - - Returns: - MetadataServiceClient: The constructed client. - """ - credentials = service_account.Credentials.from_service_account_file( - filename) - kwargs["credentials"] = credentials - return cls(*args, **kwargs) - - from_service_account_json = from_service_account_file - - @property - def transport(self) -> MetadataServiceTransport: - """Returns the transport used by the client instance. 
-
-    @property
-    def transport(self) -> MetadataServiceTransport:
-        """Returns the transport used by the client instance.
-
-        Returns:
-            MetadataServiceTransport: The transport used by the client
-                instance.
-        """
-        return self._transport
-
-    @staticmethod
-    def entity_path(project: str,location: str,lake: str,zone: str,entity: str,) -> str:
-        """Returns a fully-qualified entity string."""
-        return "projects/{project}/locations/{location}/lakes/{lake}/zones/{zone}/entities/{entity}".format(project=project, location=location, lake=lake, zone=zone, entity=entity, )
-
-    @staticmethod
-    def parse_entity_path(path: str) -> Dict[str,str]:
-        """Parses an entity path into its component segments."""
-        m = re.match(r"^projects/(?P<project>.+?)/locations/(?P<location>.+?)/lakes/(?P<lake>.+?)/zones/(?P<zone>.+?)/entities/(?P<entity>.+?)$", path)
-        return m.groupdict() if m else {}
-
-    @staticmethod
-    def partition_path(project: str,location: str,lake: str,zone: str,entity: str,partition: str,) -> str:
-        """Returns a fully-qualified partition string."""
-        return "projects/{project}/locations/{location}/lakes/{lake}/zones/{zone}/entities/{entity}/partitions/{partition}".format(project=project, location=location, lake=lake, zone=zone, entity=entity, partition=partition, )
-
-    @staticmethod
-    def parse_partition_path(path: str) -> Dict[str,str]:
-        """Parses a partition path into its component segments."""
-        m = re.match(r"^projects/(?P<project>.+?)/locations/(?P<location>.+?)/lakes/(?P<lake>.+?)/zones/(?P<zone>.+?)/entities/(?P<entity>.+?)/partitions/(?P<partition>.+?)$", path)
-        return m.groupdict() if m else {}
-
-    @staticmethod
-    def zone_path(project: str,location: str,lake: str,zone: str,) -> str:
-        """Returns a fully-qualified zone string."""
-        return "projects/{project}/locations/{location}/lakes/{lake}/zones/{zone}".format(project=project, location=location, lake=lake, zone=zone, )
-
-    @staticmethod
-    def parse_zone_path(path: str) -> Dict[str,str]:
-        """Parses a zone path into its component segments."""
-        m = re.match(r"^projects/(?P<project>.+?)/locations/(?P<location>.+?)/lakes/(?P<lake>.+?)/zones/(?P<zone>.+?)$", path)
-        return m.groupdict() if m else {}
-
-    @staticmethod
-    def common_billing_account_path(billing_account: str, ) -> str:
-        """Returns a fully-qualified billing_account string."""
-        return "billingAccounts/{billing_account}".format(billing_account=billing_account, )
-
-    @staticmethod
-    def parse_common_billing_account_path(path: str) -> Dict[str,str]:
-        """Parse a billing_account path into its component segments."""
-        m = re.match(r"^billingAccounts/(?P<billing_account>.+?)$", path)
-        return m.groupdict() if m else {}
-
-    @staticmethod
-    def common_folder_path(folder: str, ) -> str:
-        """Returns a fully-qualified folder string."""
-        return "folders/{folder}".format(folder=folder, )
-
-    @staticmethod
-    def parse_common_folder_path(path: str) -> Dict[str,str]:
-        """Parse a folder path into its component segments."""
-        m = re.match(r"^folders/(?P<folder>.+?)$", path)
-        return m.groupdict() if m else {}
-
-    @staticmethod
-    def common_organization_path(organization: str, ) -> str:
-        """Returns a fully-qualified organization string."""
-        return "organizations/{organization}".format(organization=organization, )
-
-    @staticmethod
-    def parse_common_organization_path(path: str) -> Dict[str,str]:
-        """Parse an organization path into its component segments."""
-        m = re.match(r"^organizations/(?P<organization>.+?)$", path)
-        return m.groupdict() if m else {}
-
-    @staticmethod
-    def common_project_path(project: str, ) -> str:
-        """Returns a fully-qualified project string."""
-        return "projects/{project}".format(project=project, )
-
-    @staticmethod
-    def parse_common_project_path(path: str) -> Dict[str,str]:
-        """Parse a project path into its component segments."""
-        m = re.match(r"^projects/(?P<project>.+?)$", path)
-        return m.groupdict() if m else {}
-
-    @staticmethod
-    def common_location_path(project: str, location: str, ) -> str:
-        """Returns a fully-qualified location string."""
-        return "projects/{project}/locations/{location}".format(project=project, location=location, )
-
-    @staticmethod
-    def parse_common_location_path(path: str) -> Dict[str,str]:
-        """Parse a location path into its component segments."""
-        m = re.match(r"^projects/(?P<project>.+?)/locations/(?P<location>.+?)$", path)
-        return m.groupdict() if m else {}
-
-    @classmethod
-    def get_mtls_endpoint_and_cert_source(cls, client_options: Optional[client_options_lib.ClientOptions] = None):
-        """Deprecated. Return the API endpoint and client cert source for mutual TLS.
-
-        The client cert source is determined in the following order:
-        (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the
-        client cert source is None.
-        (2) if `client_options.client_cert_source` is provided, use the provided one; if the
-        default client cert source exists, use the default one; otherwise the client cert
-        source is None.
-
-        The API endpoint is determined in the following order:
-        (1) if `client_options.api_endpoint` is provided, use the provided one.
-        (2) if `GOOGLE_API_USE_MTLS_ENDPOINT` environment variable is "always", use the
-        default mTLS endpoint; if the environment variable is "never", use the default API
-        endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise
-        use the default API endpoint.
-
-        More details can be found at https://google.aip.dev/auth/4114.
-
-        Args:
-            client_options (google.api_core.client_options.ClientOptions): Custom options for the
-                client. Only the `api_endpoint` and `client_cert_source` properties may be used
-                in this method.
-
-        Returns:
-            Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the
-                client cert source to use.
-
-        Raises:
-            google.auth.exceptions.MutualTLSChannelError: If any errors happen.
-        """
-
-        warnings.warn("get_mtls_endpoint_and_cert_source is deprecated. Use the api_endpoint property instead.",
-            DeprecationWarning)
-        if client_options is None:
-            client_options = client_options_lib.ClientOptions()
-        use_client_cert = os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false")
-        use_mtls_endpoint = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto")
-        if use_client_cert not in ("true", "false"):
-            raise ValueError("Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`")
-        if use_mtls_endpoint not in ("auto", "never", "always"):
-            raise MutualTLSChannelError("Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`")
-
-        # Figure out the client cert source to use.
-        client_cert_source = None
-        if use_client_cert == "true":
-            if client_options.client_cert_source:
-                client_cert_source = client_options.client_cert_source
-            elif mtls.has_default_client_cert_source():
-                client_cert_source = mtls.default_client_cert_source()
-
-        # Figure out which api endpoint to use.
-        if client_options.api_endpoint is not None:
-            api_endpoint = client_options.api_endpoint
-        elif use_mtls_endpoint == "always" or (use_mtls_endpoint == "auto" and client_cert_source):
-            api_endpoint = cls.DEFAULT_MTLS_ENDPOINT
-        else:
-            api_endpoint = cls.DEFAULT_ENDPOINT
-
-        return api_endpoint, client_cert_source
-
-    @staticmethod
-    def _read_environment_variables():
-        """Returns the environment variables used by the client.
- - Returns: - Tuple[bool, str, str]: returns the GOOGLE_API_USE_CLIENT_CERTIFICATE, - GOOGLE_API_USE_MTLS_ENDPOINT, and GOOGLE_CLOUD_UNIVERSE_DOMAIN environment variables. - - Raises: - ValueError: If GOOGLE_API_USE_CLIENT_CERTIFICATE is not - any of ["true", "false"]. - google.auth.exceptions.MutualTLSChannelError: If GOOGLE_API_USE_MTLS_ENDPOINT - is not any of ["auto", "never", "always"]. - """ - use_client_cert = os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false").lower() - use_mtls_endpoint = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto").lower() - universe_domain_env = os.getenv("GOOGLE_CLOUD_UNIVERSE_DOMAIN") - if use_client_cert not in ("true", "false"): - raise ValueError("Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`") - if use_mtls_endpoint not in ("auto", "never", "always"): - raise MutualTLSChannelError("Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`") - return use_client_cert == "true", use_mtls_endpoint, universe_domain_env - - @staticmethod - def _get_client_cert_source(provided_cert_source, use_cert_flag): - """Return the client cert source to be used by the client. - - Args: - provided_cert_source (bytes): The client certificate source provided. - use_cert_flag (bool): A flag indicating whether to use the client certificate. - - Returns: - bytes or None: The client cert source to be used by the client. - """ - client_cert_source = None - if use_cert_flag: - if provided_cert_source: - client_cert_source = provided_cert_source - elif mtls.has_default_client_cert_source(): - client_cert_source = mtls.default_client_cert_source() - return client_cert_source - - @staticmethod - def _get_api_endpoint(api_override, client_cert_source, universe_domain, use_mtls_endpoint): - """Return the API endpoint used by the client. - - Args: - api_override (str): The API endpoint override. If specified, this is always - the return value of this function and the other arguments are not used. - client_cert_source (bytes): The client certificate source used by the client. - universe_domain (str): The universe domain used by the client. - use_mtls_endpoint (str): How to use the mTLS endpoint, which depends also on the other parameters. - Possible values are "always", "auto", or "never". - - Returns: - str: The API endpoint to be used by the client. - """ - if api_override is not None: - api_endpoint = api_override - elif use_mtls_endpoint == "always" or (use_mtls_endpoint == "auto" and client_cert_source): - _default_universe = MetadataServiceClient._DEFAULT_UNIVERSE - if universe_domain != _default_universe: - raise MutualTLSChannelError(f"mTLS is not supported in any universe other than {_default_universe}.") - api_endpoint = MetadataServiceClient.DEFAULT_MTLS_ENDPOINT - else: - api_endpoint = MetadataServiceClient._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=universe_domain) - return api_endpoint - - @staticmethod - def _get_universe_domain(client_universe_domain: Optional[str], universe_domain_env: Optional[str]) -> str: - """Return the universe domain used by the client. - - Args: - client_universe_domain (Optional[str]): The universe domain configured via the client options. - universe_domain_env (Optional[str]): The universe domain configured via the "GOOGLE_CLOUD_UNIVERSE_DOMAIN" environment variable. - - Returns: - str: The universe domain to be used by the client. - - Raises: - ValueError: If the universe domain is an empty string. 
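-
-        For example (an editor's sketch of the resolution order):
-
-        .. code-block:: python
-
-            # An explicit client option wins over the environment variable.
-            assert MetadataServiceClient._get_universe_domain(
-                "example.com", "env.example.com") == "example.com"
-            # With neither set, the client falls back to the default universe.
-            assert MetadataServiceClient._get_universe_domain(None, None) == "googleapis.com"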
- """ - universe_domain = MetadataServiceClient._DEFAULT_UNIVERSE - if client_universe_domain is not None: - universe_domain = client_universe_domain - elif universe_domain_env is not None: - universe_domain = universe_domain_env - if len(universe_domain.strip()) == 0: - raise ValueError("Universe Domain cannot be an empty string.") - return universe_domain - - def _validate_universe_domain(self): - """Validates client's and credentials' universe domains are consistent. - - Returns: - bool: True iff the configured universe domain is valid. - - Raises: - ValueError: If the configured universe domain is not valid. - """ - - # NOTE (b/349488459): universe validation is disabled until further notice. - return True - - @property - def api_endpoint(self): - """Return the API endpoint used by the client instance. - - Returns: - str: The API endpoint used by the client instance. - """ - return self._api_endpoint - - @property - def universe_domain(self) -> str: - """Return the universe domain used by the client instance. - - Returns: - str: The universe domain used by the client instance. - """ - return self._universe_domain - - def __init__(self, *, - credentials: Optional[ga_credentials.Credentials] = None, - transport: Optional[Union[str, MetadataServiceTransport, Callable[..., MetadataServiceTransport]]] = None, - client_options: Optional[Union[client_options_lib.ClientOptions, dict]] = None, - client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, - ) -> None: - """Instantiates the metadata service client. - - Args: - credentials (Optional[google.auth.credentials.Credentials]): The - authorization credentials to attach to requests. These - credentials identify the application to the service; if none - are specified, the client will attempt to ascertain the - credentials from the environment. - transport (Optional[Union[str,MetadataServiceTransport,Callable[..., MetadataServiceTransport]]]): - The transport to use, or a Callable that constructs and returns a new transport. - If a Callable is given, it will be called with the same set of initialization - arguments as used in the MetadataServiceTransport constructor. - If set to None, a transport is chosen automatically. - client_options (Optional[Union[google.api_core.client_options.ClientOptions, dict]]): - Custom options for the client. - - 1. The ``api_endpoint`` property can be used to override the - default endpoint provided by the client when ``transport`` is - not explicitly provided. Only if this property is not set and - ``transport`` was not explicitly provided, the endpoint is - determined by the GOOGLE_API_USE_MTLS_ENDPOINT environment - variable, which have one of the following values: - "always" (always use the default mTLS endpoint), "never" (always - use the default regular endpoint) and "auto" (auto-switch to the - default mTLS endpoint if client certificate is present; this is - the default value). - - 2. If the GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable - is "true", then the ``client_cert_source`` property can be used - to provide a client certificate for mTLS transport. If - not provided, the default SSL client certificate will be used if - present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not - set, no client certificate will be used. - - 3. The ``universe_domain`` property can be used to override the - default "googleapis.com" universe. Note that the ``api_endpoint`` - property still takes precedence; and ``universe_domain`` is - currently not supported for mTLS. 
- - client_info (google.api_core.gapic_v1.client_info.ClientInfo): - The client info used to send a user-agent string along with - API requests. If ``None``, then default info will be used. - Generally, you only need to set this if you're developing - your own client library. - - Raises: - google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport - creation failed for any reason. - """ - self._client_options = client_options - if isinstance(self._client_options, dict): - self._client_options = client_options_lib.from_dict(self._client_options) - if self._client_options is None: - self._client_options = client_options_lib.ClientOptions() - self._client_options = cast(client_options_lib.ClientOptions, self._client_options) - - universe_domain_opt = getattr(self._client_options, 'universe_domain', None) - - self._use_client_cert, self._use_mtls_endpoint, self._universe_domain_env = MetadataServiceClient._read_environment_variables() - self._client_cert_source = MetadataServiceClient._get_client_cert_source(self._client_options.client_cert_source, self._use_client_cert) - self._universe_domain = MetadataServiceClient._get_universe_domain(universe_domain_opt, self._universe_domain_env) - self._api_endpoint = None # updated below, depending on `transport` - - # Initialize the universe domain validation. - self._is_universe_domain_valid = False - - api_key_value = getattr(self._client_options, "api_key", None) - if api_key_value and credentials: - raise ValueError("client_options.api_key and credentials are mutually exclusive") - - # Save or instantiate the transport. - # Ordinarily, we provide the transport, but allowing a custom transport - # instance provides an extensibility point for unusual situations. - transport_provided = isinstance(transport, MetadataServiceTransport) - if transport_provided: - # transport is a MetadataServiceTransport instance. - if credentials or self._client_options.credentials_file or api_key_value: - raise ValueError("When providing a transport instance, " - "provide its credentials directly.") - if self._client_options.scopes: - raise ValueError( - "When providing a transport instance, provide its scopes " - "directly." 
- ) - self._transport = cast(MetadataServiceTransport, transport) - self._api_endpoint = self._transport.host - - self._api_endpoint = (self._api_endpoint or - MetadataServiceClient._get_api_endpoint( - self._client_options.api_endpoint, - self._client_cert_source, - self._universe_domain, - self._use_mtls_endpoint)) - - if not transport_provided: - import google.auth._default # type: ignore - - if api_key_value and hasattr(google.auth._default, "get_api_key_credentials"): - credentials = google.auth._default.get_api_key_credentials(api_key_value) - - transport_init: Union[Type[MetadataServiceTransport], Callable[..., MetadataServiceTransport]] = ( - MetadataServiceClient.get_transport_class(transport) - if isinstance(transport, str) or transport is None - else cast(Callable[..., MetadataServiceTransport], transport) - ) - # initialize with the provided callable or the passed in class - self._transport = transport_init( - credentials=credentials, - credentials_file=self._client_options.credentials_file, - host=self._api_endpoint, - scopes=self._client_options.scopes, - client_cert_source_for_mtls=self._client_cert_source, - quota_project_id=self._client_options.quota_project_id, - client_info=client_info, - always_use_jwt_access=True, - api_audience=self._client_options.api_audience, - ) - - def create_entity(self, - request: Optional[Union[metadata_.CreateEntityRequest, dict]] = None, - *, - parent: Optional[str] = None, - entity: Optional[metadata_.Entity] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> metadata_.Entity: - r"""Create a metadata entity. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import dataplex_v1 - - def sample_create_entity(): - # Create a client - client = dataplex_v1.MetadataServiceClient() - - # Initialize request argument(s) - entity = dataplex_v1.Entity() - entity.id = "id_value" - entity.type_ = "FILESET" - entity.asset = "asset_value" - entity.data_path = "data_path_value" - entity.system = "BIGQUERY" - entity.format_.mime_type = "mime_type_value" - entity.schema.user_managed = True - - request = dataplex_v1.CreateEntityRequest( - parent="parent_value", - entity=entity, - ) - - # Make the request - response = client.create_entity(request=request) - - # Handle the response - print(response) - - Args: - request (Union[google.cloud.dataplex_v1.types.CreateEntityRequest, dict]): - The request object. Create a metadata entity request. - parent (str): - Required. The resource name of the parent zone: - ``projects/{project_number}/locations/{location_id}/lakes/{lake_id}/zones/{zone_id}``. - - This corresponds to the ``parent`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - entity (google.cloud.dataplex_v1.types.Entity): - Required. Entity resource. - This corresponds to the ``entity`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. 
-            timeout (float): The timeout for this request.
-            metadata (Sequence[Tuple[str, str]]): Strings which should be
-                sent along with the request as metadata.
-
-        Returns:
-            google.cloud.dataplex_v1.types.Entity:
-                Represents tables and fileset
-                metadata contained within a zone.
-
-        """
-        # Create or coerce a protobuf request object.
-        # - Quick check: If we got a request object, we should *not* have
-        #   gotten any keyword arguments that map to the request.
-        has_flattened_params = any([parent, entity])
-        if request is not None and has_flattened_params:
-            raise ValueError('If the `request` argument is set, then none of '
-                             'the individual field arguments should be set.')
-
-        # - Use the request object if provided (there's no risk of modifying the input as
-        #   there are no flattened fields), or create one.
-        if not isinstance(request, metadata_.CreateEntityRequest):
-            request = metadata_.CreateEntityRequest(request)
-            # If we have keyword arguments corresponding to fields on the
-            # request, apply these.
-            if parent is not None:
-                request.parent = parent
-            if entity is not None:
-                request.entity = entity
-
-        # Wrap the RPC method; this adds retry and timeout information,
-        # and friendly error handling.
-        rpc = self._transport._wrapped_methods[self._transport.create_entity]
-
-        # Certain fields should be provided within the metadata header;
-        # add these here.
-        metadata = tuple(metadata) + (
-            gapic_v1.routing_header.to_grpc_metadata((
-                ("parent", request.parent),
-            )),
-        )
-
-        # Validate the universe domain.
-        self._validate_universe_domain()
-
-        # Send the request.
-        response = rpc(
-            request,
-            retry=retry,
-            timeout=timeout,
-            metadata=metadata,
-        )
-
-        # Done; return the response.
-        return response
-
-    def update_entity(self,
-            request: Optional[Union[metadata_.UpdateEntityRequest, dict]] = None,
-            *,
-            retry: OptionalRetry = gapic_v1.method.DEFAULT,
-            timeout: Union[float, object] = gapic_v1.method.DEFAULT,
-            metadata: Sequence[Tuple[str, str]] = (),
-            ) -> metadata_.Entity:
-        r"""Update a metadata entity. Only supports full resource
-        update.
-
-        .. code-block:: python
-
-            # This snippet has been automatically generated and should be regarded as a
-            # code template only.
-            # It will require modifications to work:
-            # - It may require correct/in-range values for request initialization.
-            # - It may require specifying regional endpoints when creating the service
-            #   client as shown in:
-            #   https://googleapis.dev/python/google-api-core/latest/client_options.html
-            from google.cloud import dataplex_v1
-
-            def sample_update_entity():
-                # Create a client
-                client = dataplex_v1.MetadataServiceClient()
-
-                # Initialize request argument(s)
-                entity = dataplex_v1.Entity()
-                entity.id = "id_value"
-                entity.type_ = "FILESET"
-                entity.asset = "asset_value"
-                entity.data_path = "data_path_value"
-                entity.system = "BIGQUERY"
-                entity.format_.mime_type = "mime_type_value"
-                entity.schema.user_managed = True
-
-                request = dataplex_v1.UpdateEntityRequest(
-                    entity=entity,
-                )
-
-                # Make the request
-                response = client.update_entity(request=request)
-
-                # Handle the response
-                print(response)
-
-        Args:
-            request (Union[google.cloud.dataplex_v1.types.UpdateEntityRequest, dict]):
-                The request object. Update a metadata entity request.
-                The existing entity will be fully
-                replaced by the entity in the request.
-                The entity ID is mutable. To modify the
-                ID, use the current entity ID in the
-                request URL and specify the new ID in
-                the request body.
- retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.cloud.dataplex_v1.types.Entity: - Represents tables and fileset - metadata contained within a zone. - - """ - # Create or coerce a protobuf request object. - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, metadata_.UpdateEntityRequest): - request = metadata_.UpdateEntityRequest(request) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.update_entity] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("entity.name", request.entity.name), - )), - ) - - # Validate the universe domain. - self._validate_universe_domain() - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - def delete_entity(self, - request: Optional[Union[metadata_.DeleteEntityRequest, dict]] = None, - *, - name: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> None: - r"""Delete a metadata entity. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import dataplex_v1 - - def sample_delete_entity(): - # Create a client - client = dataplex_v1.MetadataServiceClient() - - # Initialize request argument(s) - request = dataplex_v1.DeleteEntityRequest( - name="name_value", - etag="etag_value", - ) - - # Make the request - client.delete_entity(request=request) - - Args: - request (Union[google.cloud.dataplex_v1.types.DeleteEntityRequest, dict]): - The request object. Delete a metadata entity request. - name (str): - Required. The resource name of the entity: - ``projects/{project_number}/locations/{location_id}/lakes/{lake_id}/zones/{zone_id}/entities/{entity_id}``. - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. 
-        has_flattened_params = any([name])
-        if request is not None and has_flattened_params:
-            raise ValueError('If the `request` argument is set, then none of '
-                             'the individual field arguments should be set.')
-
-        # - Use the request object if provided (there's no risk of modifying the input as
-        #   there are no flattened fields), or create one.
-        if not isinstance(request, metadata_.DeleteEntityRequest):
-            request = metadata_.DeleteEntityRequest(request)
-            # If we have keyword arguments corresponding to fields on the
-            # request, apply these.
-            if name is not None:
-                request.name = name
-
-        # Wrap the RPC method; this adds retry and timeout information,
-        # and friendly error handling.
-        rpc = self._transport._wrapped_methods[self._transport.delete_entity]
-
-        # Certain fields should be provided within the metadata header;
-        # add these here.
-        metadata = tuple(metadata) + (
-            gapic_v1.routing_header.to_grpc_metadata((
-                ("name", request.name),
-            )),
-        )
-
-        # Validate the universe domain.
-        self._validate_universe_domain()
-
-        # Send the request.
-        rpc(
-            request,
-            retry=retry,
-            timeout=timeout,
-            metadata=metadata,
-        )
-
-    def get_entity(self,
-            request: Optional[Union[metadata_.GetEntityRequest, dict]] = None,
-            *,
-            name: Optional[str] = None,
-            retry: OptionalRetry = gapic_v1.method.DEFAULT,
-            timeout: Union[float, object] = gapic_v1.method.DEFAULT,
-            metadata: Sequence[Tuple[str, str]] = (),
-            ) -> metadata_.Entity:
-        r"""Get a metadata entity.
-
-        .. code-block:: python
-
-            # This snippet has been automatically generated and should be regarded as a
-            # code template only.
-            # It will require modifications to work:
-            # - It may require correct/in-range values for request initialization.
-            # - It may require specifying regional endpoints when creating the service
-            #   client as shown in:
-            #   https://googleapis.dev/python/google-api-core/latest/client_options.html
-            from google.cloud import dataplex_v1
-
-            def sample_get_entity():
-                # Create a client
-                client = dataplex_v1.MetadataServiceClient()
-
-                # Initialize request argument(s)
-                request = dataplex_v1.GetEntityRequest(
-                    name="name_value",
-                )
-
-                # Make the request
-                response = client.get_entity(request=request)
-
-                # Handle the response
-                print(response)
-
-        Args:
-            request (Union[google.cloud.dataplex_v1.types.GetEntityRequest, dict]):
-                The request object. Get metadata entity request.
-            name (str):
-                Required. The resource name of the entity:
-                ``projects/{project_number}/locations/{location_id}/lakes/{lake_id}/zones/{zone_id}/entities/{entity_id}``.
-
-                This corresponds to the ``name`` field
-                on the ``request`` instance; if ``request`` is provided, this
-                should not be set.
-            retry (google.api_core.retry.Retry): Designation of what errors, if any,
-                should be retried.
-            timeout (float): The timeout for this request.
-            metadata (Sequence[Tuple[str, str]]): Strings which should be
-                sent along with the request as metadata.
-
-        Returns:
-            google.cloud.dataplex_v1.types.Entity:
-                Represents tables and fileset
-                metadata contained within a zone.
-
-        """
-        # Create or coerce a protobuf request object.
-        # - Quick check: If we got a request object, we should *not* have
-        #   gotten any keyword arguments that map to the request.
- has_flattened_params = any([name]) - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, metadata_.GetEntityRequest): - request = metadata_.GetEntityRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.get_entity] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Validate the universe domain. - self._validate_universe_domain() - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - def list_entities(self, - request: Optional[Union[metadata_.ListEntitiesRequest, dict]] = None, - *, - parent: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> pagers.ListEntitiesPager: - r"""List metadata entities in a zone. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import dataplex_v1 - - def sample_list_entities(): - # Create a client - client = dataplex_v1.MetadataServiceClient() - - # Initialize request argument(s) - request = dataplex_v1.ListEntitiesRequest( - parent="parent_value", - view="FILESETS", - ) - - # Make the request - page_result = client.list_entities(request=request) - - # Handle the response - for response in page_result: - print(response) - - Args: - request (Union[google.cloud.dataplex_v1.types.ListEntitiesRequest, dict]): - The request object. List metadata entities request. - parent (str): - Required. The resource name of the parent zone: - ``projects/{project_number}/locations/{location_id}/lakes/{lake_id}/zones/{zone_id}``. - - This corresponds to the ``parent`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.cloud.dataplex_v1.services.metadata_service.pagers.ListEntitiesPager: - List metadata entities response. - - Iterating over this object will yield - results and resolve additional pages - automatically. - - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. 
- has_flattened_params = any([parent]) - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, metadata_.ListEntitiesRequest): - request = metadata_.ListEntitiesRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if parent is not None: - request.parent = parent - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.list_entities] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), - ) - - # Validate the universe domain. - self._validate_universe_domain() - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # This method is paged; wrap the response in a pager, which provides - # an `__iter__` convenience method. - response = pagers.ListEntitiesPager( - method=rpc, - request=request, - response=response, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - def create_partition(self, - request: Optional[Union[metadata_.CreatePartitionRequest, dict]] = None, - *, - parent: Optional[str] = None, - partition: Optional[metadata_.Partition] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> metadata_.Partition: - r"""Create a metadata partition. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import dataplex_v1 - - def sample_create_partition(): - # Create a client - client = dataplex_v1.MetadataServiceClient() - - # Initialize request argument(s) - partition = dataplex_v1.Partition() - partition.values = ['values_value1', 'values_value2'] - partition.location = "location_value" - - request = dataplex_v1.CreatePartitionRequest( - parent="parent_value", - partition=partition, - ) - - # Make the request - response = client.create_partition(request=request) - - # Handle the response - print(response) - - Args: - request (Union[google.cloud.dataplex_v1.types.CreatePartitionRequest, dict]): - The request object. Create metadata partition request. - parent (str): - Required. The resource name of the parent zone: - ``projects/{project_number}/locations/{location_id}/lakes/{lake_id}/zones/{zone_id}/entities/{entity_id}``. - - This corresponds to the ``parent`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - partition (google.cloud.dataplex_v1.types.Partition): - Required. Partition resource. - This corresponds to the ``partition`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. 
- retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.cloud.dataplex_v1.types.Partition: - Represents partition metadata - contained within entity instances. - - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([parent, partition]) - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, metadata_.CreatePartitionRequest): - request = metadata_.CreatePartitionRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if parent is not None: - request.parent = parent - if partition is not None: - request.partition = partition - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.create_partition] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), - ) - - # Validate the universe domain. - self._validate_universe_domain() - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - def delete_partition(self, - request: Optional[Union[metadata_.DeletePartitionRequest, dict]] = None, - *, - name: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> None: - r"""Delete a metadata partition. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import dataplex_v1 - - def sample_delete_partition(): - # Create a client - client = dataplex_v1.MetadataServiceClient() - - # Initialize request argument(s) - request = dataplex_v1.DeletePartitionRequest( - name="name_value", - ) - - # Make the request - client.delete_partition(request=request) - - Args: - request (Union[google.cloud.dataplex_v1.types.DeletePartitionRequest, dict]): - The request object. Delete metadata partition request. - name (str): - Required. The resource name of the partition. format: - ``projects/{project_number}/locations/{location_id}/lakes/{lake_id}/zones/{zone_id}/entities/{entity_id}/partitions/{partition_value_path}``. - The {partition_value_path} segment consists of an - ordered sequence of partition values separated by "/". - All values must be provided. 
- - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([name]) - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, metadata_.DeletePartitionRequest): - request = metadata_.DeletePartitionRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.delete_partition] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Validate the universe domain. - self._validate_universe_domain() - - # Send the request. - rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - def get_partition(self, - request: Optional[Union[metadata_.GetPartitionRequest, dict]] = None, - *, - name: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> metadata_.Partition: - r"""Get a metadata partition of an entity. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import dataplex_v1 - - def sample_get_partition(): - # Create a client - client = dataplex_v1.MetadataServiceClient() - - # Initialize request argument(s) - request = dataplex_v1.GetPartitionRequest( - name="name_value", - ) - - # Make the request - response = client.get_partition(request=request) - - # Handle the response - print(response) - - Args: - request (Union[google.cloud.dataplex_v1.types.GetPartitionRequest, dict]): - The request object. Get metadata partition request. - name (str): - Required. The resource name of the partition: - ``projects/{project_number}/locations/{location_id}/lakes/{lake_id}/zones/{zone_id}/entities/{entity_id}/partitions/{partition_value_path}``. - The {partition_value_path} segment consists of an - ordered sequence of partition values separated by "/". - All values must be provided. - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. 
- retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.cloud.dataplex_v1.types.Partition: - Represents partition metadata - contained within entity instances. - - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([name]) - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, metadata_.GetPartitionRequest): - request = metadata_.GetPartitionRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.get_partition] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Validate the universe domain. - self._validate_universe_domain() - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - def list_partitions(self, - request: Optional[Union[metadata_.ListPartitionsRequest, dict]] = None, - *, - parent: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> pagers.ListPartitionsPager: - r"""List metadata partitions of an entity. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import dataplex_v1 - - def sample_list_partitions(): - # Create a client - client = dataplex_v1.MetadataServiceClient() - - # Initialize request argument(s) - request = dataplex_v1.ListPartitionsRequest( - parent="parent_value", - ) - - # Make the request - page_result = client.list_partitions(request=request) - - # Handle the response - for response in page_result: - print(response) - - Args: - request (Union[google.cloud.dataplex_v1.types.ListPartitionsRequest, dict]): - The request object. List metadata partitions request. - parent (str): - Required. The resource name of the parent entity: - ``projects/{project_number}/locations/{location_id}/lakes/{lake_id}/zones/{zone_id}/entities/{entity_id}``. - - This corresponds to the ``parent`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. 
- timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.cloud.dataplex_v1.services.metadata_service.pagers.ListPartitionsPager: - List metadata partitions response. - - Iterating over this object will yield - results and resolve additional pages - automatically. - - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([parent]) - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, metadata_.ListPartitionsRequest): - request = metadata_.ListPartitionsRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if parent is not None: - request.parent = parent - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.list_partitions] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), - ) - - # Validate the universe domain. - self._validate_universe_domain() - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # This method is paged; wrap the response in a pager, which provides - # an `__iter__` convenience method. - response = pagers.ListPartitionsPager( - method=rpc, - request=request, - response=response, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - def __enter__(self) -> "MetadataServiceClient": - return self - - def __exit__(self, type, value, traceback): - """Releases underlying transport's resources. - - .. warning:: - ONLY use as a context manager if the transport is NOT shared - with other clients! Exiting the with block will CLOSE the transport - and may cause errors in other clients! - """ - self.transport.close() - - def list_operations( - self, - request: Optional[operations_pb2.ListOperationsRequest] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> operations_pb2.ListOperationsResponse: - r"""Lists operations that match the specified filter in the request. - - Args: - request (:class:`~.operations_pb2.ListOperationsRequest`): - The request object. Request message for - `ListOperations` method. - retry (google.api_core.retry.Retry): Designation of what errors, - if any, should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - Returns: - ~.operations_pb2.ListOperationsResponse: - Response message for ``ListOperations`` method. - """ - # Create or coerce a protobuf request object. - # The request isn't a proto-plus wrapped type, - # so it must be constructed via keyword expansion. 
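As a hedged aside on the comment above: ``operations_pb2`` requests are raw protobuf messages rather than proto-plus wrappers, so a ``dict`` request must be rebuilt via keyword expansion, which is exactly what the branch that follows does. A minimal sketch (the resource name is a placeholder):

.. code-block:: python

    from google.longrunning import operations_pb2

    request_dict = {"name": "projects/my-project/locations/us-central1", "page_size": 10}
    # Equivalent to the ``**request`` expansion performed below.
    request = operations_pb2.ListOperationsRequest(**request_dict)
    assert request.page_size == 10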
- if isinstance(request, dict): - request = operations_pb2.ListOperationsRequest(**request) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.list_operations] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata( - (("name", request.name),)), - ) - - # Validate the universe domain. - self._validate_universe_domain() - - # Send the request. - response = rpc( - request, retry=retry, timeout=timeout, metadata=metadata,) - - # Done; return the response. - return response - - def get_operation( - self, - request: Optional[operations_pb2.GetOperationRequest] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> operations_pb2.Operation: - r"""Gets the latest state of a long-running operation. - - Args: - request (:class:`~.operations_pb2.GetOperationRequest`): - The request object. Request message for - `GetOperation` method. - retry (google.api_core.retry.Retry): Designation of what errors, - if any, should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - Returns: - ~.operations_pb2.Operation: - An ``Operation`` object. - """ - # Create or coerce a protobuf request object. - # The request isn't a proto-plus wrapped type, - # so it must be constructed via keyword expansion. - if isinstance(request, dict): - request = operations_pb2.GetOperationRequest(**request) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.get_operation] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata( - (("name", request.name),)), - ) - - # Validate the universe domain. - self._validate_universe_domain() - - # Send the request. - response = rpc( - request, retry=retry, timeout=timeout, metadata=metadata,) - - # Done; return the response. - return response - - def delete_operation( - self, - request: Optional[operations_pb2.DeleteOperationRequest] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> None: - r"""Deletes a long-running operation. - - This method indicates that the client is no longer interested - in the operation result. It does not cancel the operation. - If the server doesn't support this method, it returns - `google.rpc.Code.UNIMPLEMENTED`. - - Args: - request (:class:`~.operations_pb2.DeleteOperationRequest`): - The request object. Request message for - `DeleteOperation` method. - retry (google.api_core.retry.Retry): Designation of what errors, - if any, should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - Returns: - None - """ - # Create or coerce a protobuf request object. - # The request isn't a proto-plus wrapped type, - # so it must be constructed via keyword expansion. 
- if isinstance(request, dict): - request = operations_pb2.DeleteOperationRequest(**request) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.delete_operation] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata( - (("name", request.name),)), - ) - - # Validate the universe domain. - self._validate_universe_domain() - - # Send the request. - rpc(request, retry=retry, timeout=timeout, metadata=metadata,) - - def cancel_operation( - self, - request: Optional[operations_pb2.CancelOperationRequest] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> None: - r"""Starts asynchronous cancellation on a long-running operation. - - The server makes a best effort to cancel the operation, but success - is not guaranteed. If the server doesn't support this method, it returns - `google.rpc.Code.UNIMPLEMENTED`. - - Args: - request (:class:`~.operations_pb2.CancelOperationRequest`): - The request object. Request message for - `CancelOperation` method. - retry (google.api_core.retry.Retry): Designation of what errors, - if any, should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - Returns: - None - """ - # Create or coerce a protobuf request object. - # The request isn't a proto-plus wrapped type, - # so it must be constructed via keyword expansion. - if isinstance(request, dict): - request = operations_pb2.CancelOperationRequest(**request) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.cancel_operation] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata( - (("name", request.name),)), - ) - - # Validate the universe domain. - self._validate_universe_domain() - - # Send the request. - rpc(request, retry=retry, timeout=timeout, metadata=metadata,) - - def get_location( - self, - request: Optional[locations_pb2.GetLocationRequest] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> locations_pb2.Location: - r"""Gets information about a location. - - Args: - request (:class:`~.location_pb2.GetLocationRequest`): - The request object. Request message for - `GetLocation` method. - retry (google.api_core.retry.Retry): Designation of what errors, - if any, should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - Returns: - ~.location_pb2.Location: - Location object. - """ - # Create or coerce a protobuf request object. - # The request isn't a proto-plus wrapped type, - # so it must be constructed via keyword expansion. - if isinstance(request, dict): - request = locations_pb2.GetLocationRequest(**request) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. 
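Before the lookup below: the wrapped method carries per-RPC defaults, and a caller can override them by passing explicit ``retry`` and ``timeout`` arguments. A minimal sketch of building a custom policy, using only constructs that already appear in this package (the 30-second timeout is illustrative):

.. code-block:: python

    from google.api_core import exceptions as core_exceptions
    from google.api_core import retry as retries

    # Retry only UNAVAILABLE, with exponential backoff capped at 60 s overall.
    custom_retry = retries.Retry(
        initial=1.0,
        maximum=10.0,
        multiplier=1.3,
        predicate=retries.if_exception_type(core_exceptions.ServiceUnavailable),
        deadline=60.0,
    )
    # client.get_location(request=request, retry=custom_retry, timeout=30.0)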
- rpc = self._transport._wrapped_methods[self._transport.get_location] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata( - (("name", request.name),)), - ) - - # Validate the universe domain. - self._validate_universe_domain() - - # Send the request. - response = rpc( - request, retry=retry, timeout=timeout, metadata=metadata,) - - # Done; return the response. - return response - - def list_locations( - self, - request: Optional[locations_pb2.ListLocationsRequest] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> locations_pb2.ListLocationsResponse: - r"""Lists information about the supported locations for this service. - - Args: - request (:class:`~.location_pb2.ListLocationsRequest`): - The request object. Request message for - `ListLocations` method. - retry (google.api_core.retry.Retry): Designation of what errors, - if any, should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - Returns: - ~.location_pb2.ListLocationsResponse: - Response message for ``ListLocations`` method. - """ - # Create or coerce a protobuf request object. - # The request isn't a proto-plus wrapped type, - # so it must be constructed via keyword expansion. - if isinstance(request, dict): - request = locations_pb2.ListLocationsRequest(**request) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.list_locations] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata( - (("name", request.name),)), - ) - - # Validate the universe domain. - self._validate_universe_domain() - - # Send the request. - response = rpc( - request, retry=retry, timeout=timeout, metadata=metadata,) - - # Done; return the response. - return response - - -DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(gapic_version=package_version.__version__) - - -__all__ = ( - "MetadataServiceClient", -) diff --git a/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/metadata_service/pagers.py b/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/metadata_service/pagers.py deleted file mode 100644 index 2c3cb84a3aa8..000000000000 --- a/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/metadata_service/pagers.py +++ /dev/null @@ -1,297 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
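A hedged usage sketch for the pager classes defined in this module, in the style of the generated samples (argument values are placeholders): item-wise iteration fetches pages lazily, while the ``pages`` property exposes the raw responses.

.. code-block:: python

    from google.cloud import dataplex_v1

    client = dataplex_v1.MetadataServiceClient()
    request = dataplex_v1.ListEntitiesRequest(parent="parent_value", view="TABLES")

    pager = client.list_entities(request=request)
    # Item-wise iteration; additional ``ListEntities`` calls happen transparently.
    for entity in pager:
        print(entity.name)
    # Alternatively, iterate response by response via ``pager.pages``.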
-# -from google.api_core import gapic_v1 -from google.api_core import retry as retries -from google.api_core import retry_async as retries_async -from typing import Any, AsyncIterator, Awaitable, Callable, Sequence, Tuple, Optional, Iterator, Union -try: - OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault, None] - OptionalAsyncRetry = Union[retries_async.AsyncRetry, gapic_v1.method._MethodDefault, None] -except AttributeError: # pragma: NO COVER - OptionalRetry = Union[retries.Retry, object, None] # type: ignore - OptionalAsyncRetry = Union[retries_async.AsyncRetry, object, None] # type: ignore - -from google.cloud.dataplex_v1.types import metadata_ - - -class ListEntitiesPager: - """A pager for iterating through ``list_entities`` requests. - - This class thinly wraps an initial - :class:`google.cloud.dataplex_v1.types.ListEntitiesResponse` object, and - provides an ``__iter__`` method to iterate through its - ``entities`` field. - - If there are more pages, the ``__iter__`` method will make additional - ``ListEntities`` requests and continue to iterate - through the ``entities`` field on the - corresponding responses. - - All the usual :class:`google.cloud.dataplex_v1.types.ListEntitiesResponse` - attributes are available on the pager. If multiple requests are made, only - the most recent response is retained, and thus used for attribute lookup. - """ - def __init__(self, - method: Callable[..., metadata_.ListEntitiesResponse], - request: metadata_.ListEntitiesRequest, - response: metadata_.ListEntitiesResponse, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = ()): - """Instantiate the pager. - - Args: - method (Callable): The method that was originally called, and - which instantiated this pager. - request (google.cloud.dataplex_v1.types.ListEntitiesRequest): - The initial request object. - response (google.cloud.dataplex_v1.types.ListEntitiesResponse): - The initial response object. - retry (google.api_core.retry.Retry): Designation of what errors, - if any, should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - """ - self._method = method - self._request = metadata_.ListEntitiesRequest(request) - self._response = response - self._retry = retry - self._timeout = timeout - self._metadata = metadata - - def __getattr__(self, name: str) -> Any: - return getattr(self._response, name) - - @property - def pages(self) -> Iterator[metadata_.ListEntitiesResponse]: - yield self._response - while self._response.next_page_token: - self._request.page_token = self._response.next_page_token - self._response = self._method(self._request, retry=self._retry, timeout=self._timeout, metadata=self._metadata) - yield self._response - - def __iter__(self) -> Iterator[metadata_.Entity]: - for page in self.pages: - yield from page.entities - - def __repr__(self) -> str: - return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) - - -class ListEntitiesAsyncPager: - """A pager for iterating through ``list_entities`` requests. - - This class thinly wraps an initial - :class:`google.cloud.dataplex_v1.types.ListEntitiesResponse` object, and - provides an ``__aiter__`` method to iterate through its - ``entities`` field. 
- - If there are more pages, the ``__aiter__`` method will make additional - ``ListEntities`` requests and continue to iterate - through the ``entities`` field on the - corresponding responses. - - All the usual :class:`google.cloud.dataplex_v1.types.ListEntitiesResponse` - attributes are available on the pager. If multiple requests are made, only - the most recent response is retained, and thus used for attribute lookup. - """ - def __init__(self, - method: Callable[..., Awaitable[metadata_.ListEntitiesResponse]], - request: metadata_.ListEntitiesRequest, - response: metadata_.ListEntitiesResponse, - *, - retry: OptionalAsyncRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = ()): - """Instantiates the pager. - - Args: - method (Callable): The method that was originally called, and - which instantiated this pager. - request (google.cloud.dataplex_v1.types.ListEntitiesRequest): - The initial request object. - response (google.cloud.dataplex_v1.types.ListEntitiesResponse): - The initial response object. - retry (google.api_core.retry.AsyncRetry): Designation of what errors, - if any, should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - """ - self._method = method - self._request = metadata_.ListEntitiesRequest(request) - self._response = response - self._retry = retry - self._timeout = timeout - self._metadata = metadata - - def __getattr__(self, name: str) -> Any: - return getattr(self._response, name) - - @property - async def pages(self) -> AsyncIterator[metadata_.ListEntitiesResponse]: - yield self._response - while self._response.next_page_token: - self._request.page_token = self._response.next_page_token - self._response = await self._method(self._request, retry=self._retry, timeout=self._timeout, metadata=self._metadata) - yield self._response - def __aiter__(self) -> AsyncIterator[metadata_.Entity]: - async def async_generator(): - async for page in self.pages: - for response in page.entities: - yield response - - return async_generator() - - def __repr__(self) -> str: - return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) - - -class ListPartitionsPager: - """A pager for iterating through ``list_partitions`` requests. - - This class thinly wraps an initial - :class:`google.cloud.dataplex_v1.types.ListPartitionsResponse` object, and - provides an ``__iter__`` method to iterate through its - ``partitions`` field. - - If there are more pages, the ``__iter__`` method will make additional - ``ListPartitions`` requests and continue to iterate - through the ``partitions`` field on the - corresponding responses. - - All the usual :class:`google.cloud.dataplex_v1.types.ListPartitionsResponse` - attributes are available on the pager. If multiple requests are made, only - the most recent response is retained, and thus used for attribute lookup. - """ - def __init__(self, - method: Callable[..., metadata_.ListPartitionsResponse], - request: metadata_.ListPartitionsRequest, - response: metadata_.ListPartitionsResponse, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = ()): - """Instantiate the pager. - - Args: - method (Callable): The method that was originally called, and - which instantiated this pager. 
- request (google.cloud.dataplex_v1.types.ListPartitionsRequest): - The initial request object. - response (google.cloud.dataplex_v1.types.ListPartitionsResponse): - The initial response object. - retry (google.api_core.retry.Retry): Designation of what errors, - if any, should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - """ - self._method = method - self._request = metadata_.ListPartitionsRequest(request) - self._response = response - self._retry = retry - self._timeout = timeout - self._metadata = metadata - - def __getattr__(self, name: str) -> Any: - return getattr(self._response, name) - - @property - def pages(self) -> Iterator[metadata_.ListPartitionsResponse]: - yield self._response - while self._response.next_page_token: - self._request.page_token = self._response.next_page_token - self._response = self._method(self._request, retry=self._retry, timeout=self._timeout, metadata=self._metadata) - yield self._response - - def __iter__(self) -> Iterator[metadata_.Partition]: - for page in self.pages: - yield from page.partitions - - def __repr__(self) -> str: - return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) - - -class ListPartitionsAsyncPager: - """A pager for iterating through ``list_partitions`` requests. - - This class thinly wraps an initial - :class:`google.cloud.dataplex_v1.types.ListPartitionsResponse` object, and - provides an ``__aiter__`` method to iterate through its - ``partitions`` field. - - If there are more pages, the ``__aiter__`` method will make additional - ``ListPartitions`` requests and continue to iterate - through the ``partitions`` field on the - corresponding responses. - - All the usual :class:`google.cloud.dataplex_v1.types.ListPartitionsResponse` - attributes are available on the pager. If multiple requests are made, only - the most recent response is retained, and thus used for attribute lookup. - """ - def __init__(self, - method: Callable[..., Awaitable[metadata_.ListPartitionsResponse]], - request: metadata_.ListPartitionsRequest, - response: metadata_.ListPartitionsResponse, - *, - retry: OptionalAsyncRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = ()): - """Instantiates the pager. - - Args: - method (Callable): The method that was originally called, and - which instantiated this pager. - request (google.cloud.dataplex_v1.types.ListPartitionsRequest): - The initial request object. - response (google.cloud.dataplex_v1.types.ListPartitionsResponse): - The initial response object. - retry (google.api_core.retry.AsyncRetry): Designation of what errors, - if any, should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. 
- """ - self._method = method - self._request = metadata_.ListPartitionsRequest(request) - self._response = response - self._retry = retry - self._timeout = timeout - self._metadata = metadata - - def __getattr__(self, name: str) -> Any: - return getattr(self._response, name) - - @property - async def pages(self) -> AsyncIterator[metadata_.ListPartitionsResponse]: - yield self._response - while self._response.next_page_token: - self._request.page_token = self._response.next_page_token - self._response = await self._method(self._request, retry=self._retry, timeout=self._timeout, metadata=self._metadata) - yield self._response - def __aiter__(self) -> AsyncIterator[metadata_.Partition]: - async def async_generator(): - async for page in self.pages: - for response in page.partitions: - yield response - - return async_generator() - - def __repr__(self) -> str: - return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) diff --git a/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/metadata_service/transports/README.rst b/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/metadata_service/transports/README.rst deleted file mode 100644 index ff25cadba5cb..000000000000 --- a/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/metadata_service/transports/README.rst +++ /dev/null @@ -1,9 +0,0 @@ - -transport inheritance structure -_______________________________ - -`MetadataServiceTransport` is the ABC for all transports. -- public child `MetadataServiceGrpcTransport` for sync gRPC transport (defined in `grpc.py`). -- public child `MetadataServiceGrpcAsyncIOTransport` for async gRPC transport (defined in `grpc_asyncio.py`). -- private child `_BaseMetadataServiceRestTransport` for base REST transport with inner classes `_BaseMETHOD` (defined in `rest_base.py`). -- public child `MetadataServiceRestTransport` for sync REST transport with inner classes `METHOD` derived from the parent's corresponding `_BaseMETHOD` classes (defined in `rest.py`). diff --git a/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/metadata_service/transports/__init__.py b/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/metadata_service/transports/__init__.py deleted file mode 100644 index 74054de3d263..000000000000 --- a/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/metadata_service/transports/__init__.py +++ /dev/null @@ -1,33 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -from collections import OrderedDict -from typing import Dict, Type - -from .base import MetadataServiceTransport -from .grpc import MetadataServiceGrpcTransport -from .grpc_asyncio import MetadataServiceGrpcAsyncIOTransport - - -# Compile a registry of transports. 
-_transport_registry = OrderedDict() # type: Dict[str, Type[MetadataServiceTransport]] -_transport_registry['grpc'] = MetadataServiceGrpcTransport -_transport_registry['grpc_asyncio'] = MetadataServiceGrpcAsyncIOTransport - -__all__ = ( - 'MetadataServiceTransport', - 'MetadataServiceGrpcTransport', - 'MetadataServiceGrpcAsyncIOTransport', -) diff --git a/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/metadata_service/transports/base.py b/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/metadata_service/transports/base.py deleted file mode 100644 index 88a53d1b4543..000000000000 --- a/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/metadata_service/transports/base.py +++ /dev/null @@ -1,389 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -import abc -from typing import Awaitable, Callable, Dict, Optional, Sequence, Union - -from google.cloud.dataplex_v1 import gapic_version as package_version - -import google.auth # type: ignore -import google.api_core -from google.api_core import exceptions as core_exceptions -from google.api_core import gapic_v1 -from google.api_core import retry as retries -from google.auth import credentials as ga_credentials # type: ignore -from google.oauth2 import service_account # type: ignore - -from google.cloud.dataplex_v1.types import metadata_ -from google.cloud.location import locations_pb2 # type: ignore -from google.iam.v1 import iam_policy_pb2 # type: ignore -from google.iam.v1 import policy_pb2 # type: ignore -from google.longrunning import operations_pb2 # type: ignore -from google.protobuf import empty_pb2 # type: ignore - -DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(gapic_version=package_version.__version__) - - -class MetadataServiceTransport(abc.ABC): - """Abstract transport class for MetadataService.""" - - AUTH_SCOPES = ( - 'https://www.googleapis.com/auth/cloud-platform', - ) - - DEFAULT_HOST: str = 'dataplex.googleapis.com' - def __init__( - self, *, - host: str = DEFAULT_HOST, - credentials: Optional[ga_credentials.Credentials] = None, - credentials_file: Optional[str] = None, - scopes: Optional[Sequence[str]] = None, - quota_project_id: Optional[str] = None, - client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, - always_use_jwt_access: Optional[bool] = False, - api_audience: Optional[str] = None, - **kwargs, - ) -> None: - """Instantiate the transport. - - Args: - host (Optional[str]): - The hostname to connect to (default: 'dataplex.googleapis.com'). - credentials (Optional[google.auth.credentials.Credentials]): The - authorization credentials to attach to requests. These - credentials identify the application to the service; if none - are specified, the client will attempt to ascertain the - credentials from the environment. - credentials_file (Optional[str]): A file with credentials that can - be loaded with :func:`google.auth.load_credentials_from_file`. 
- This argument is mutually exclusive with credentials. - scopes (Optional[Sequence[str]]): A list of scopes. - quota_project_id (Optional[str]): An optional project to use for billing - and quota. - client_info (google.api_core.gapic_v1.client_info.ClientInfo): - The client info used to send a user-agent string along with - API requests. If ``None``, then default info will be used. - Generally, you only need to set this if you're developing - your own client library. - always_use_jwt_access (Optional[bool]): Whether self signed JWT should - be used for service account credentials. - """ - - scopes_kwargs = {"scopes": scopes, "default_scopes": self.AUTH_SCOPES} - - # Save the scopes. - self._scopes = scopes - if not hasattr(self, "_ignore_credentials"): - self._ignore_credentials: bool = False - - # If no credentials are provided, then determine the appropriate - # defaults. - if credentials and credentials_file: - raise core_exceptions.DuplicateCredentialArgs("'credentials_file' and 'credentials' are mutually exclusive") - - if credentials_file is not None: - credentials, _ = google.auth.load_credentials_from_file( - credentials_file, - **scopes_kwargs, - quota_project_id=quota_project_id - ) - elif credentials is None and not self._ignore_credentials: - credentials, _ = google.auth.default(**scopes_kwargs, quota_project_id=quota_project_id) - # Don't apply the audience if a credentials file was passed by the user. - if hasattr(credentials, "with_gdch_audience"): - credentials = credentials.with_gdch_audience(api_audience if api_audience else host) - - # If the credentials are service account credentials, then always try to use self signed JWT. - if always_use_jwt_access and isinstance(credentials, service_account.Credentials) and hasattr(service_account.Credentials, "with_always_use_jwt_access"): - credentials = credentials.with_always_use_jwt_access(True) - - # Save the credentials. - self._credentials = credentials - - # Save the hostname. Default to port 443 (HTTPS) if none is specified. - if ':' not in host: - host += ':443' - self._host = host - - @property - def host(self): - return self._host - - def _prep_wrapped_messages(self, client_info): - # Precompute the wrapped methods.
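A hedged note on the table that follows: ``gapic_v1.method.wrap_method`` returns a callable that substitutes the ``default_retry``/``default_timeout`` configured here whenever the caller leaves ``retry``/``timeout`` at ``gapic_v1.method.DEFAULT``. A minimal standalone sketch (the ``echo`` function is hypothetical):

.. code-block:: python

    from google.api_core import gapic_v1

    def echo(request, timeout=None, metadata=()):
        # Stands in for a transport stub; receives the injected defaults.
        return request

    wrapped = gapic_v1.method.wrap_method(echo, default_timeout=60.0)
    # The DEFAULT sentinel is replaced with the configured 60 s timeout.
    assert wrapped("hello") == "hello"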
- self._wrapped_methods = { - self.create_entity: gapic_v1.method.wrap_method( - self.create_entity, - default_timeout=60.0, - client_info=client_info, - ), - self.update_entity: gapic_v1.method.wrap_method( - self.update_entity, - default_timeout=60.0, - client_info=client_info, - ), - self.delete_entity: gapic_v1.method.wrap_method( - self.delete_entity, - default_timeout=60.0, - client_info=client_info, - ), - self.get_entity: gapic_v1.method.wrap_method( - self.get_entity, - default_retry=retries.Retry( - initial=1.0, - maximum=10.0, - multiplier=1.3, - predicate=retries.if_exception_type( - core_exceptions.ServiceUnavailable, - ), - deadline=60.0, - ), - default_timeout=60.0, - client_info=client_info, - ), - self.list_entities: gapic_v1.method.wrap_method( - self.list_entities, - default_retry=retries.Retry( - initial=1.0, - maximum=10.0, - multiplier=1.3, - predicate=retries.if_exception_type( - core_exceptions.ServiceUnavailable, - ), - deadline=60.0, - ), - default_timeout=60.0, - client_info=client_info, - ), - self.create_partition: gapic_v1.method.wrap_method( - self.create_partition, - default_timeout=60.0, - client_info=client_info, - ), - self.delete_partition: gapic_v1.method.wrap_method( - self.delete_partition, - default_timeout=60.0, - client_info=client_info, - ), - self.get_partition: gapic_v1.method.wrap_method( - self.get_partition, - default_retry=retries.Retry( - initial=1.0, - maximum=10.0, - multiplier=1.3, - predicate=retries.if_exception_type( - core_exceptions.ServiceUnavailable, - ), - deadline=60.0, - ), - default_timeout=60.0, - client_info=client_info, - ), - self.list_partitions: gapic_v1.method.wrap_method( - self.list_partitions, - default_retry=retries.Retry( - initial=1.0, - maximum=10.0, - multiplier=1.3, - predicate=retries.if_exception_type( - core_exceptions.ServiceUnavailable, - ), - deadline=60.0, - ), - default_timeout=60.0, - client_info=client_info, - ), - self.get_location: gapic_v1.method.wrap_method( - self.get_location, - default_timeout=None, - client_info=client_info, - ), - self.list_locations: gapic_v1.method.wrap_method( - self.list_locations, - default_timeout=None, - client_info=client_info, - ), - self.cancel_operation: gapic_v1.method.wrap_method( - self.cancel_operation, - default_timeout=None, - client_info=client_info, - ), - self.delete_operation: gapic_v1.method.wrap_method( - self.delete_operation, - default_timeout=None, - client_info=client_info, - ), - self.get_operation: gapic_v1.method.wrap_method( - self.get_operation, - default_timeout=None, - client_info=client_info, - ), - self.list_operations: gapic_v1.method.wrap_method( - self.list_operations, - default_timeout=None, - client_info=client_info, - ), - } - - def close(self): - """Closes resources associated with the transport. - - .. warning:: - Only call this method if the transport is NOT shared - with other clients - this may cause errors in other clients! 
- """ - raise NotImplementedError() - - @property - def create_entity(self) -> Callable[ - [metadata_.CreateEntityRequest], - Union[ - metadata_.Entity, - Awaitable[metadata_.Entity] - ]]: - raise NotImplementedError() - - @property - def update_entity(self) -> Callable[ - [metadata_.UpdateEntityRequest], - Union[ - metadata_.Entity, - Awaitable[metadata_.Entity] - ]]: - raise NotImplementedError() - - @property - def delete_entity(self) -> Callable[ - [metadata_.DeleteEntityRequest], - Union[ - empty_pb2.Empty, - Awaitable[empty_pb2.Empty] - ]]: - raise NotImplementedError() - - @property - def get_entity(self) -> Callable[ - [metadata_.GetEntityRequest], - Union[ - metadata_.Entity, - Awaitable[metadata_.Entity] - ]]: - raise NotImplementedError() - - @property - def list_entities(self) -> Callable[ - [metadata_.ListEntitiesRequest], - Union[ - metadata_.ListEntitiesResponse, - Awaitable[metadata_.ListEntitiesResponse] - ]]: - raise NotImplementedError() - - @property - def create_partition(self) -> Callable[ - [metadata_.CreatePartitionRequest], - Union[ - metadata_.Partition, - Awaitable[metadata_.Partition] - ]]: - raise NotImplementedError() - - @property - def delete_partition(self) -> Callable[ - [metadata_.DeletePartitionRequest], - Union[ - empty_pb2.Empty, - Awaitable[empty_pb2.Empty] - ]]: - raise NotImplementedError() - - @property - def get_partition(self) -> Callable[ - [metadata_.GetPartitionRequest], - Union[ - metadata_.Partition, - Awaitable[metadata_.Partition] - ]]: - raise NotImplementedError() - - @property - def list_partitions(self) -> Callable[ - [metadata_.ListPartitionsRequest], - Union[ - metadata_.ListPartitionsResponse, - Awaitable[metadata_.ListPartitionsResponse] - ]]: - raise NotImplementedError() - - @property - def list_operations( - self, - ) -> Callable[ - [operations_pb2.ListOperationsRequest], - Union[operations_pb2.ListOperationsResponse, Awaitable[operations_pb2.ListOperationsResponse]], - ]: - raise NotImplementedError() - - @property - def get_operation( - self, - ) -> Callable[ - [operations_pb2.GetOperationRequest], - Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], - ]: - raise NotImplementedError() - - @property - def cancel_operation( - self, - ) -> Callable[ - [operations_pb2.CancelOperationRequest], - None, - ]: - raise NotImplementedError() - - @property - def delete_operation( - self, - ) -> Callable[ - [operations_pb2.DeleteOperationRequest], - None, - ]: - raise NotImplementedError() - - @property - def get_location(self, - ) -> Callable[ - [locations_pb2.GetLocationRequest], - Union[locations_pb2.Location, Awaitable[locations_pb2.Location]], - ]: - raise NotImplementedError() - - @property - def list_locations(self, - ) -> Callable[ - [locations_pb2.ListLocationsRequest], - Union[locations_pb2.ListLocationsResponse, Awaitable[locations_pb2.ListLocationsResponse]], - ]: - raise NotImplementedError() - - @property - def kind(self) -> str: - raise NotImplementedError() - - -__all__ = ( - 'MetadataServiceTransport', -) diff --git a/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/metadata_service/transports/grpc.py b/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/metadata_service/transports/grpc.py deleted file mode 100644 index b89c63165152..000000000000 --- a/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/metadata_service/transports/grpc.py +++ /dev/null @@ -1,593 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# 
Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -import warnings -from typing import Callable, Dict, Optional, Sequence, Tuple, Union - -from google.api_core import grpc_helpers -from google.api_core import gapic_v1 -import google.auth # type: ignore -from google.auth import credentials as ga_credentials # type: ignore -from google.auth.transport.grpc import SslCredentials # type: ignore - -import grpc # type: ignore - -from google.cloud.dataplex_v1.types import metadata_ -from google.cloud.location import locations_pb2 # type: ignore -from google.iam.v1 import iam_policy_pb2 # type: ignore -from google.iam.v1 import policy_pb2 # type: ignore -from google.longrunning import operations_pb2 # type: ignore -from google.protobuf import empty_pb2 # type: ignore -from .base import MetadataServiceTransport, DEFAULT_CLIENT_INFO - - -class MetadataServiceGrpcTransport(MetadataServiceTransport): - """gRPC backend transport for MetadataService. - - Metadata service manages metadata resources such as tables, - filesets and partitions. - - This class defines the same methods as the primary client, so the - primary client can load the underlying transport implementation - and call it. - - It sends protocol buffers over the wire using gRPC (which is built on - top of HTTP/2); the ``grpcio`` package must be installed. - """ - _stubs: Dict[str, Callable] - - def __init__(self, *, - host: str = 'dataplex.googleapis.com', - credentials: Optional[ga_credentials.Credentials] = None, - credentials_file: Optional[str] = None, - scopes: Optional[Sequence[str]] = None, - channel: Optional[Union[grpc.Channel, Callable[..., grpc.Channel]]] = None, - api_mtls_endpoint: Optional[str] = None, - client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None, - ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None, - client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, - quota_project_id: Optional[str] = None, - client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, - always_use_jwt_access: Optional[bool] = False, - api_audience: Optional[str] = None, - ) -> None: - """Instantiate the transport. - - Args: - host (Optional[str]): - The hostname to connect to (default: 'dataplex.googleapis.com'). - credentials (Optional[google.auth.credentials.Credentials]): The - authorization credentials to attach to requests. These - credentials identify the application to the service; if none - are specified, the client will attempt to ascertain the - credentials from the environment. - This argument is ignored if a ``channel`` instance is provided. - credentials_file (Optional[str]): A file with credentials that can - be loaded with :func:`google.auth.load_credentials_from_file`. - This argument is ignored if a ``channel`` instance is provided. - scopes (Optional[Sequence[str]]): A list of scopes. This argument is - ignored if a ``channel`` instance is provided.
- channel (Optional[Union[grpc.Channel, Callable[..., grpc.Channel]]]): - A ``Channel`` instance through which to make calls, or a Callable - that constructs and returns one. If set to None, ``self.create_channel`` - is used to create the channel. If a Callable is given, it will be called - with the same arguments as used in ``self.create_channel``. - api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint. - If provided, it overrides the ``host`` argument and tries to create - a mutual TLS channel with client SSL credentials from - ``client_cert_source`` or application default SSL credentials. - client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]): - Deprecated. A callback to provide client SSL certificate bytes and - private key bytes, both in PEM format. It is ignored if - ``api_mtls_endpoint`` is None. - ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials - for the grpc channel. It is ignored if a ``channel`` instance is provided. - client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]): - A callback to provide client certificate bytes and private key bytes, - both in PEM format. It is used to configure a mutual TLS channel. It is - ignored if a ``channel`` instance or ``ssl_channel_credentials`` is provided. - quota_project_id (Optional[str]): An optional project to use for billing - and quota. - client_info (google.api_core.gapic_v1.client_info.ClientInfo): - The client info used to send a user-agent string along with - API requests. If ``None``, then default info will be used. - Generally, you only need to set this if you're developing - your own client library. - always_use_jwt_access (Optional[bool]): Whether self signed JWT should - be used for service account credentials. - - Raises: - google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport - creation failed for any reason. - google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` - and ``credentials_file`` are passed. - """ - self._grpc_channel = None - self._ssl_channel_credentials = ssl_channel_credentials - self._stubs: Dict[str, Callable] = {} - - if api_mtls_endpoint: - warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning) - if client_cert_source: - warnings.warn("client_cert_source is deprecated", DeprecationWarning) - - if isinstance(channel, grpc.Channel): - # Ignore credentials if a channel was passed. - credentials = None - self._ignore_credentials = True - # If a channel was explicitly provided, set it. - self._grpc_channel = channel - self._ssl_channel_credentials = None - - else: - if api_mtls_endpoint: - host = api_mtls_endpoint - - # Create SSL credentials with client_cert_source or application - # default SSL credentials. 
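A hedged sketch of the deprecated callback consumed by the branch below: it must return PEM-encoded ``(certificate_chain, private_key)`` bytes, as the docstring above describes (file paths are placeholders):

.. code-block:: python

    def client_cert_source():
        # Return (certificate_chain, private_key) as PEM bytes.
        with open("client_cert.pem", "rb") as cert, open("client_key.pem", "rb") as key:
            return cert.read(), key.read()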
- if client_cert_source: - cert, key = client_cert_source() - self._ssl_channel_credentials = grpc.ssl_channel_credentials( - certificate_chain=cert, private_key=key - ) - else: - self._ssl_channel_credentials = SslCredentials().ssl_credentials - - else: - if client_cert_source_for_mtls and not ssl_channel_credentials: - cert, key = client_cert_source_for_mtls() - self._ssl_channel_credentials = grpc.ssl_channel_credentials( - certificate_chain=cert, private_key=key - ) - - # The base transport sets the host, credentials and scopes - super().__init__( - host=host, - credentials=credentials, - credentials_file=credentials_file, - scopes=scopes, - quota_project_id=quota_project_id, - client_info=client_info, - always_use_jwt_access=always_use_jwt_access, - api_audience=api_audience, - ) - - if not self._grpc_channel: - # initialize with the provided callable or the default channel - channel_init = channel or type(self).create_channel - self._grpc_channel = channel_init( - self._host, - # use the credentials which are saved - credentials=self._credentials, - # Set ``credentials_file`` to ``None`` here as - # the credentials that we saved earlier should be used. - credentials_file=None, - scopes=self._scopes, - ssl_credentials=self._ssl_channel_credentials, - quota_project_id=quota_project_id, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ], - ) - - # Wrap messages. This must be done after self._grpc_channel exists - self._prep_wrapped_messages(client_info) - - @classmethod - def create_channel(cls, - host: str = 'dataplex.googleapis.com', - credentials: Optional[ga_credentials.Credentials] = None, - credentials_file: Optional[str] = None, - scopes: Optional[Sequence[str]] = None, - quota_project_id: Optional[str] = None, - **kwargs) -> grpc.Channel: - """Create and return a gRPC channel object. - Args: - host (Optional[str]): The host for the channel to use. - credentials (Optional[~.Credentials]): The - authorization credentials to attach to requests. These - credentials identify this application to the service. If - none are specified, the client will attempt to ascertain - the credentials from the environment. - credentials_file (Optional[str]): A file with credentials that can - be loaded with :func:`google.auth.load_credentials_from_file`. - This argument is mutually exclusive with credentials. - scopes (Optional[Sequence[str]]): An optional list of scopes needed for this - service. These are only used when credentials are not specified and - are passed to :func:`google.auth.default`. - quota_project_id (Optional[str]): An optional project to use for billing - and quota. - kwargs (Optional[dict]): Keyword arguments, which are passed to the - channel creation. - Returns: - grpc.Channel: A gRPC channel object. - - Raises: - google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` - and ``credentials_file`` are passed. - """ - - return grpc_helpers.create_channel( - host, - credentials=credentials, - credentials_file=credentials_file, - quota_project_id=quota_project_id, - default_scopes=cls.AUTH_SCOPES, - scopes=scopes, - default_host=cls.DEFAULT_HOST, - **kwargs - ) - - @property - def grpc_channel(self) -> grpc.Channel: - """Return the channel designed to connect to this service. - """ - return self._grpc_channel - - @property - def create_entity(self) -> Callable[ - [metadata_.CreateEntityRequest], - metadata_.Entity]: - r"""Return a callable for the create entity method over gRPC. - - Create a metadata entity.
- - Returns: - Callable[[~.CreateEntityRequest], - ~.Entity]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'create_entity' not in self._stubs: - self._stubs['create_entity'] = self.grpc_channel.unary_unary( - '/google.cloud.dataplex.v1.MetadataService/CreateEntity', - request_serializer=metadata_.CreateEntityRequest.serialize, - response_deserializer=metadata_.Entity.deserialize, - ) - return self._stubs['create_entity'] - - @property - def update_entity(self) -> Callable[ - [metadata_.UpdateEntityRequest], - metadata_.Entity]: - r"""Return a callable for the update entity method over gRPC. - - Update a metadata entity. Only supports full resource - update. - - Returns: - Callable[[~.UpdateEntityRequest], - ~.Entity]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'update_entity' not in self._stubs: - self._stubs['update_entity'] = self.grpc_channel.unary_unary( - '/google.cloud.dataplex.v1.MetadataService/UpdateEntity', - request_serializer=metadata_.UpdateEntityRequest.serialize, - response_deserializer=metadata_.Entity.deserialize, - ) - return self._stubs['update_entity'] - - @property - def delete_entity(self) -> Callable[ - [metadata_.DeleteEntityRequest], - empty_pb2.Empty]: - r"""Return a callable for the delete entity method over gRPC. - - Delete a metadata entity. - - Returns: - Callable[[~.DeleteEntityRequest], - ~.Empty]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'delete_entity' not in self._stubs: - self._stubs['delete_entity'] = self.grpc_channel.unary_unary( - '/google.cloud.dataplex.v1.MetadataService/DeleteEntity', - request_serializer=metadata_.DeleteEntityRequest.serialize, - response_deserializer=empty_pb2.Empty.FromString, - ) - return self._stubs['delete_entity'] - - @property - def get_entity(self) -> Callable[ - [metadata_.GetEntityRequest], - metadata_.Entity]: - r"""Return a callable for the get entity method over gRPC. - - Get a metadata entity. - - Returns: - Callable[[~.GetEntityRequest], - ~.Entity]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'get_entity' not in self._stubs: - self._stubs['get_entity'] = self.grpc_channel.unary_unary( - '/google.cloud.dataplex.v1.MetadataService/GetEntity', - request_serializer=metadata_.GetEntityRequest.serialize, - response_deserializer=metadata_.Entity.deserialize, - ) - return self._stubs['get_entity'] - - @property - def list_entities(self) -> Callable[ - [metadata_.ListEntitiesRequest], - metadata_.ListEntitiesResponse]: - r"""Return a callable for the list entities method over gRPC. - - List metadata entities in a zone. 
- - Returns: - Callable[[~.ListEntitiesRequest], - ~.ListEntitiesResponse]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'list_entities' not in self._stubs: - self._stubs['list_entities'] = self.grpc_channel.unary_unary( - '/google.cloud.dataplex.v1.MetadataService/ListEntities', - request_serializer=metadata_.ListEntitiesRequest.serialize, - response_deserializer=metadata_.ListEntitiesResponse.deserialize, - ) - return self._stubs['list_entities'] - - @property - def create_partition(self) -> Callable[ - [metadata_.CreatePartitionRequest], - metadata_.Partition]: - r"""Return a callable for the create partition method over gRPC. - - Create a metadata partition. - - Returns: - Callable[[~.CreatePartitionRequest], - ~.Partition]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'create_partition' not in self._stubs: - self._stubs['create_partition'] = self.grpc_channel.unary_unary( - '/google.cloud.dataplex.v1.MetadataService/CreatePartition', - request_serializer=metadata_.CreatePartitionRequest.serialize, - response_deserializer=metadata_.Partition.deserialize, - ) - return self._stubs['create_partition'] - - @property - def delete_partition(self) -> Callable[ - [metadata_.DeletePartitionRequest], - empty_pb2.Empty]: - r"""Return a callable for the delete partition method over gRPC. - - Delete a metadata partition. - - Returns: - Callable[[~.DeletePartitionRequest], - ~.Empty]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'delete_partition' not in self._stubs: - self._stubs['delete_partition'] = self.grpc_channel.unary_unary( - '/google.cloud.dataplex.v1.MetadataService/DeletePartition', - request_serializer=metadata_.DeletePartitionRequest.serialize, - response_deserializer=empty_pb2.Empty.FromString, - ) - return self._stubs['delete_partition'] - - @property - def get_partition(self) -> Callable[ - [metadata_.GetPartitionRequest], - metadata_.Partition]: - r"""Return a callable for the get partition method over gRPC. - - Get a metadata partition of an entity. - - Returns: - Callable[[~.GetPartitionRequest], - ~.Partition]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. 
- if 'get_partition' not in self._stubs: - self._stubs['get_partition'] = self.grpc_channel.unary_unary( - '/google.cloud.dataplex.v1.MetadataService/GetPartition', - request_serializer=metadata_.GetPartitionRequest.serialize, - response_deserializer=metadata_.Partition.deserialize, - ) - return self._stubs['get_partition'] - - @property - def list_partitions(self) -> Callable[ - [metadata_.ListPartitionsRequest], - metadata_.ListPartitionsResponse]: - r"""Return a callable for the list partitions method over gRPC. - - List metadata partitions of an entity. - - Returns: - Callable[[~.ListPartitionsRequest], - ~.ListPartitionsResponse]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'list_partitions' not in self._stubs: - self._stubs['list_partitions'] = self.grpc_channel.unary_unary( - '/google.cloud.dataplex.v1.MetadataService/ListPartitions', - request_serializer=metadata_.ListPartitionsRequest.serialize, - response_deserializer=metadata_.ListPartitionsResponse.deserialize, - ) - return self._stubs['list_partitions'] - - def close(self): - self.grpc_channel.close() - - @property - def delete_operation( - self, - ) -> Callable[[operations_pb2.DeleteOperationRequest], None]: - r"""Return a callable for the delete_operation method over gRPC. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if "delete_operation" not in self._stubs: - self._stubs["delete_operation"] = self.grpc_channel.unary_unary( - "/google.longrunning.Operations/DeleteOperation", - request_serializer=operations_pb2.DeleteOperationRequest.SerializeToString, - response_deserializer=None, - ) - return self._stubs["delete_operation"] - - @property - def cancel_operation( - self, - ) -> Callable[[operations_pb2.CancelOperationRequest], None]: - r"""Return a callable for the cancel_operation method over gRPC. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if "cancel_operation" not in self._stubs: - self._stubs["cancel_operation"] = self.grpc_channel.unary_unary( - "/google.longrunning.Operations/CancelOperation", - request_serializer=operations_pb2.CancelOperationRequest.SerializeToString, - response_deserializer=None, - ) - return self._stubs["cancel_operation"] - - @property - def get_operation( - self, - ) -> Callable[[operations_pb2.GetOperationRequest], operations_pb2.Operation]: - r"""Return a callable for the get_operation method over gRPC. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. 
- if "get_operation" not in self._stubs: - self._stubs["get_operation"] = self.grpc_channel.unary_unary( - "/google.longrunning.Operations/GetOperation", - request_serializer=operations_pb2.GetOperationRequest.SerializeToString, - response_deserializer=operations_pb2.Operation.FromString, - ) - return self._stubs["get_operation"] - - @property - def list_operations( - self, - ) -> Callable[[operations_pb2.ListOperationsRequest], operations_pb2.ListOperationsResponse]: - r"""Return a callable for the list_operations method over gRPC. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if "list_operations" not in self._stubs: - self._stubs["list_operations"] = self.grpc_channel.unary_unary( - "/google.longrunning.Operations/ListOperations", - request_serializer=operations_pb2.ListOperationsRequest.SerializeToString, - response_deserializer=operations_pb2.ListOperationsResponse.FromString, - ) - return self._stubs["list_operations"] - - @property - def list_locations( - self, - ) -> Callable[[locations_pb2.ListLocationsRequest], locations_pb2.ListLocationsResponse]: - r"""Return a callable for the list locations method over gRPC. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if "list_locations" not in self._stubs: - self._stubs["list_locations"] = self.grpc_channel.unary_unary( - "/google.cloud.location.Locations/ListLocations", - request_serializer=locations_pb2.ListLocationsRequest.SerializeToString, - response_deserializer=locations_pb2.ListLocationsResponse.FromString, - ) - return self._stubs["list_locations"] - - @property - def get_location( - self, - ) -> Callable[[locations_pb2.GetLocationRequest], locations_pb2.Location]: - r"""Return a callable for the list locations method over gRPC. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if "get_location" not in self._stubs: - self._stubs["get_location"] = self.grpc_channel.unary_unary( - "/google.cloud.location.Locations/GetLocation", - request_serializer=locations_pb2.GetLocationRequest.SerializeToString, - response_deserializer=locations_pb2.Location.FromString, - ) - return self._stubs["get_location"] - - @property - def kind(self) -> str: - return "grpc" - - -__all__ = ( - 'MetadataServiceGrpcTransport', -) diff --git a/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/metadata_service/transports/grpc_asyncio.py b/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/metadata_service/transports/grpc_asyncio.py deleted file mode 100644 index 188524e825f9..000000000000 --- a/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/metadata_service/transports/grpc_asyncio.py +++ /dev/null @@ -1,720 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -import inspect -import warnings -from typing import Awaitable, Callable, Dict, Optional, Sequence, Tuple, Union - -from google.api_core import gapic_v1 -from google.api_core import grpc_helpers_async -from google.api_core import exceptions as core_exceptions -from google.api_core import retry_async as retries -from google.auth import credentials as ga_credentials # type: ignore -from google.auth.transport.grpc import SslCredentials # type: ignore - -import grpc # type: ignore -from grpc.experimental import aio # type: ignore - -from google.cloud.dataplex_v1.types import metadata_ -from google.cloud.location import locations_pb2 # type: ignore -from google.iam.v1 import iam_policy_pb2 # type: ignore -from google.iam.v1 import policy_pb2 # type: ignore -from google.longrunning import operations_pb2 # type: ignore -from google.protobuf import empty_pb2 # type: ignore -from .base import MetadataServiceTransport, DEFAULT_CLIENT_INFO -from .grpc import MetadataServiceGrpcTransport - - -class MetadataServiceGrpcAsyncIOTransport(MetadataServiceTransport): - """gRPC AsyncIO backend transport for MetadataService. - - Metadata service manages metadata resources such as tables, - filesets and partitions. - - This class defines the same methods as the primary client, so the - primary client can load the underlying transport implementation - and call it. - - It sends protocol buffers over the wire using gRPC (which is built on - top of HTTP/2); the ``grpcio`` package must be installed. - """ - - _grpc_channel: aio.Channel - _stubs: Dict[str, Callable] = {} - - @classmethod - def create_channel(cls, - host: str = 'dataplex.googleapis.com', - credentials: Optional[ga_credentials.Credentials] = None, - credentials_file: Optional[str] = None, - scopes: Optional[Sequence[str]] = None, - quota_project_id: Optional[str] = None, - **kwargs) -> aio.Channel: - """Create and return a gRPC AsyncIO channel object. - Args: - host (Optional[str]): The host for the channel to use. - credentials (Optional[~.Credentials]): The - authorization credentials to attach to requests. These - credentials identify this application to the service. If - none are specified, the client will attempt to ascertain - the credentials from the environment. - credentials_file (Optional[str]): A file with credentials that can - be loaded with :func:`google.auth.load_credentials_from_file`. - scopes (Optional[Sequence[str]]): A optional list of scopes needed for this - service. These are only used when credentials are not specified and - are passed to :func:`google.auth.default`. - quota_project_id (Optional[str]): An optional project to use for billing - and quota. - kwargs (Optional[dict]): Keyword arguments, which are passed to the - channel creation. - Returns: - aio.Channel: A gRPC AsyncIO channel object. 
- """ - - return grpc_helpers_async.create_channel( - host, - credentials=credentials, - credentials_file=credentials_file, - quota_project_id=quota_project_id, - default_scopes=cls.AUTH_SCOPES, - scopes=scopes, - default_host=cls.DEFAULT_HOST, - **kwargs - ) - - def __init__(self, *, - host: str = 'dataplex.googleapis.com', - credentials: Optional[ga_credentials.Credentials] = None, - credentials_file: Optional[str] = None, - scopes: Optional[Sequence[str]] = None, - channel: Optional[Union[aio.Channel, Callable[..., aio.Channel]]] = None, - api_mtls_endpoint: Optional[str] = None, - client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None, - ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None, - client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, - quota_project_id: Optional[str] = None, - client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, - always_use_jwt_access: Optional[bool] = False, - api_audience: Optional[str] = None, - ) -> None: - """Instantiate the transport. - - Args: - host (Optional[str]): - The hostname to connect to (default: 'dataplex.googleapis.com'). - credentials (Optional[google.auth.credentials.Credentials]): The - authorization credentials to attach to requests. These - credentials identify the application to the service; if none - are specified, the client will attempt to ascertain the - credentials from the environment. - This argument is ignored if a ``channel`` instance is provided. - credentials_file (Optional[str]): A file with credentials that can - be loaded with :func:`google.auth.load_credentials_from_file`. - This argument is ignored if a ``channel`` instance is provided. - scopes (Optional[Sequence[str]]): A optional list of scopes needed for this - service. These are only used when credentials are not specified and - are passed to :func:`google.auth.default`. - channel (Optional[Union[aio.Channel, Callable[..., aio.Channel]]]): - A ``Channel`` instance through which to make calls, or a Callable - that constructs and returns one. If set to None, ``self.create_channel`` - is used to create the channel. If a Callable is given, it will be called - with the same arguments as used in ``self.create_channel``. - api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint. - If provided, it overrides the ``host`` argument and tries to create - a mutual TLS channel with client SSL credentials from - ``client_cert_source`` or application default SSL credentials. - client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]): - Deprecated. A callback to provide client SSL certificate bytes and - private key bytes, both in PEM format. It is ignored if - ``api_mtls_endpoint`` is None. - ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials - for the grpc channel. It is ignored if a ``channel`` instance is provided. - client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]): - A callback to provide client certificate bytes and private key bytes, - both in PEM format. It is used to configure a mutual TLS channel. It is - ignored if a ``channel`` instance or ``ssl_channel_credentials`` is provided. - quota_project_id (Optional[str]): An optional project to use for billing - and quota. - client_info (google.api_core.gapic_v1.client_info.ClientInfo): - The client info used to send a user-agent string along with - API requests. If ``None``, then default info will be used. 
- Generally, you only need to set this if you're developing - your own client library. - always_use_jwt_access (Optional[bool]): Whether self signed JWT should - be used for service account credentials. - - Raises: - google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport - creation failed for any reason. - google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` - and ``credentials_file`` are passed. - """ - self._grpc_channel = None - self._ssl_channel_credentials = ssl_channel_credentials - self._stubs: Dict[str, Callable] = {} - - if api_mtls_endpoint: - warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning) - if client_cert_source: - warnings.warn("client_cert_source is deprecated", DeprecationWarning) - - if isinstance(channel, aio.Channel): - # Ignore credentials if a channel was passed. - credentials = None - self._ignore_credentials = True - # If a channel was explicitly provided, set it. - self._grpc_channel = channel - self._ssl_channel_credentials = None - else: - if api_mtls_endpoint: - host = api_mtls_endpoint - - # Create SSL credentials with client_cert_source or application - # default SSL credentials. - if client_cert_source: - cert, key = client_cert_source() - self._ssl_channel_credentials = grpc.ssl_channel_credentials( - certificate_chain=cert, private_key=key - ) - else: - self._ssl_channel_credentials = SslCredentials().ssl_credentials - - else: - if client_cert_source_for_mtls and not ssl_channel_credentials: - cert, key = client_cert_source_for_mtls() - self._ssl_channel_credentials = grpc.ssl_channel_credentials( - certificate_chain=cert, private_key=key - ) - - # The base transport sets the host, credentials and scopes - super().__init__( - host=host, - credentials=credentials, - credentials_file=credentials_file, - scopes=scopes, - quota_project_id=quota_project_id, - client_info=client_info, - always_use_jwt_access=always_use_jwt_access, - api_audience=api_audience, - ) - - if not self._grpc_channel: - # initialize with the provided callable or the default channel - channel_init = channel or type(self).create_channel - self._grpc_channel = channel_init( - self._host, - # use the credentials which are saved - credentials=self._credentials, - # Set ``credentials_file`` to ``None`` here as - # the credentials that we saved earlier should be used. - credentials_file=None, - scopes=self._scopes, - ssl_credentials=self._ssl_channel_credentials, - quota_project_id=quota_project_id, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ], - ) - - # Wrap messages. This must be done after self._grpc_channel exists - self._wrap_with_kind = "kind" in inspect.signature(gapic_v1.method_async.wrap_method).parameters - self._prep_wrapped_messages(client_info) - - @property - def grpc_channel(self) -> aio.Channel: - """Create the channel designed to connect to this service. - - This property caches on the instance; repeated calls return - the same channel. - """ - # Return the channel from cache. - return self._grpc_channel - - @property - def create_entity(self) -> Callable[ - [metadata_.CreateEntityRequest], - Awaitable[metadata_.Entity]]: - r"""Return a callable for the create entity method over gRPC. - - Create a metadata entity. - - Returns: - Callable[[~.CreateEntityRequest], - Awaitable[~.Entity]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. 
- # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'create_entity' not in self._stubs: - self._stubs['create_entity'] = self.grpc_channel.unary_unary( - '/google.cloud.dataplex.v1.MetadataService/CreateEntity', - request_serializer=metadata_.CreateEntityRequest.serialize, - response_deserializer=metadata_.Entity.deserialize, - ) - return self._stubs['create_entity'] - - @property - def update_entity(self) -> Callable[ - [metadata_.UpdateEntityRequest], - Awaitable[metadata_.Entity]]: - r"""Return a callable for the update entity method over gRPC. - - Update a metadata entity. Only supports full resource - update. - - Returns: - Callable[[~.UpdateEntityRequest], - Awaitable[~.Entity]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'update_entity' not in self._stubs: - self._stubs['update_entity'] = self.grpc_channel.unary_unary( - '/google.cloud.dataplex.v1.MetadataService/UpdateEntity', - request_serializer=metadata_.UpdateEntityRequest.serialize, - response_deserializer=metadata_.Entity.deserialize, - ) - return self._stubs['update_entity'] - - @property - def delete_entity(self) -> Callable[ - [metadata_.DeleteEntityRequest], - Awaitable[empty_pb2.Empty]]: - r"""Return a callable for the delete entity method over gRPC. - - Delete a metadata entity. - - Returns: - Callable[[~.DeleteEntityRequest], - Awaitable[~.Empty]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'delete_entity' not in self._stubs: - self._stubs['delete_entity'] = self.grpc_channel.unary_unary( - '/google.cloud.dataplex.v1.MetadataService/DeleteEntity', - request_serializer=metadata_.DeleteEntityRequest.serialize, - response_deserializer=empty_pb2.Empty.FromString, - ) - return self._stubs['delete_entity'] - - @property - def get_entity(self) -> Callable[ - [metadata_.GetEntityRequest], - Awaitable[metadata_.Entity]]: - r"""Return a callable for the get entity method over gRPC. - - Get a metadata entity. - - Returns: - Callable[[~.GetEntityRequest], - Awaitable[~.Entity]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'get_entity' not in self._stubs: - self._stubs['get_entity'] = self.grpc_channel.unary_unary( - '/google.cloud.dataplex.v1.MetadataService/GetEntity', - request_serializer=metadata_.GetEntityRequest.serialize, - response_deserializer=metadata_.Entity.deserialize, - ) - return self._stubs['get_entity'] - - @property - def list_entities(self) -> Callable[ - [metadata_.ListEntitiesRequest], - Awaitable[metadata_.ListEntitiesResponse]]: - r"""Return a callable for the list entities method over gRPC. - - List metadata entities in a zone. - - Returns: - Callable[[~.ListEntitiesRequest], - Awaitable[~.ListEntitiesResponse]]: - A function that, when called, will call the underlying RPC - on the server. 
- """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'list_entities' not in self._stubs: - self._stubs['list_entities'] = self.grpc_channel.unary_unary( - '/google.cloud.dataplex.v1.MetadataService/ListEntities', - request_serializer=metadata_.ListEntitiesRequest.serialize, - response_deserializer=metadata_.ListEntitiesResponse.deserialize, - ) - return self._stubs['list_entities'] - - @property - def create_partition(self) -> Callable[ - [metadata_.CreatePartitionRequest], - Awaitable[metadata_.Partition]]: - r"""Return a callable for the create partition method over gRPC. - - Create a metadata partition. - - Returns: - Callable[[~.CreatePartitionRequest], - Awaitable[~.Partition]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'create_partition' not in self._stubs: - self._stubs['create_partition'] = self.grpc_channel.unary_unary( - '/google.cloud.dataplex.v1.MetadataService/CreatePartition', - request_serializer=metadata_.CreatePartitionRequest.serialize, - response_deserializer=metadata_.Partition.deserialize, - ) - return self._stubs['create_partition'] - - @property - def delete_partition(self) -> Callable[ - [metadata_.DeletePartitionRequest], - Awaitable[empty_pb2.Empty]]: - r"""Return a callable for the delete partition method over gRPC. - - Delete a metadata partition. - - Returns: - Callable[[~.DeletePartitionRequest], - Awaitable[~.Empty]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'delete_partition' not in self._stubs: - self._stubs['delete_partition'] = self.grpc_channel.unary_unary( - '/google.cloud.dataplex.v1.MetadataService/DeletePartition', - request_serializer=metadata_.DeletePartitionRequest.serialize, - response_deserializer=empty_pb2.Empty.FromString, - ) - return self._stubs['delete_partition'] - - @property - def get_partition(self) -> Callable[ - [metadata_.GetPartitionRequest], - Awaitable[metadata_.Partition]]: - r"""Return a callable for the get partition method over gRPC. - - Get a metadata partition of an entity. - - Returns: - Callable[[~.GetPartitionRequest], - Awaitable[~.Partition]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'get_partition' not in self._stubs: - self._stubs['get_partition'] = self.grpc_channel.unary_unary( - '/google.cloud.dataplex.v1.MetadataService/GetPartition', - request_serializer=metadata_.GetPartitionRequest.serialize, - response_deserializer=metadata_.Partition.deserialize, - ) - return self._stubs['get_partition'] - - @property - def list_partitions(self) -> Callable[ - [metadata_.ListPartitionsRequest], - Awaitable[metadata_.ListPartitionsResponse]]: - r"""Return a callable for the list partitions method over gRPC. - - List metadata partitions of an entity. 
- - Returns: - Callable[[~.ListPartitionsRequest], - Awaitable[~.ListPartitionsResponse]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'list_partitions' not in self._stubs: - self._stubs['list_partitions'] = self.grpc_channel.unary_unary( - '/google.cloud.dataplex.v1.MetadataService/ListPartitions', - request_serializer=metadata_.ListPartitionsRequest.serialize, - response_deserializer=metadata_.ListPartitionsResponse.deserialize, - ) - return self._stubs['list_partitions'] - - def _prep_wrapped_messages(self, client_info): - """ Precompute the wrapped methods, overriding the base class method to use async wrappers.""" - self._wrapped_methods = { - self.create_entity: self._wrap_method( - self.create_entity, - default_timeout=60.0, - client_info=client_info, - ), - self.update_entity: self._wrap_method( - self.update_entity, - default_timeout=60.0, - client_info=client_info, - ), - self.delete_entity: self._wrap_method( - self.delete_entity, - default_timeout=60.0, - client_info=client_info, - ), - self.get_entity: self._wrap_method( - self.get_entity, - default_retry=retries.AsyncRetry( - initial=1.0, - maximum=10.0, - multiplier=1.3, - predicate=retries.if_exception_type( - core_exceptions.ServiceUnavailable, - ), - deadline=60.0, - ), - default_timeout=60.0, - client_info=client_info, - ), - self.list_entities: self._wrap_method( - self.list_entities, - default_retry=retries.AsyncRetry( - initial=1.0, - maximum=10.0, - multiplier=1.3, - predicate=retries.if_exception_type( - core_exceptions.ServiceUnavailable, - ), - deadline=60.0, - ), - default_timeout=60.0, - client_info=client_info, - ), - self.create_partition: self._wrap_method( - self.create_partition, - default_timeout=60.0, - client_info=client_info, - ), - self.delete_partition: self._wrap_method( - self.delete_partition, - default_timeout=60.0, - client_info=client_info, - ), - self.get_partition: self._wrap_method( - self.get_partition, - default_retry=retries.AsyncRetry( - initial=1.0, - maximum=10.0, - multiplier=1.3, - predicate=retries.if_exception_type( - core_exceptions.ServiceUnavailable, - ), - deadline=60.0, - ), - default_timeout=60.0, - client_info=client_info, - ), - self.list_partitions: self._wrap_method( - self.list_partitions, - default_retry=retries.AsyncRetry( - initial=1.0, - maximum=10.0, - multiplier=1.3, - predicate=retries.if_exception_type( - core_exceptions.ServiceUnavailable, - ), - deadline=60.0, - ), - default_timeout=60.0, - client_info=client_info, - ), - self.get_location: self._wrap_method( - self.get_location, - default_timeout=None, - client_info=client_info, - ), - self.list_locations: self._wrap_method( - self.list_locations, - default_timeout=None, - client_info=client_info, - ), - self.cancel_operation: self._wrap_method( - self.cancel_operation, - default_timeout=None, - client_info=client_info, - ), - self.delete_operation: self._wrap_method( - self.delete_operation, - default_timeout=None, - client_info=client_info, - ), - self.get_operation: self._wrap_method( - self.get_operation, - default_timeout=None, - client_info=client_info, - ), - self.list_operations: self._wrap_method( - self.list_operations, - default_timeout=None, - client_info=client_info, - ), - } - - def _wrap_method(self, func, *args, **kwargs): - if self._wrap_with_kind: # pragma: NO 
COVER - kwargs["kind"] = self.kind - return gapic_v1.method_async.wrap_method(func, *args, **kwargs) - - def close(self): - return self.grpc_channel.close() - - @property - def kind(self) -> str: - return "grpc_asyncio" - - @property - def delete_operation( - self, - ) -> Callable[[operations_pb2.DeleteOperationRequest], None]: - r"""Return a callable for the delete_operation method over gRPC. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if "delete_operation" not in self._stubs: - self._stubs["delete_operation"] = self.grpc_channel.unary_unary( - "/google.longrunning.Operations/DeleteOperation", - request_serializer=operations_pb2.DeleteOperationRequest.SerializeToString, - response_deserializer=None, - ) - return self._stubs["delete_operation"] - - @property - def cancel_operation( - self, - ) -> Callable[[operations_pb2.CancelOperationRequest], None]: - r"""Return a callable for the cancel_operation method over gRPC. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if "cancel_operation" not in self._stubs: - self._stubs["cancel_operation"] = self.grpc_channel.unary_unary( - "/google.longrunning.Operations/CancelOperation", - request_serializer=operations_pb2.CancelOperationRequest.SerializeToString, - response_deserializer=None, - ) - return self._stubs["cancel_operation"] - - @property - def get_operation( - self, - ) -> Callable[[operations_pb2.GetOperationRequest], operations_pb2.Operation]: - r"""Return a callable for the get_operation method over gRPC. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if "get_operation" not in self._stubs: - self._stubs["get_operation"] = self.grpc_channel.unary_unary( - "/google.longrunning.Operations/GetOperation", - request_serializer=operations_pb2.GetOperationRequest.SerializeToString, - response_deserializer=operations_pb2.Operation.FromString, - ) - return self._stubs["get_operation"] - - @property - def list_operations( - self, - ) -> Callable[[operations_pb2.ListOperationsRequest], operations_pb2.ListOperationsResponse]: - r"""Return a callable for the list_operations method over gRPC. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if "list_operations" not in self._stubs: - self._stubs["list_operations"] = self.grpc_channel.unary_unary( - "/google.longrunning.Operations/ListOperations", - request_serializer=operations_pb2.ListOperationsRequest.SerializeToString, - response_deserializer=operations_pb2.ListOperationsResponse.FromString, - ) - return self._stubs["list_operations"] - - @property - def list_locations( - self, - ) -> Callable[[locations_pb2.ListLocationsRequest], locations_pb2.ListLocationsResponse]: - r"""Return a callable for the list locations method over gRPC. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. 
- if "list_locations" not in self._stubs: - self._stubs["list_locations"] = self.grpc_channel.unary_unary( - "/google.cloud.location.Locations/ListLocations", - request_serializer=locations_pb2.ListLocationsRequest.SerializeToString, - response_deserializer=locations_pb2.ListLocationsResponse.FromString, - ) - return self._stubs["list_locations"] - - @property - def get_location( - self, - ) -> Callable[[locations_pb2.GetLocationRequest], locations_pb2.Location]: - r"""Return a callable for the list locations method over gRPC. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if "get_location" not in self._stubs: - self._stubs["get_location"] = self.grpc_channel.unary_unary( - "/google.cloud.location.Locations/GetLocation", - request_serializer=locations_pb2.GetLocationRequest.SerializeToString, - response_deserializer=locations_pb2.Location.FromString, - ) - return self._stubs["get_location"] - - -__all__ = ( - 'MetadataServiceGrpcAsyncIOTransport', -) diff --git a/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/types/__init__.py b/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/types/__init__.py deleted file mode 100644 index 99c2580b712c..000000000000 --- a/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/types/__init__.py +++ /dev/null @@ -1,408 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# -from .analyze import ( - Content, - Environment, - Session, -) -from .catalog import ( - Aspect, - AspectSource, - AspectType, - CancelMetadataJobRequest, - CreateAspectTypeRequest, - CreateEntryGroupRequest, - CreateEntryRequest, - CreateEntryTypeRequest, - CreateMetadataJobRequest, - DeleteAspectTypeRequest, - DeleteEntryGroupRequest, - DeleteEntryRequest, - DeleteEntryTypeRequest, - Entry, - EntryGroup, - EntrySource, - EntryType, - GetAspectTypeRequest, - GetEntryGroupRequest, - GetEntryRequest, - GetEntryTypeRequest, - GetMetadataJobRequest, - ImportItem, - ListAspectTypesRequest, - ListAspectTypesResponse, - ListEntriesRequest, - ListEntriesResponse, - ListEntryGroupsRequest, - ListEntryGroupsResponse, - ListEntryTypesRequest, - ListEntryTypesResponse, - ListMetadataJobsRequest, - ListMetadataJobsResponse, - LookupEntryRequest, - MetadataJob, - SearchEntriesRequest, - SearchEntriesResponse, - SearchEntriesResult, - UpdateAspectTypeRequest, - UpdateEntryGroupRequest, - UpdateEntryRequest, - UpdateEntryTypeRequest, - EntryView, - TransferStatus, -) -from .content import ( - CreateContentRequest, - DeleteContentRequest, - GetContentRequest, - ListContentRequest, - ListContentResponse, - UpdateContentRequest, -) -from .data_discovery import ( - DataDiscoveryResult, - DataDiscoverySpec, -) -from .data_profile import ( - DataProfileResult, - DataProfileSpec, -) -from .data_quality import ( - DataQualityColumnResult, - DataQualityDimension, - DataQualityDimensionResult, - DataQualityResult, - DataQualityRule, - DataQualityRuleResult, - DataQualitySpec, -) -from .data_taxonomy import ( - CreateDataAttributeBindingRequest, - CreateDataAttributeRequest, - CreateDataTaxonomyRequest, - DataAttribute, - DataAttributeBinding, - DataTaxonomy, - DeleteDataAttributeBindingRequest, - DeleteDataAttributeRequest, - DeleteDataTaxonomyRequest, - GetDataAttributeBindingRequest, - GetDataAttributeRequest, - GetDataTaxonomyRequest, - ListDataAttributeBindingsRequest, - ListDataAttributeBindingsResponse, - ListDataAttributesRequest, - ListDataAttributesResponse, - ListDataTaxonomiesRequest, - ListDataTaxonomiesResponse, - UpdateDataAttributeBindingRequest, - UpdateDataAttributeRequest, - UpdateDataTaxonomyRequest, -) -from .datascans import ( - CreateDataScanRequest, - DataScan, - DataScanJob, - DeleteDataScanRequest, - GenerateDataQualityRulesRequest, - GenerateDataQualityRulesResponse, - GetDataScanJobRequest, - GetDataScanRequest, - ListDataScanJobsRequest, - ListDataScanJobsResponse, - ListDataScansRequest, - ListDataScansResponse, - RunDataScanRequest, - RunDataScanResponse, - UpdateDataScanRequest, - DataScanType, -) -from .logs import ( - DataQualityScanRuleResult, - DataScanEvent, - DiscoveryEvent, - GovernanceEvent, - JobEvent, - SessionEvent, -) -from .metadata_ import ( - CreateEntityRequest, - CreatePartitionRequest, - DeleteEntityRequest, - DeletePartitionRequest, - Entity, - GetEntityRequest, - GetPartitionRequest, - ListEntitiesRequest, - ListEntitiesResponse, - ListPartitionsRequest, - ListPartitionsResponse, - Partition, - Schema, - StorageAccess, - StorageFormat, - UpdateEntityRequest, - StorageSystem, -) -from .processing import ( - DataSource, - ScannedData, - Trigger, -) -from .resources import ( - Action, - Asset, - AssetStatus, - Lake, - Zone, - State, -) -from .security import ( - DataAccessSpec, - ResourceAccessSpec, -) -from .service import ( - CancelJobRequest, - CreateAssetRequest, - CreateEnvironmentRequest, - CreateLakeRequest, - CreateTaskRequest, - CreateZoneRequest, - 
DeleteAssetRequest, - DeleteEnvironmentRequest, - DeleteLakeRequest, - DeleteTaskRequest, - DeleteZoneRequest, - GetAssetRequest, - GetEnvironmentRequest, - GetJobRequest, - GetLakeRequest, - GetTaskRequest, - GetZoneRequest, - ListActionsResponse, - ListAssetActionsRequest, - ListAssetsRequest, - ListAssetsResponse, - ListEnvironmentsRequest, - ListEnvironmentsResponse, - ListJobsRequest, - ListJobsResponse, - ListLakeActionsRequest, - ListLakesRequest, - ListLakesResponse, - ListSessionsRequest, - ListSessionsResponse, - ListTasksRequest, - ListTasksResponse, - ListZoneActionsRequest, - ListZonesRequest, - ListZonesResponse, - OperationMetadata, - RunTaskRequest, - RunTaskResponse, - UpdateAssetRequest, - UpdateEnvironmentRequest, - UpdateLakeRequest, - UpdateTaskRequest, - UpdateZoneRequest, -) -from .tasks import ( - Job, - Task, -) - -__all__ = ( - 'Content', - 'Environment', - 'Session', - 'Aspect', - 'AspectSource', - 'AspectType', - 'CancelMetadataJobRequest', - 'CreateAspectTypeRequest', - 'CreateEntryGroupRequest', - 'CreateEntryRequest', - 'CreateEntryTypeRequest', - 'CreateMetadataJobRequest', - 'DeleteAspectTypeRequest', - 'DeleteEntryGroupRequest', - 'DeleteEntryRequest', - 'DeleteEntryTypeRequest', - 'Entry', - 'EntryGroup', - 'EntrySource', - 'EntryType', - 'GetAspectTypeRequest', - 'GetEntryGroupRequest', - 'GetEntryRequest', - 'GetEntryTypeRequest', - 'GetMetadataJobRequest', - 'ImportItem', - 'ListAspectTypesRequest', - 'ListAspectTypesResponse', - 'ListEntriesRequest', - 'ListEntriesResponse', - 'ListEntryGroupsRequest', - 'ListEntryGroupsResponse', - 'ListEntryTypesRequest', - 'ListEntryTypesResponse', - 'ListMetadataJobsRequest', - 'ListMetadataJobsResponse', - 'LookupEntryRequest', - 'MetadataJob', - 'SearchEntriesRequest', - 'SearchEntriesResponse', - 'SearchEntriesResult', - 'UpdateAspectTypeRequest', - 'UpdateEntryGroupRequest', - 'UpdateEntryRequest', - 'UpdateEntryTypeRequest', - 'EntryView', - 'TransferStatus', - 'CreateContentRequest', - 'DeleteContentRequest', - 'GetContentRequest', - 'ListContentRequest', - 'ListContentResponse', - 'UpdateContentRequest', - 'DataDiscoveryResult', - 'DataDiscoverySpec', - 'DataProfileResult', - 'DataProfileSpec', - 'DataQualityColumnResult', - 'DataQualityDimension', - 'DataQualityDimensionResult', - 'DataQualityResult', - 'DataQualityRule', - 'DataQualityRuleResult', - 'DataQualitySpec', - 'CreateDataAttributeBindingRequest', - 'CreateDataAttributeRequest', - 'CreateDataTaxonomyRequest', - 'DataAttribute', - 'DataAttributeBinding', - 'DataTaxonomy', - 'DeleteDataAttributeBindingRequest', - 'DeleteDataAttributeRequest', - 'DeleteDataTaxonomyRequest', - 'GetDataAttributeBindingRequest', - 'GetDataAttributeRequest', - 'GetDataTaxonomyRequest', - 'ListDataAttributeBindingsRequest', - 'ListDataAttributeBindingsResponse', - 'ListDataAttributesRequest', - 'ListDataAttributesResponse', - 'ListDataTaxonomiesRequest', - 'ListDataTaxonomiesResponse', - 'UpdateDataAttributeBindingRequest', - 'UpdateDataAttributeRequest', - 'UpdateDataTaxonomyRequest', - 'CreateDataScanRequest', - 'DataScan', - 'DataScanJob', - 'DeleteDataScanRequest', - 'GenerateDataQualityRulesRequest', - 'GenerateDataQualityRulesResponse', - 'GetDataScanJobRequest', - 'GetDataScanRequest', - 'ListDataScanJobsRequest', - 'ListDataScanJobsResponse', - 'ListDataScansRequest', - 'ListDataScansResponse', - 'RunDataScanRequest', - 'RunDataScanResponse', - 'UpdateDataScanRequest', - 'DataScanType', - 'DataQualityScanRuleResult', - 'DataScanEvent', - 'DiscoveryEvent', - 
'GovernanceEvent', - 'JobEvent', - 'SessionEvent', - 'CreateEntityRequest', - 'CreatePartitionRequest', - 'DeleteEntityRequest', - 'DeletePartitionRequest', - 'Entity', - 'GetEntityRequest', - 'GetPartitionRequest', - 'ListEntitiesRequest', - 'ListEntitiesResponse', - 'ListPartitionsRequest', - 'ListPartitionsResponse', - 'Partition', - 'Schema', - 'StorageAccess', - 'StorageFormat', - 'UpdateEntityRequest', - 'StorageSystem', - 'DataSource', - 'ScannedData', - 'Trigger', - 'Action', - 'Asset', - 'AssetStatus', - 'Lake', - 'Zone', - 'State', - 'DataAccessSpec', - 'ResourceAccessSpec', - 'CancelJobRequest', - 'CreateAssetRequest', - 'CreateEnvironmentRequest', - 'CreateLakeRequest', - 'CreateTaskRequest', - 'CreateZoneRequest', - 'DeleteAssetRequest', - 'DeleteEnvironmentRequest', - 'DeleteLakeRequest', - 'DeleteTaskRequest', - 'DeleteZoneRequest', - 'GetAssetRequest', - 'GetEnvironmentRequest', - 'GetJobRequest', - 'GetLakeRequest', - 'GetTaskRequest', - 'GetZoneRequest', - 'ListActionsResponse', - 'ListAssetActionsRequest', - 'ListAssetsRequest', - 'ListAssetsResponse', - 'ListEnvironmentsRequest', - 'ListEnvironmentsResponse', - 'ListJobsRequest', - 'ListJobsResponse', - 'ListLakeActionsRequest', - 'ListLakesRequest', - 'ListLakesResponse', - 'ListSessionsRequest', - 'ListSessionsResponse', - 'ListTasksRequest', - 'ListTasksResponse', - 'ListZoneActionsRequest', - 'ListZonesRequest', - 'ListZonesResponse', - 'OperationMetadata', - 'RunTaskRequest', - 'RunTaskResponse', - 'UpdateAssetRequest', - 'UpdateEnvironmentRequest', - 'UpdateLakeRequest', - 'UpdateTaskRequest', - 'UpdateZoneRequest', - 'Job', - 'Task', -) diff --git a/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/types/analyze.py b/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/types/analyze.py deleted file mode 100644 index a12a3b34e864..000000000000 --- a/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/types/analyze.py +++ /dev/null @@ -1,491 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -from __future__ import annotations - -from typing import MutableMapping, MutableSequence - -import proto # type: ignore - -from google.cloud.dataplex_v1.types import resources -from google.protobuf import duration_pb2 # type: ignore -from google.protobuf import timestamp_pb2 # type: ignore - - -__protobuf__ = proto.module( - package='google.cloud.dataplex.v1', - manifest={ - 'Environment', - 'Content', - 'Session', - }, -) - - -class Environment(proto.Message): - r"""Environment represents a user-visible compute infrastructure - for analytics within a lake. - - Attributes: - name (str): - Output only. The relative resource name of the environment, - of the form: - projects/{project_id}/locations/{location_id}/lakes/{lake_id}/environment/{environment_id} - display_name (str): - Optional. User friendly display name. - uid (str): - Output only. System generated globally unique - ID for the environment. 
This ID will be - different if the environment is deleted and - re-created with the same name. - create_time (google.protobuf.timestamp_pb2.Timestamp): - Output only. Environment creation time. - update_time (google.protobuf.timestamp_pb2.Timestamp): - Output only. The time when the environment - was last updated. - labels (MutableMapping[str, str]): - Optional. User defined labels for the - environment. - description (str): - Optional. Description of the environment. - state (google.cloud.dataplex_v1.types.State): - Output only. Current state of the - environment. - infrastructure_spec (google.cloud.dataplex_v1.types.Environment.InfrastructureSpec): - Required. Infrastructure specification for - the Environment. - session_spec (google.cloud.dataplex_v1.types.Environment.SessionSpec): - Optional. Configuration for sessions created - for this environment. - session_status (google.cloud.dataplex_v1.types.Environment.SessionStatus): - Output only. Status of sessions created for - this environment. - endpoints (google.cloud.dataplex_v1.types.Environment.Endpoints): - Output only. URI Endpoints to access sessions - associated with the Environment. - """ - - class InfrastructureSpec(proto.Message): - r"""Configuration for the underlying infrastructure used to run - workloads. - - - .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields - - Attributes: - compute (google.cloud.dataplex_v1.types.Environment.InfrastructureSpec.ComputeResources): - Optional. Compute resources needed for - analyze interactive workloads. - - This field is a member of `oneof`_ ``resources``. - os_image (google.cloud.dataplex_v1.types.Environment.InfrastructureSpec.OsImageRuntime): - Required. Software Runtime Configuration for - analyze interactive workloads. - - This field is a member of `oneof`_ ``runtime``. - """ - - class ComputeResources(proto.Message): - r"""Compute resources associated with the analyze interactive - workloads. - - Attributes: - disk_size_gb (int): - Optional. Size in GB of the disk. Default is - 100 GB. - node_count (int): - Optional. Total number of nodes in the - sessions created for this environment. - max_node_count (int): - Optional. Max configurable nodes. If max_node_count > - node_count, then auto-scaling is enabled. - """ - - disk_size_gb: int = proto.Field( - proto.INT32, - number=1, - ) - node_count: int = proto.Field( - proto.INT32, - number=2, - ) - max_node_count: int = proto.Field( - proto.INT32, - number=3, - ) - - class OsImageRuntime(proto.Message): - r"""Software Runtime Configuration to run Analyze. - - Attributes: - image_version (str): - Required. Dataplex Image version. - java_libraries (MutableSequence[str]): - Optional. List of Java jars to be included in - the runtime environment. Valid input includes - Cloud Storage URIs to Jar binaries. For example, - gs://bucket-name/my/path/to/file.jar - python_packages (MutableSequence[str]): - Optional. A list of python packages to be - installed. Valid formats include Cloud Storage - URI to a PIP installable library. For example, - gs://bucket-name/my/path/to/lib.tar.gz - properties (MutableMapping[str, str]): - Optional. Spark properties to provide configuration for use - in sessions created for this environment. The properties to - set on daemon config files. Property keys are specified in - ``prefix:property`` format. The prefix must be "spark". 
- """ - - image_version: str = proto.Field( - proto.STRING, - number=1, - ) - java_libraries: MutableSequence[str] = proto.RepeatedField( - proto.STRING, - number=2, - ) - python_packages: MutableSequence[str] = proto.RepeatedField( - proto.STRING, - number=3, - ) - properties: MutableMapping[str, str] = proto.MapField( - proto.STRING, - proto.STRING, - number=4, - ) - - compute: 'Environment.InfrastructureSpec.ComputeResources' = proto.Field( - proto.MESSAGE, - number=50, - oneof='resources', - message='Environment.InfrastructureSpec.ComputeResources', - ) - os_image: 'Environment.InfrastructureSpec.OsImageRuntime' = proto.Field( - proto.MESSAGE, - number=100, - oneof='runtime', - message='Environment.InfrastructureSpec.OsImageRuntime', - ) - - class SessionSpec(proto.Message): - r"""Configuration for sessions created for this environment. - - Attributes: - max_idle_duration (google.protobuf.duration_pb2.Duration): - Optional. The idle time configuration of the - session. The session will be auto-terminated at - the end of this period. - enable_fast_startup (bool): - Optional. If True, this causes sessions to be - pre-created and available for faster startup to - enable interactive exploration use-cases. This - defaults to False to avoid additional billed - charges. These can only be set to True for the - environment with name set to "default", and with - default configuration. - """ - - max_idle_duration: duration_pb2.Duration = proto.Field( - proto.MESSAGE, - number=1, - message=duration_pb2.Duration, - ) - enable_fast_startup: bool = proto.Field( - proto.BOOL, - number=2, - ) - - class SessionStatus(proto.Message): - r"""Status of sessions created for this environment. - - Attributes: - active (bool): - Output only. Queries over sessions to mark - whether the environment is currently active or - not - """ - - active: bool = proto.Field( - proto.BOOL, - number=1, - ) - - class Endpoints(proto.Message): - r"""URI Endpoints to access sessions associated with the - Environment. - - Attributes: - notebooks (str): - Output only. URI to serve notebook APIs - sql (str): - Output only. URI to serve SQL APIs - """ - - notebooks: str = proto.Field( - proto.STRING, - number=1, - ) - sql: str = proto.Field( - proto.STRING, - number=2, - ) - - name: str = proto.Field( - proto.STRING, - number=1, - ) - display_name: str = proto.Field( - proto.STRING, - number=2, - ) - uid: str = proto.Field( - proto.STRING, - number=3, - ) - create_time: timestamp_pb2.Timestamp = proto.Field( - proto.MESSAGE, - number=4, - message=timestamp_pb2.Timestamp, - ) - update_time: timestamp_pb2.Timestamp = proto.Field( - proto.MESSAGE, - number=5, - message=timestamp_pb2.Timestamp, - ) - labels: MutableMapping[str, str] = proto.MapField( - proto.STRING, - proto.STRING, - number=6, - ) - description: str = proto.Field( - proto.STRING, - number=7, - ) - state: resources.State = proto.Field( - proto.ENUM, - number=8, - enum=resources.State, - ) - infrastructure_spec: InfrastructureSpec = proto.Field( - proto.MESSAGE, - number=100, - message=InfrastructureSpec, - ) - session_spec: SessionSpec = proto.Field( - proto.MESSAGE, - number=101, - message=SessionSpec, - ) - session_status: SessionStatus = proto.Field( - proto.MESSAGE, - number=102, - message=SessionStatus, - ) - endpoints: Endpoints = proto.Field( - proto.MESSAGE, - number=200, - message=Endpoints, - ) - - -class Content(proto.Message): - r"""Content represents a user-visible notebook or a sql script - - This message has `oneof`_ fields (mutually exclusive fields). 
- For each oneof, at most one member field can be set at the same time. - Setting any member of the oneof automatically clears all other - members. - - .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields - - Attributes: - name (str): - Output only. The relative resource name of the content, of - the form: - projects/{project_id}/locations/{location_id}/lakes/{lake_id}/content/{content_id} - uid (str): - Output only. System generated globally unique - ID for the content. This ID will be different if - the content is deleted and re-created with the - same name. - path (str): - Required. The path for the Content file, - represented as directory structure. Unique - within a lake. Limited to alphanumerics, - hyphens, underscores, dots and slashes. - create_time (google.protobuf.timestamp_pb2.Timestamp): - Output only. Content creation time. - update_time (google.protobuf.timestamp_pb2.Timestamp): - Output only. The time when the content was - last updated. - labels (MutableMapping[str, str]): - Optional. User defined labels for the - content. - description (str): - Optional. Description of the content. - data_text (str): - Required. Content data in string format. - - This field is a member of `oneof`_ ``data``. - sql_script (google.cloud.dataplex_v1.types.Content.SqlScript): - Sql Script related configurations. - - This field is a member of `oneof`_ ``content``. - notebook (google.cloud.dataplex_v1.types.Content.Notebook): - Notebook related configurations. - - This field is a member of `oneof`_ ``content``. - """ - - class SqlScript(proto.Message): - r"""Configuration for the Sql Script content. - - Attributes: - engine (google.cloud.dataplex_v1.types.Content.SqlScript.QueryEngine): - Required. Query Engine to be used for the Sql - Query. - """ - class QueryEngine(proto.Enum): - r"""Query Engine Type of the SQL Script. - - Values: - QUERY_ENGINE_UNSPECIFIED (0): - Value was unspecified. - SPARK (2): - Spark SQL Query. - """ - QUERY_ENGINE_UNSPECIFIED = 0 - SPARK = 2 - - engine: 'Content.SqlScript.QueryEngine' = proto.Field( - proto.ENUM, - number=1, - enum='Content.SqlScript.QueryEngine', - ) - - class Notebook(proto.Message): - r"""Configuration for Notebook content. - - Attributes: - kernel_type (google.cloud.dataplex_v1.types.Content.Notebook.KernelType): - Required. Kernel Type of the notebook. - """ - class KernelType(proto.Enum): - r"""Kernel Type of the Jupyter notebook. - - Values: - KERNEL_TYPE_UNSPECIFIED (0): - Kernel Type unspecified. - PYTHON3 (1): - Python 3 Kernel. 
- """ - KERNEL_TYPE_UNSPECIFIED = 0 - PYTHON3 = 1 - - kernel_type: 'Content.Notebook.KernelType' = proto.Field( - proto.ENUM, - number=1, - enum='Content.Notebook.KernelType', - ) - - name: str = proto.Field( - proto.STRING, - number=1, - ) - uid: str = proto.Field( - proto.STRING, - number=2, - ) - path: str = proto.Field( - proto.STRING, - number=3, - ) - create_time: timestamp_pb2.Timestamp = proto.Field( - proto.MESSAGE, - number=4, - message=timestamp_pb2.Timestamp, - ) - update_time: timestamp_pb2.Timestamp = proto.Field( - proto.MESSAGE, - number=5, - message=timestamp_pb2.Timestamp, - ) - labels: MutableMapping[str, str] = proto.MapField( - proto.STRING, - proto.STRING, - number=6, - ) - description: str = proto.Field( - proto.STRING, - number=7, - ) - data_text: str = proto.Field( - proto.STRING, - number=9, - oneof='data', - ) - sql_script: SqlScript = proto.Field( - proto.MESSAGE, - number=100, - oneof='content', - message=SqlScript, - ) - notebook: Notebook = proto.Field( - proto.MESSAGE, - number=101, - oneof='content', - message=Notebook, - ) - - -class Session(proto.Message): - r"""Represents an active analyze session running for a user. - - Attributes: - name (str): - Output only. The relative resource name of the content, of - the form: - projects/{project_id}/locations/{location_id}/lakes/{lake_id}/environment/{environment_id}/sessions/{session_id} - user_id (str): - Output only. Email of user running the - session. - create_time (google.protobuf.timestamp_pb2.Timestamp): - Output only. Session start time. - state (google.cloud.dataplex_v1.types.State): - Output only. State of Session - """ - - name: str = proto.Field( - proto.STRING, - number=1, - ) - user_id: str = proto.Field( - proto.STRING, - number=2, - ) - create_time: timestamp_pb2.Timestamp = proto.Field( - proto.MESSAGE, - number=3, - message=timestamp_pb2.Timestamp, - ) - state: resources.State = proto.Field( - proto.ENUM, - number=4, - enum=resources.State, - ) - - -__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/types/catalog.py b/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/types/catalog.py deleted file mode 100644 index b1233e502d15..000000000000 --- a/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/types/catalog.py +++ /dev/null @@ -1,2630 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# -from __future__ import annotations - -from typing import MutableMapping, MutableSequence - -import proto # type: ignore - -from google.protobuf import field_mask_pb2 # type: ignore -from google.protobuf import struct_pb2 # type: ignore -from google.protobuf import timestamp_pb2 # type: ignore - - -__protobuf__ = proto.module( - package='google.cloud.dataplex.v1', - manifest={ - 'EntryView', - 'TransferStatus', - 'AspectType', - 'EntryGroup', - 'EntryType', - 'Aspect', - 'AspectSource', - 'Entry', - 'EntrySource', - 'CreateEntryGroupRequest', - 'UpdateEntryGroupRequest', - 'DeleteEntryGroupRequest', - 'ListEntryGroupsRequest', - 'ListEntryGroupsResponse', - 'GetEntryGroupRequest', - 'CreateEntryTypeRequest', - 'UpdateEntryTypeRequest', - 'DeleteEntryTypeRequest', - 'ListEntryTypesRequest', - 'ListEntryTypesResponse', - 'GetEntryTypeRequest', - 'CreateAspectTypeRequest', - 'UpdateAspectTypeRequest', - 'DeleteAspectTypeRequest', - 'ListAspectTypesRequest', - 'ListAspectTypesResponse', - 'GetAspectTypeRequest', - 'CreateEntryRequest', - 'UpdateEntryRequest', - 'DeleteEntryRequest', - 'ListEntriesRequest', - 'ListEntriesResponse', - 'GetEntryRequest', - 'LookupEntryRequest', - 'SearchEntriesRequest', - 'SearchEntriesResult', - 'SearchEntriesResponse', - 'ImportItem', - 'CreateMetadataJobRequest', - 'GetMetadataJobRequest', - 'ListMetadataJobsRequest', - 'ListMetadataJobsResponse', - 'CancelMetadataJobRequest', - 'MetadataJob', - }, -) - - -class EntryView(proto.Enum): - r"""View for controlling which parts of an entry are to be - returned. - - Values: - ENTRY_VIEW_UNSPECIFIED (0): - Unspecified EntryView. Defaults to FULL. - BASIC (1): - Returns entry only, without aspects. - FULL (2): - Returns all required aspects as well as the - keys of all non-required aspects. - CUSTOM (3): - Returns aspects matching custom fields in - GetEntryRequest. If the number of aspects - exceeds 100, the first 100 will be returned. - ALL (4): - Returns all aspects. If the number of aspects - exceeds 100, the first 100 will be returned. - """ - ENTRY_VIEW_UNSPECIFIED = 0 - BASIC = 1 - FULL = 2 - CUSTOM = 3 - ALL = 4 - - -class TransferStatus(proto.Enum): - r"""Denotes the transfer status of a resource. It is unspecified - for resources created from Dataplex API. - - Values: - TRANSFER_STATUS_UNSPECIFIED (0): - The default value. It is set for resources - that were not subject for migration from Data - Catalog service. - TRANSFER_STATUS_MIGRATED (1): - Indicates that a resource was migrated from - Data Catalog service but it hasn't been - transferred yet. In particular the resource - cannot be updated from Dataplex API. - TRANSFER_STATUS_TRANSFERRED (2): - Indicates that a resource was transferred - from Data Catalog service. The resource can only - be updated from Dataplex API. - """ - TRANSFER_STATUS_UNSPECIFIED = 0 - TRANSFER_STATUS_MIGRATED = 1 - TRANSFER_STATUS_TRANSFERRED = 2 - - -class AspectType(proto.Message): - r"""AspectType is a template for creating Aspects, and represents - the JSON-schema for a given Entry, for example, BigQuery Table - Schema. - - Attributes: - name (str): - Output only. The relative resource name of the AspectType, - of the form: - projects/{project_number}/locations/{location_id}/aspectTypes/{aspect_type_id}. - uid (str): - Output only. System generated globally unique - ID for the AspectType. If you delete and - recreate the AspectType with the same name, then - this ID will be different. - create_time (google.protobuf.timestamp_pb2.Timestamp): - Output only. 
The time when the AspectType was
- created.
- update_time (google.protobuf.timestamp_pb2.Timestamp):
- Output only. The time when the AspectType was
- last updated.
- description (str):
- Optional. Description of the AspectType.
- display_name (str):
- Optional. User friendly display name.
- labels (MutableMapping[str, str]):
- Optional. User-defined labels for the
- AspectType.
- etag (str):
- The service computes this checksum. The
- client may send it on update and delete requests
- to ensure it has an up-to-date value before
- proceeding.
- authorization (google.cloud.dataplex_v1.types.AspectType.Authorization):
- Immutable. Defines the Authorization for this
- type.
- metadata_template (google.cloud.dataplex_v1.types.AspectType.MetadataTemplate):
- Required. MetadataTemplate of the aspect.
- transfer_status (google.cloud.dataplex_v1.types.TransferStatus):
- Output only. Denotes the transfer status of
- the Aspect Type. It is unspecified for Aspect
- Types created from Dataplex API.
- """
-
- class Authorization(proto.Message):
- r"""Authorization for an AspectType.
-
- Attributes:
- alternate_use_permission (str):
- Immutable. The IAM permission grantable on
- the EntryGroup to allow access to instantiate
- Aspects of Dataplex owned AspectTypes, only
- settable for Dataplex owned Types.
- """
-
- alternate_use_permission: str = proto.Field(
- proto.STRING,
- number=1,
- )
-
- class MetadataTemplate(proto.Message):
- r"""MetadataTemplate definition for an AspectType.
-
- Attributes:
- index (int):
- Optional. Index is used to encode Template
- messages. The value of index can range between 1
- and 2,147,483,647. Index must be unique within
- all fields in a Template. (Nested Templates can
- reuse indexes). Once a Template is defined, the
- index cannot be changed, because it identifies
- the field in the actual storage format. Index is
- a mandatory field, but it is optional for top
- level fields, and map/array "values"
- definitions.
- name (str):
- Required. The name of the field.
- type_ (str):
- Required. The datatype of this field. The following values
- are supported:
-
- Primitive types:
-
- - string
- - integer
- - boolean
- - double
- - datetime. Must be of the format RFC3339 UTC "Zulu"
- (Examples: "2014-10-02T15:01:23Z" and
- "2014-10-02T15:01:23.045123456Z").
-
- Complex types:
-
- - enum
- - array
- - map
- - record
- record_fields (MutableSequence[google.cloud.dataplex_v1.types.AspectType.MetadataTemplate]):
- Optional. Field definition. You must specify
- it if the type is record. It defines the nested
- fields.
- enum_values (MutableSequence[google.cloud.dataplex_v1.types.AspectType.MetadataTemplate.EnumValue]):
- Optional. The list of values for an enum
- type. You must define it if the type is enum.
- map_items (google.cloud.dataplex_v1.types.AspectType.MetadataTemplate):
- Optional. If the type is map, set map_items. map_items can
- refer to a primitive field or a complex (record only) field.
- To specify a primitive field, you only need to set name and
- type in the nested MetadataTemplate. The recommended value
- for the name field is item, as this isn't used in the actual
- payload.
- array_items (google.cloud.dataplex_v1.types.AspectType.MetadataTemplate):
- Optional. If the type is array, set array_items. array_items
- can refer to a primitive field or a complex (record only)
- field. To specify a primitive field, you only need to set
- name and type in the nested MetadataTemplate.
The - recommended value for the name field is item, as this isn't - used in the actual payload. - type_id (str): - Optional. You can use type id if this - definition of the field needs to be reused - later. The type id must be unique across the - entire template. You can only specify it if the - field type is record. - type_ref (str): - Optional. A reference to another field - definition (not an inline definition). The value - must be equal to the value of an id field - defined elsewhere in the MetadataTemplate. Only - fields with record type can refer to other - fields. - constraints (google.cloud.dataplex_v1.types.AspectType.MetadataTemplate.Constraints): - Optional. Specifies the constraints on this - field. - annotations (google.cloud.dataplex_v1.types.AspectType.MetadataTemplate.Annotations): - Optional. Specifies annotations on this - field. - """ - - class EnumValue(proto.Message): - r"""Definition of Enumvalue, to be used for enum fields. - - Attributes: - index (int): - Required. Index for the enum value. It can't - be modified. - name (str): - Required. Name of the enumvalue. This is the - actual value that the aspect can contain. - deprecated (str): - Optional. You can set this message if you - need to deprecate an enum value. - """ - - index: int = proto.Field( - proto.INT32, - number=1, - ) - name: str = proto.Field( - proto.STRING, - number=2, - ) - deprecated: str = proto.Field( - proto.STRING, - number=3, - ) - - class Constraints(proto.Message): - r"""Definition of the constraints of a field. - - Attributes: - required (bool): - Optional. Marks this field as optional or - required. - """ - - required: bool = proto.Field( - proto.BOOL, - number=1, - ) - - class Annotations(proto.Message): - r"""Definition of the annotations of a field. - - Attributes: - deprecated (str): - Optional. Marks a field as deprecated. You - can include a deprecation message. - display_name (str): - Optional. Display name for a field. - description (str): - Optional. Description for a field. - display_order (int): - Optional. Display order for a field. You can - use this to reorder where a field is rendered. - string_type (str): - Optional. You can use String Type annotations to specify - special meaning to string fields. The following values are - supported: - - - richText: The field must be interpreted as a rich text - field. - - url: A fully qualified URL link. - - resource: A service qualified resource reference. - string_values (MutableSequence[str]): - Optional. Suggested hints for string fields. - You can use them to suggest values to users - through console. 
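Taken together, the template fields documented above compose as in this short sketch (assuming google-cloud-dataplex is installed; the template, field, and enum names are illustrative): a record type holding one required enum field with display annotations.

    from google.cloud import dataplex_v1

    MetadataTemplate = dataplex_v1.AspectType.MetadataTemplate

    template = MetadataTemplate(
        name="example_template",  # illustrative template name
        type_="record",
        record_fields=[
            MetadataTemplate(
                index=1,
                name="tier",
                type_="enum",
                enum_values=[
                    MetadataTemplate.EnumValue(index=1, name="BRONZE"),
                    MetadataTemplate.EnumValue(index=2, name="GOLD"),
                ],
                constraints=MetadataTemplate.Constraints(required=True),
                annotations=MetadataTemplate.Annotations(
                    display_name="Tier",
                    description="Data quality tier of the asset.",
                ),
            ),
        ],
    )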
- """ - - deprecated: str = proto.Field( - proto.STRING, - number=1, - ) - display_name: str = proto.Field( - proto.STRING, - number=2, - ) - description: str = proto.Field( - proto.STRING, - number=3, - ) - display_order: int = proto.Field( - proto.INT32, - number=4, - ) - string_type: str = proto.Field( - proto.STRING, - number=6, - ) - string_values: MutableSequence[str] = proto.RepeatedField( - proto.STRING, - number=7, - ) - - index: int = proto.Field( - proto.INT32, - number=1, - ) - name: str = proto.Field( - proto.STRING, - number=2, - ) - type_: str = proto.Field( - proto.STRING, - number=5, - ) - record_fields: MutableSequence['AspectType.MetadataTemplate'] = proto.RepeatedField( - proto.MESSAGE, - number=6, - message='AspectType.MetadataTemplate', - ) - enum_values: MutableSequence['AspectType.MetadataTemplate.EnumValue'] = proto.RepeatedField( - proto.MESSAGE, - number=8, - message='AspectType.MetadataTemplate.EnumValue', - ) - map_items: 'AspectType.MetadataTemplate' = proto.Field( - proto.MESSAGE, - number=10, - message='AspectType.MetadataTemplate', - ) - array_items: 'AspectType.MetadataTemplate' = proto.Field( - proto.MESSAGE, - number=11, - message='AspectType.MetadataTemplate', - ) - type_id: str = proto.Field( - proto.STRING, - number=12, - ) - type_ref: str = proto.Field( - proto.STRING, - number=13, - ) - constraints: 'AspectType.MetadataTemplate.Constraints' = proto.Field( - proto.MESSAGE, - number=50, - message='AspectType.MetadataTemplate.Constraints', - ) - annotations: 'AspectType.MetadataTemplate.Annotations' = proto.Field( - proto.MESSAGE, - number=51, - message='AspectType.MetadataTemplate.Annotations', - ) - - name: str = proto.Field( - proto.STRING, - number=1, - ) - uid: str = proto.Field( - proto.STRING, - number=2, - ) - create_time: timestamp_pb2.Timestamp = proto.Field( - proto.MESSAGE, - number=3, - message=timestamp_pb2.Timestamp, - ) - update_time: timestamp_pb2.Timestamp = proto.Field( - proto.MESSAGE, - number=4, - message=timestamp_pb2.Timestamp, - ) - description: str = proto.Field( - proto.STRING, - number=5, - ) - display_name: str = proto.Field( - proto.STRING, - number=6, - ) - labels: MutableMapping[str, str] = proto.MapField( - proto.STRING, - proto.STRING, - number=7, - ) - etag: str = proto.Field( - proto.STRING, - number=8, - ) - authorization: Authorization = proto.Field( - proto.MESSAGE, - number=52, - message=Authorization, - ) - metadata_template: MetadataTemplate = proto.Field( - proto.MESSAGE, - number=53, - message=MetadataTemplate, - ) - transfer_status: 'TransferStatus' = proto.Field( - proto.ENUM, - number=202, - enum='TransferStatus', - ) - - -class EntryGroup(proto.Message): - r"""An Entry Group represents a logical grouping of one or more - Entries. - - Attributes: - name (str): - Output only. The relative resource name of the EntryGroup, - in the format - projects/{project_id_or_number}/locations/{location_id}/entryGroups/{entry_group_id}. - uid (str): - Output only. System generated globally unique - ID for the EntryGroup. If you delete and - recreate the EntryGroup with the same name, this - ID will be different. - create_time (google.protobuf.timestamp_pb2.Timestamp): - Output only. The time when the EntryGroup was - created. - update_time (google.protobuf.timestamp_pb2.Timestamp): - Output only. The time when the EntryGroup was - last updated. - description (str): - Optional. Description of the EntryGroup. - display_name (str): - Optional. User friendly display name. - labels (MutableMapping[str, str]): - Optional. 
User-defined labels for the
- EntryGroup.
- etag (str):
- This checksum is computed by the service, and
- might be sent on update and delete requests to
- ensure the client has an up-to-date value before
- proceeding.
- transfer_status (google.cloud.dataplex_v1.types.TransferStatus):
- Output only. Denotes the transfer status of
- the Entry Group. It is unspecified for Entry
- Groups created from the Dataplex API.
- """
-
- name: str = proto.Field(
- proto.STRING,
- number=1,
- )
- uid: str = proto.Field(
- proto.STRING,
- number=2,
- )
- create_time: timestamp_pb2.Timestamp = proto.Field(
- proto.MESSAGE,
- number=3,
- message=timestamp_pb2.Timestamp,
- )
- update_time: timestamp_pb2.Timestamp = proto.Field(
- proto.MESSAGE,
- number=4,
- message=timestamp_pb2.Timestamp,
- )
- description: str = proto.Field(
- proto.STRING,
- number=5,
- )
- display_name: str = proto.Field(
- proto.STRING,
- number=6,
- )
- labels: MutableMapping[str, str] = proto.MapField(
- proto.STRING,
- proto.STRING,
- number=7,
- )
- etag: str = proto.Field(
- proto.STRING,
- number=8,
- )
- transfer_status: 'TransferStatus' = proto.Field(
- proto.ENUM,
- number=202,
- enum='TransferStatus',
- )
-
-
-class EntryType(proto.Message):
- r"""Entry Type is a template for creating Entries.
-
- Attributes:
- name (str):
- Output only. The relative resource name of the EntryType, of
- the form:
- projects/{project_number}/locations/{location_id}/entryTypes/{entry_type_id}.
- uid (str):
- Output only. System generated globally unique
- ID for the EntryType. This ID will be different
- if the EntryType is deleted and re-created with
- the same name.
- create_time (google.protobuf.timestamp_pb2.Timestamp):
- Output only. The time when the EntryType was
- created.
- update_time (google.protobuf.timestamp_pb2.Timestamp):
- Output only. The time when the EntryType was
- last updated.
- description (str):
- Optional. Description of the EntryType.
- display_name (str):
- Optional. User friendly display name.
- labels (MutableMapping[str, str]):
- Optional. User-defined labels for the
- EntryType.
- etag (str):
- Optional. This checksum is computed by the
- service, and might be sent on update and delete
- requests to ensure the client has an up-to-date
- value before proceeding.
- type_aliases (MutableSequence[str]):
- Optional. Indicates the classes this Entry
- Type belongs to, for example, TABLE, DATABASE,
- MODEL.
- platform (str):
- Optional. The platform that Entries of this
- type belong to.
- system (str):
- Optional. The system that Entries of this
- type belong to. Examples include CloudSQL and
- MariaDB.
- required_aspects (MutableSequence[google.cloud.dataplex_v1.types.EntryType.AspectInfo]):
- AspectInfo for the entry type.
- authorization (google.cloud.dataplex_v1.types.EntryType.Authorization):
- Immutable. Authorization defined for this
- type.
- """
-
- class AspectInfo(proto.Message):
- r"""
-
- Attributes:
- type_ (str):
- Required aspect type for the entry type.
- """
-
- type_: str = proto.Field(
- proto.STRING,
- number=1,
- )
-
- class Authorization(proto.Message):
- r"""Authorization for an Entry Type.
-
- Attributes:
- alternate_use_permission (str):
- Immutable. The IAM permission grantable on
- the Entry Group to allow access to instantiate
- Entries of Dataplex owned Entry Types, only
- settable for Dataplex owned Types.
- """ - - alternate_use_permission: str = proto.Field( - proto.STRING, - number=1, - ) - - name: str = proto.Field( - proto.STRING, - number=1, - ) - uid: str = proto.Field( - proto.STRING, - number=2, - ) - create_time: timestamp_pb2.Timestamp = proto.Field( - proto.MESSAGE, - number=3, - message=timestamp_pb2.Timestamp, - ) - update_time: timestamp_pb2.Timestamp = proto.Field( - proto.MESSAGE, - number=4, - message=timestamp_pb2.Timestamp, - ) - description: str = proto.Field( - proto.STRING, - number=5, - ) - display_name: str = proto.Field( - proto.STRING, - number=6, - ) - labels: MutableMapping[str, str] = proto.MapField( - proto.STRING, - proto.STRING, - number=7, - ) - etag: str = proto.Field( - proto.STRING, - number=8, - ) - type_aliases: MutableSequence[str] = proto.RepeatedField( - proto.STRING, - number=9, - ) - platform: str = proto.Field( - proto.STRING, - number=10, - ) - system: str = proto.Field( - proto.STRING, - number=11, - ) - required_aspects: MutableSequence[AspectInfo] = proto.RepeatedField( - proto.MESSAGE, - number=50, - message=AspectInfo, - ) - authorization: Authorization = proto.Field( - proto.MESSAGE, - number=51, - message=Authorization, - ) - - -class Aspect(proto.Message): - r"""An aspect is a single piece of metadata describing an entry. - - Attributes: - aspect_type (str): - Output only. The resource name of the type - used to create this Aspect. - path (str): - Output only. The path in the entry under - which the aspect is attached. - create_time (google.protobuf.timestamp_pb2.Timestamp): - Output only. The time when the Aspect was - created. - update_time (google.protobuf.timestamp_pb2.Timestamp): - Output only. The time when the Aspect was - last updated. - data (google.protobuf.struct_pb2.Struct): - Required. The content of the aspect, - according to its aspect type schema. The maximum - size of the field is 120KB (encoded as UTF-8). - aspect_source (google.cloud.dataplex_v1.types.AspectSource): - Optional. Information related to the source - system of the aspect. - """ - - aspect_type: str = proto.Field( - proto.STRING, - number=1, - ) - path: str = proto.Field( - proto.STRING, - number=2, - ) - create_time: timestamp_pb2.Timestamp = proto.Field( - proto.MESSAGE, - number=3, - message=timestamp_pb2.Timestamp, - ) - update_time: timestamp_pb2.Timestamp = proto.Field( - proto.MESSAGE, - number=4, - message=timestamp_pb2.Timestamp, - ) - data: struct_pb2.Struct = proto.Field( - proto.MESSAGE, - number=8, - message=struct_pb2.Struct, - ) - aspect_source: 'AspectSource' = proto.Field( - proto.MESSAGE, - number=9, - message='AspectSource', - ) - - -class AspectSource(proto.Message): - r"""Information related to the source system of the aspect. - - Attributes: - create_time (google.protobuf.timestamp_pb2.Timestamp): - The time the aspect was created in the source - system. - update_time (google.protobuf.timestamp_pb2.Timestamp): - The time the aspect was last updated in the - source system. - data_version (str): - The version of the data format used to - produce this data. This field is used to - indicated when the underlying data format - changes (e.g., schema modifications, changes to - the source URL format definition, etc). 
- """ - - create_time: timestamp_pb2.Timestamp = proto.Field( - proto.MESSAGE, - number=10, - message=timestamp_pb2.Timestamp, - ) - update_time: timestamp_pb2.Timestamp = proto.Field( - proto.MESSAGE, - number=11, - message=timestamp_pb2.Timestamp, - ) - data_version: str = proto.Field( - proto.STRING, - number=12, - ) - - -class Entry(proto.Message): - r"""An entry is a representation of a data resource that can be - described by various metadata. - - Attributes: - name (str): - Identifier. The relative resource name of the entry, in the - format - ``projects/{project_id_or_number}/locations/{location_id}/entryGroups/{entry_group_id}/entries/{entry_id}``. - entry_type (str): - Required. Immutable. The relative resource name of the entry - type that was used to create this entry, in the format - ``projects/{project_id_or_number}/locations/{location_id}/entryTypes/{entry_type_id}``. - create_time (google.protobuf.timestamp_pb2.Timestamp): - Output only. The time when the entry was - created in Dataplex. - update_time (google.protobuf.timestamp_pb2.Timestamp): - Output only. The time when the entry was last - updated in Dataplex. - aspects (MutableMapping[str, google.cloud.dataplex_v1.types.Aspect]): - Optional. The aspects that are attached to the entry. - Depending on how the aspect is attached to the entry, the - format of the aspect key can be one of the following: - - - If the aspect is attached directly to the entry: - ``{project_id_or_number}.{location_id}.{aspect_type_id}`` - - If the aspect is attached to an entry's path: - ``{project_id_or_number}.{location_id}.{aspect_type_id}@{path}`` - parent_entry (str): - Optional. Immutable. The resource name of the - parent entry. - fully_qualified_name (str): - Optional. A name for the entry that can be referenced by an - external system. For more information, see `Fully qualified - names `__. - The maximum size of the field is 4000 characters. - entry_source (google.cloud.dataplex_v1.types.EntrySource): - Optional. Information related to the source - system of the data resource that is represented - by the entry. - """ - - name: str = proto.Field( - proto.STRING, - number=1, - ) - entry_type: str = proto.Field( - proto.STRING, - number=4, - ) - create_time: timestamp_pb2.Timestamp = proto.Field( - proto.MESSAGE, - number=5, - message=timestamp_pb2.Timestamp, - ) - update_time: timestamp_pb2.Timestamp = proto.Field( - proto.MESSAGE, - number=6, - message=timestamp_pb2.Timestamp, - ) - aspects: MutableMapping[str, 'Aspect'] = proto.MapField( - proto.STRING, - proto.MESSAGE, - number=9, - message='Aspect', - ) - parent_entry: str = proto.Field( - proto.STRING, - number=10, - ) - fully_qualified_name: str = proto.Field( - proto.STRING, - number=12, - ) - entry_source: 'EntrySource' = proto.Field( - proto.MESSAGE, - number=15, - message='EntrySource', - ) - - -class EntrySource(proto.Message): - r"""Information related to the source system of the data resource - that is represented by the entry. - - Attributes: - resource (str): - The name of the resource in the source - system. Maximum length is 4,000 characters. - system (str): - The name of the source system. - Maximum length is 64 characters. - platform (str): - The platform containing the source system. - Maximum length is 64 characters. - display_name (str): - A user-friendly display name. - Maximum length is 500 characters. - description (str): - A description of the data resource. - Maximum length is 2,000 characters. - labels (MutableMapping[str, str]): - User-defined labels. 
- The maximum size of keys and values is 128 - characters each. - ancestors (MutableSequence[google.cloud.dataplex_v1.types.EntrySource.Ancestor]): - Immutable. The entries representing the - ancestors of the data resource in the source - system. - create_time (google.protobuf.timestamp_pb2.Timestamp): - The time when the resource was created in the - source system. - update_time (google.protobuf.timestamp_pb2.Timestamp): - The time when the resource was last updated in the source - system. If the entry exists in the system and its - ``EntrySource`` has ``update_time`` populated, further - updates to the ``EntrySource`` of the entry must provide - incremental updates to its ``update_time``. - location (str): - Output only. Location of the resource in the - source system. You can search the entry by this - location. By default, this should match the - location of the entry group containing this - entry. A different value allows capturing the - source location for data external to Google - Cloud. - """ - - class Ancestor(proto.Message): - r"""Information about individual items in the hierarchy that is - associated with the data resource. - - Attributes: - name (str): - Optional. The name of the ancestor resource. - type_ (str): - Optional. The type of the ancestor resource. - """ - - name: str = proto.Field( - proto.STRING, - number=1, - ) - type_: str = proto.Field( - proto.STRING, - number=2, - ) - - resource: str = proto.Field( - proto.STRING, - number=1, - ) - system: str = proto.Field( - proto.STRING, - number=2, - ) - platform: str = proto.Field( - proto.STRING, - number=3, - ) - display_name: str = proto.Field( - proto.STRING, - number=5, - ) - description: str = proto.Field( - proto.STRING, - number=6, - ) - labels: MutableMapping[str, str] = proto.MapField( - proto.STRING, - proto.STRING, - number=7, - ) - ancestors: MutableSequence[Ancestor] = proto.RepeatedField( - proto.MESSAGE, - number=9, - message=Ancestor, - ) - create_time: timestamp_pb2.Timestamp = proto.Field( - proto.MESSAGE, - number=10, - message=timestamp_pb2.Timestamp, - ) - update_time: timestamp_pb2.Timestamp = proto.Field( - proto.MESSAGE, - number=11, - message=timestamp_pb2.Timestamp, - ) - location: str = proto.Field( - proto.STRING, - number=12, - ) - - -class CreateEntryGroupRequest(proto.Message): - r"""Create EntryGroup Request. - - Attributes: - parent (str): - Required. The resource name of the entryGroup, of the form: - projects/{project_number}/locations/{location_id} where - ``location_id`` refers to a GCP region. - entry_group_id (str): - Required. EntryGroup identifier. - entry_group (google.cloud.dataplex_v1.types.EntryGroup): - Required. EntryGroup Resource. - validate_only (bool): - Optional. The service validates the request - without performing any mutations. The default is - false. - """ - - parent: str = proto.Field( - proto.STRING, - number=1, - ) - entry_group_id: str = proto.Field( - proto.STRING, - number=2, - ) - entry_group: 'EntryGroup' = proto.Field( - proto.MESSAGE, - number=3, - message='EntryGroup', - ) - validate_only: bool = proto.Field( - proto.BOOL, - number=4, - ) - - -class UpdateEntryGroupRequest(proto.Message): - r"""Update EntryGroup Request. - - Attributes: - entry_group (google.cloud.dataplex_v1.types.EntryGroup): - Required. EntryGroup Resource. - update_mask (google.protobuf.field_mask_pb2.FieldMask): - Required. Mask of fields to update. - validate_only (bool): - Optional. The service validates the request, - without performing any mutations. The default is - false. 
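A minimal sketch of the update request documented above, assuming google-cloud-dataplex is installed; the resource name is a placeholder:

    from google.cloud import dataplex_v1
    from google.protobuf import field_mask_pb2

    request = dataplex_v1.UpdateEntryGroupRequest(
        entry_group=dataplex_v1.EntryGroup(
            name="projects/example-project/locations/us-central1/entryGroups/example-group",
            description="Curated analytics tables",
        ),
        update_mask=field_mask_pb2.FieldMask(paths=["description"]),
        validate_only=True,  # dry run: the service validates without mutating
    )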
- """ - - entry_group: 'EntryGroup' = proto.Field( - proto.MESSAGE, - number=1, - message='EntryGroup', - ) - update_mask: field_mask_pb2.FieldMask = proto.Field( - proto.MESSAGE, - number=2, - message=field_mask_pb2.FieldMask, - ) - validate_only: bool = proto.Field( - proto.BOOL, - number=3, - ) - - -class DeleteEntryGroupRequest(proto.Message): - r"""Delete EntryGroup Request. - - Attributes: - name (str): - Required. The resource name of the EntryGroup: - ``projects/{project_number}/locations/{location_id}/entryGroups/{entry_group_id}``. - etag (str): - Optional. If the client provided etag value - does not match the current etag value, the - DeleteEntryGroupRequest method returns an - ABORTED error response. - """ - - name: str = proto.Field( - proto.STRING, - number=1, - ) - etag: str = proto.Field( - proto.STRING, - number=2, - ) - - -class ListEntryGroupsRequest(proto.Message): - r"""List entryGroups request. - - Attributes: - parent (str): - Required. The resource name of the entryGroup location, of - the form: - ``projects/{project_number}/locations/{location_id}`` where - ``location_id`` refers to a Google Cloud region. - page_size (int): - Optional. Maximum number of EntryGroups to - return. The service may return fewer than this - value. If unspecified, the service returns at - most 10 EntryGroups. The maximum value is 1000; - values above 1000 will be coerced to 1000. - page_token (str): - Optional. Page token received from a previous - ``ListEntryGroups`` call. Provide this to retrieve the - subsequent page. When paginating, all other parameters you - provide to ``ListEntryGroups`` must match the call that - provided the page token. - filter (str): - Optional. Filter request. - order_by (str): - Optional. Order by fields for the result. - """ - - parent: str = proto.Field( - proto.STRING, - number=1, - ) - page_size: int = proto.Field( - proto.INT32, - number=2, - ) - page_token: str = proto.Field( - proto.STRING, - number=3, - ) - filter: str = proto.Field( - proto.STRING, - number=4, - ) - order_by: str = proto.Field( - proto.STRING, - number=5, - ) - - -class ListEntryGroupsResponse(proto.Message): - r"""List entry groups response. - - Attributes: - entry_groups (MutableSequence[google.cloud.dataplex_v1.types.EntryGroup]): - Entry groups under the given parent location. - next_page_token (str): - Token to retrieve the next page of results, - or empty if there are no more results in the - list. - unreachable_locations (MutableSequence[str]): - Locations that the service couldn't reach. - """ - - @property - def raw_page(self): - return self - - entry_groups: MutableSequence['EntryGroup'] = proto.RepeatedField( - proto.MESSAGE, - number=1, - message='EntryGroup', - ) - next_page_token: str = proto.Field( - proto.STRING, - number=2, - ) - unreachable_locations: MutableSequence[str] = proto.RepeatedField( - proto.STRING, - number=3, - ) - - -class GetEntryGroupRequest(proto.Message): - r"""Get EntryGroup request. - - Attributes: - name (str): - Required. The resource name of the EntryGroup: - ``projects/{project_number}/locations/{location_id}/entryGroups/{entry_group_id}``. - """ - - name: str = proto.Field( - proto.STRING, - number=1, - ) - - -class CreateEntryTypeRequest(proto.Message): - r"""Create EntryType Request. - - Attributes: - parent (str): - Required. The resource name of the EntryType, of the form: - projects/{project_number}/locations/{location_id} where - ``location_id`` refers to a Google Cloud region. - entry_type_id (str): - Required. EntryType identifier. 
- entry_type (google.cloud.dataplex_v1.types.EntryType):
- Required. EntryType Resource.
- validate_only (bool):
- Optional. The service validates the request
- without performing any mutations. The default is
- false.
- """
-
- parent: str = proto.Field(
- proto.STRING,
- number=1,
- )
- entry_type_id: str = proto.Field(
- proto.STRING,
- number=2,
- )
- entry_type: 'EntryType' = proto.Field(
- proto.MESSAGE,
- number=3,
- message='EntryType',
- )
- validate_only: bool = proto.Field(
- proto.BOOL,
- number=4,
- )
-
-
-class UpdateEntryTypeRequest(proto.Message):
- r"""Update EntryType Request.
-
- Attributes:
- entry_type (google.cloud.dataplex_v1.types.EntryType):
- Required. EntryType Resource.
- update_mask (google.protobuf.field_mask_pb2.FieldMask):
- Required. Mask of fields to update.
- validate_only (bool):
- Optional. The service validates the request
- without performing any mutations. The default is
- false.
- """
-
- entry_type: 'EntryType' = proto.Field(
- proto.MESSAGE,
- number=1,
- message='EntryType',
- )
- update_mask: field_mask_pb2.FieldMask = proto.Field(
- proto.MESSAGE,
- number=2,
- message=field_mask_pb2.FieldMask,
- )
- validate_only: bool = proto.Field(
- proto.BOOL,
- number=3,
- )
-
-
-class DeleteEntryTypeRequest(proto.Message):
- r"""Delete EntryType Request.
-
- Attributes:
- name (str):
- Required. The resource name of the EntryType:
- ``projects/{project_number}/locations/{location_id}/entryTypes/{entry_type_id}``.
- etag (str):
- Optional. If the client provided etag value
- does not match the current etag value, the
- DeleteEntryTypeRequest method returns an ABORTED
- error response.
- """
-
- name: str = proto.Field(
- proto.STRING,
- number=1,
- )
- etag: str = proto.Field(
- proto.STRING,
- number=2,
- )
-
-
-class ListEntryTypesRequest(proto.Message):
- r"""List EntryTypes request.
-
- Attributes:
- parent (str):
- Required. The resource name of the EntryType location, of
- the form:
- ``projects/{project_number}/locations/{location_id}`` where
- ``location_id`` refers to a Google Cloud region.
- page_size (int):
- Optional. Maximum number of EntryTypes to
- return. The service may return fewer than this
- value. If unspecified, the service returns at
- most 10 EntryTypes. The maximum value is 1000;
- values above 1000 will be coerced to 1000.
- page_token (str):
- Optional. Page token received from a previous
- ``ListEntryTypes`` call. Provide this to retrieve the
- subsequent page. When paginating, all other parameters you
- provide to ``ListEntryTypes`` must match the call that
- provided the page token.
- filter (str):
- Optional. Filter request. Filters are case-sensitive. The
- service supports the following formats:
-
- - labels.key1 = "value1"
- - labels:key1
- - name = "value"
-
- These restrictions can be conjoined with AND, OR, and NOT
- conjunctions.
- order_by (str):
- Optional. Orders the result by ``name`` or ``create_time``
- fields. If not specified, the ordering is undefined.
- """
-
- parent: str = proto.Field(
- proto.STRING,
- number=1,
- )
- page_size: int = proto.Field(
- proto.INT32,
- number=2,
- )
- page_token: str = proto.Field(
- proto.STRING,
- number=3,
- )
- filter: str = proto.Field(
- proto.STRING,
- number=4,
- )
- order_by: str = proto.Field(
- proto.STRING,
- number=5,
- )
-
-
-class ListEntryTypesResponse(proto.Message):
- r"""List EntryTypes response.
-
- Attributes:
- entry_types (MutableSequence[google.cloud.dataplex_v1.types.EntryType]):
- EntryTypes under the given parent location.
- next_page_token (str):
- Token to retrieve the next page of results,
- or empty if there are no more results in the
- list.
- unreachable_locations (MutableSequence[str]):
- Locations that the service couldn't reach.
- """
-
- @property
- def raw_page(self):
- return self
-
- entry_types: MutableSequence['EntryType'] = proto.RepeatedField(
- proto.MESSAGE,
- number=1,
- message='EntryType',
- )
- next_page_token: str = proto.Field(
- proto.STRING,
- number=2,
- )
- unreachable_locations: MutableSequence[str] = proto.RepeatedField(
- proto.STRING,
- number=3,
- )
-
-
-class GetEntryTypeRequest(proto.Message):
- r"""Get EntryType request.
-
- Attributes:
- name (str):
- Required. The resource name of the EntryType:
- ``projects/{project_number}/locations/{location_id}/entryTypes/{entry_type_id}``.
- """
-
- name: str = proto.Field(
- proto.STRING,
- number=1,
- )
-
-
-class CreateAspectTypeRequest(proto.Message):
- r"""Create AspectType Request.
-
- Attributes:
- parent (str):
- Required. The resource name of the AspectType, of the form:
- projects/{project_number}/locations/{location_id} where
- ``location_id`` refers to a Google Cloud region.
- aspect_type_id (str):
- Required. AspectType identifier.
- aspect_type (google.cloud.dataplex_v1.types.AspectType):
- Required. AspectType Resource.
- validate_only (bool):
- Optional. The service validates the request
- without performing any mutations. The default is
- false.
- """
-
- parent: str = proto.Field(
- proto.STRING,
- number=1,
- )
- aspect_type_id: str = proto.Field(
- proto.STRING,
- number=2,
- )
- aspect_type: 'AspectType' = proto.Field(
- proto.MESSAGE,
- number=3,
- message='AspectType',
- )
- validate_only: bool = proto.Field(
- proto.BOOL,
- number=4,
- )
-
-
-class UpdateAspectTypeRequest(proto.Message):
- r"""Update AspectType Request.
-
- Attributes:
- aspect_type (google.cloud.dataplex_v1.types.AspectType):
- Required. AspectType Resource.
- update_mask (google.protobuf.field_mask_pb2.FieldMask):
- Required. Mask of fields to update.
- validate_only (bool):
- Optional. Only validate the request, but do
- not perform mutations. The default is false.
- """
-
- aspect_type: 'AspectType' = proto.Field(
- proto.MESSAGE,
- number=1,
- message='AspectType',
- )
- update_mask: field_mask_pb2.FieldMask = proto.Field(
- proto.MESSAGE,
- number=2,
- message=field_mask_pb2.FieldMask,
- )
- validate_only: bool = proto.Field(
- proto.BOOL,
- number=3,
- )
-
-
-class DeleteAspectTypeRequest(proto.Message):
- r"""Delete AspectType Request.
-
- Attributes:
- name (str):
- Required. The resource name of the AspectType:
- ``projects/{project_number}/locations/{location_id}/aspectTypes/{aspect_type_id}``.
- etag (str):
- Optional. If the client provided etag value
- does not match the current etag value, the
- DeleteAspectTypeRequest method returns an
- ABORTED error response.
- """
-
- name: str = proto.Field(
- proto.STRING,
- number=1,
- )
- etag: str = proto.Field(
- proto.STRING,
- number=2,
- )
-
-
-class ListAspectTypesRequest(proto.Message):
- r"""List AspectTypes request.
-
- Attributes:
- parent (str):
- Required. The resource name of the AspectType location, of
- the form:
- ``projects/{project_number}/locations/{location_id}`` where
- ``location_id`` refers to a Google Cloud region.
- page_size (int):
- Optional. Maximum number of AspectTypes to
- return. The service may return fewer than this
- value. If unspecified, the service returns at
- most 10 AspectTypes.
The maximum value is 1000; - values above 1000 will be coerced to 1000. - page_token (str): - Optional. Page token received from a previous - ``ListAspectTypes`` call. Provide this to retrieve the - subsequent page. When paginating, all other parameters you - provide to ``ListAspectTypes`` must match the call that - provided the page token. - filter (str): - Optional. Filter request. Filters are case-sensitive. The - service supports the following formats: - - - labels.key1 = "value1" - - labels:key1 - - name = "value" - - These restrictions can be conjoined with AND, OR, and NOT - conjunctions. - order_by (str): - Optional. Orders the result by ``name`` or ``create_time`` - fields. If not specified, the ordering is undefined. - """ - - parent: str = proto.Field( - proto.STRING, - number=1, - ) - page_size: int = proto.Field( - proto.INT32, - number=2, - ) - page_token: str = proto.Field( - proto.STRING, - number=3, - ) - filter: str = proto.Field( - proto.STRING, - number=4, - ) - order_by: str = proto.Field( - proto.STRING, - number=5, - ) - - -class ListAspectTypesResponse(proto.Message): - r"""List AspectTypes response. - - Attributes: - aspect_types (MutableSequence[google.cloud.dataplex_v1.types.AspectType]): - AspectTypes under the given parent location. - next_page_token (str): - Token to retrieve the next page of results, - or empty if there are no more results in the - list. - unreachable_locations (MutableSequence[str]): - Locations that the service couldn't reach. - """ - - @property - def raw_page(self): - return self - - aspect_types: MutableSequence['AspectType'] = proto.RepeatedField( - proto.MESSAGE, - number=1, - message='AspectType', - ) - next_page_token: str = proto.Field( - proto.STRING, - number=2, - ) - unreachable_locations: MutableSequence[str] = proto.RepeatedField( - proto.STRING, - number=3, - ) - - -class GetAspectTypeRequest(proto.Message): - r"""Get AspectType request. - - Attributes: - name (str): - Required. The resource name of the AspectType: - ``projects/{project_number}/locations/{location_id}/aspectTypes/{aspect_type_id}``. - """ - - name: str = proto.Field( - proto.STRING, - number=1, - ) - - -class CreateEntryRequest(proto.Message): - r"""Create Entry request. - - Attributes: - parent (str): - Required. The resource name of the parent Entry Group: - ``projects/{project}/locations/{location}/entryGroups/{entry_group}``. - entry_id (str): - Required. Entry identifier. It has to be unique within an - Entry Group. - - Entries corresponding to Google Cloud resources use an Entry - ID format based on `full resource - names `__. - The format is a full resource name of the resource without - the prefix double slashes in the API service name part of - the full resource name. This allows retrieval of entries - using their associated resource name. - - For example, if the full resource name of a resource is - ``//library.googleapis.com/shelves/shelf1/books/book2``, - then the suggested entry_id is - ``library.googleapis.com/shelves/shelf1/books/book2``. - - It is also suggested to follow the same convention for - entries corresponding to resources from providers or systems - other than Google Cloud. - - The maximum size of the field is 4000 characters. - entry (google.cloud.dataplex_v1.types.Entry): - Required. Entry resource. 
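Applying the entry_id convention described above to the documentation's own library example, a minimal sketch (project, location, and type names are placeholders):

    from google.cloud import dataplex_v1

    request = dataplex_v1.CreateEntryRequest(
        parent="projects/example-project/locations/us-central1/entryGroups/example-group",
        # Full resource name //library.googleapis.com/shelves/shelf1/books/book2,
        # minus the leading double slashes.
        entry_id="library.googleapis.com/shelves/shelf1/books/book2",
        entry=dataplex_v1.Entry(
            entry_type="projects/example-project/locations/us-central1/entryTypes/example-type",
        ),
    )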
- """ - - parent: str = proto.Field( - proto.STRING, - number=1, - ) - entry_id: str = proto.Field( - proto.STRING, - number=2, - ) - entry: 'Entry' = proto.Field( - proto.MESSAGE, - number=3, - message='Entry', - ) - - -class UpdateEntryRequest(proto.Message): - r"""Update Entry request. - - Attributes: - entry (google.cloud.dataplex_v1.types.Entry): - Required. Entry resource. - update_mask (google.protobuf.field_mask_pb2.FieldMask): - Optional. Mask of fields to update. To update Aspects, the - update_mask must contain the value "aspects". - - If the update_mask is empty, the service will update all - modifiable fields present in the request. - allow_missing (bool): - Optional. If set to true and the entry - doesn't exist, the service will create it. - delete_missing_aspects (bool): - Optional. If set to true and the aspect_keys specify aspect - ranges, the service deletes any existing aspects from that - range that weren't provided in the request. - aspect_keys (MutableSequence[str]): - Optional. The map keys of the Aspects which the service - should modify. It supports the following syntaxes: - - - ```` - matches an aspect of the - given type and empty path. - - ``@path`` - matches an aspect of - the given type and specified path. For example, to attach - an aspect to a field that is specified by the ``schema`` - aspect, the path should have the format - ``Schema.``. - - ``*`` - matches aspects of the - given type for all paths. - - ``*@path`` - matches aspects of all types on the given - path. - - The service will not remove existing aspects matching the - syntax unless ``delete_missing_aspects`` is set to true. - - If this field is left empty, the service treats it as - specifying exactly those Aspects present in the request. - """ - - entry: 'Entry' = proto.Field( - proto.MESSAGE, - number=1, - message='Entry', - ) - update_mask: field_mask_pb2.FieldMask = proto.Field( - proto.MESSAGE, - number=2, - message=field_mask_pb2.FieldMask, - ) - allow_missing: bool = proto.Field( - proto.BOOL, - number=3, - ) - delete_missing_aspects: bool = proto.Field( - proto.BOOL, - number=4, - ) - aspect_keys: MutableSequence[str] = proto.RepeatedField( - proto.STRING, - number=5, - ) - - -class DeleteEntryRequest(proto.Message): - r"""Delete Entry request. - - Attributes: - name (str): - Required. The resource name of the Entry: - ``projects/{project}/locations/{location}/entryGroups/{entry_group}/entries/{entry}``. - """ - - name: str = proto.Field( - proto.STRING, - number=1, - ) - - -class ListEntriesRequest(proto.Message): - r"""List Entries request. - - Attributes: - parent (str): - Required. The resource name of the parent Entry Group: - ``projects/{project}/locations/{location}/entryGroups/{entry_group}``. - page_size (int): - Optional. Number of items to return per page. If there are - remaining results, the service returns a next_page_token. If - unspecified, the service returns at most 10 Entries. The - maximum value is 100; values above 100 will be coerced to - 100. - page_token (str): - Optional. Page token received from a previous - ``ListEntries`` call. Provide this to retrieve the - subsequent page. - filter (str): - Optional. A filter on the entries to return. Filters are - case-sensitive. You can filter the request by the following - fields: - - - entry_type - - entry_source.display_name - - The comparison operators are =, !=, <, >, <=, >=. The - service compares strings according to lexical order. - - You can use the logical operators AND, OR, NOT in the - filter. 
- - You can use Wildcard "*", but for entry_type you need to - provide the full project id or number. - - Example filter expressions: - - - "entry_source.display_name=AnExampleDisplayName" - - "entry_type=projects/example-project/locations/global/entryTypes/example-entry_type" - - "entry_type=projects/example-project/locations/us/entryTypes/a\* - OR entry_type=projects/another-project/locations/*" - - "NOT entry_source.display_name=AnotherExampleDisplayName". - """ - - parent: str = proto.Field( - proto.STRING, - number=1, - ) - page_size: int = proto.Field( - proto.INT32, - number=2, - ) - page_token: str = proto.Field( - proto.STRING, - number=3, - ) - filter: str = proto.Field( - proto.STRING, - number=4, - ) - - -class ListEntriesResponse(proto.Message): - r"""List Entries response. - - Attributes: - entries (MutableSequence[google.cloud.dataplex_v1.types.Entry]): - The list of entries under the given parent - location. - next_page_token (str): - Token to retrieve the next page of results, - or empty if there are no more results in the - list. - """ - - @property - def raw_page(self): - return self - - entries: MutableSequence['Entry'] = proto.RepeatedField( - proto.MESSAGE, - number=1, - message='Entry', - ) - next_page_token: str = proto.Field( - proto.STRING, - number=2, - ) - - -class GetEntryRequest(proto.Message): - r"""Get Entry request. - - Attributes: - name (str): - Required. The resource name of the Entry: - ``projects/{project}/locations/{location}/entryGroups/{entry_group}/entries/{entry}``. - view (google.cloud.dataplex_v1.types.EntryView): - Optional. View to control which parts of an - entry the service should return. - aspect_types (MutableSequence[str]): - Optional. Limits the aspects returned to the - provided aspect types. It only works for CUSTOM - view. - paths (MutableSequence[str]): - Optional. Limits the aspects returned to - those associated with the provided paths within - the Entry. It only works for CUSTOM view. - """ - - name: str = proto.Field( - proto.STRING, - number=1, - ) - view: 'EntryView' = proto.Field( - proto.ENUM, - number=2, - enum='EntryView', - ) - aspect_types: MutableSequence[str] = proto.RepeatedField( - proto.STRING, - number=3, - ) - paths: MutableSequence[str] = proto.RepeatedField( - proto.STRING, - number=4, - ) - - -class LookupEntryRequest(proto.Message): - r"""Lookup Entry request using permissions in the source system. - - Attributes: - name (str): - Required. The project to which the request should be - attributed in the following form: - ``projects/{project}/locations/{location}``. - view (google.cloud.dataplex_v1.types.EntryView): - Optional. View to control which parts of an - entry the service should return. - aspect_types (MutableSequence[str]): - Optional. Limits the aspects returned to the - provided aspect types. It only works for CUSTOM - view. - paths (MutableSequence[str]): - Optional. Limits the aspects returned to - those associated with the provided paths within - the Entry. It only works for CUSTOM view. - entry (str): - Required. The resource name of the Entry: - ``projects/{project}/locations/{location}/entryGroups/{entry_group}/entries/{entry}``. 
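A short sketch of the CUSTOM view described above, which limits the returned aspects to the listed aspect types; all resource names are placeholders:

    from google.cloud import dataplex_v1

    request = dataplex_v1.GetEntryRequest(
        name="projects/example-project/locations/us-central1/entryGroups/example-group/entries/example-entry",
        view=dataplex_v1.EntryView.CUSTOM,
        aspect_types=[
            "projects/example-project/locations/us-central1/aspectTypes/example-aspect-type",
        ],
    )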
- """ - - name: str = proto.Field( - proto.STRING, - number=1, - ) - view: 'EntryView' = proto.Field( - proto.ENUM, - number=2, - enum='EntryView', - ) - aspect_types: MutableSequence[str] = proto.RepeatedField( - proto.STRING, - number=3, - ) - paths: MutableSequence[str] = proto.RepeatedField( - proto.STRING, - number=4, - ) - entry: str = proto.Field( - proto.STRING, - number=5, - ) - - -class SearchEntriesRequest(proto.Message): - r""" - - Attributes: - name (str): - Required. The project to which the request should be - attributed in the following form: - ``projects/{project}/locations/{location}``. - query (str): - Required. The query against which entries in - scope should be matched. - page_size (int): - Optional. Number of results in the search page. If <=0, then - defaults to 10. Max limit for page_size is 1000. Throws an - invalid argument for page_size > 1000. - page_token (str): - Optional. Page token received from a previous - ``SearchEntries`` call. Provide this to retrieve the - subsequent page. - order_by (str): - Optional. Specifies the ordering of results. - scope (str): - Optional. The scope under which the search should be - operating. It must either be ``organizations/`` or - ``projects/``. If it is unspecified, it - defaults to the organization where the project provided in - ``name`` is located. - """ - - name: str = proto.Field( - proto.STRING, - number=1, - ) - query: str = proto.Field( - proto.STRING, - number=2, - ) - page_size: int = proto.Field( - proto.INT32, - number=3, - ) - page_token: str = proto.Field( - proto.STRING, - number=4, - ) - order_by: str = proto.Field( - proto.STRING, - number=5, - ) - scope: str = proto.Field( - proto.STRING, - number=7, - ) - - -class SearchEntriesResult(proto.Message): - r"""A single result of a SearchEntries request. - - Attributes: - linked_resource (str): - Linked resource name. - dataplex_entry (google.cloud.dataplex_v1.types.Entry): - - snippets (google.cloud.dataplex_v1.types.SearchEntriesResult.Snippets): - Snippets. - """ - - class Snippets(proto.Message): - r"""Snippets for the entry, contains HTML-style highlighting for - matched tokens, will be used in UI. - - Attributes: - dataplex_entry (google.cloud.dataplex_v1.types.Entry): - Entry - """ - - dataplex_entry: 'Entry' = proto.Field( - proto.MESSAGE, - number=1, - message='Entry', - ) - - linked_resource: str = proto.Field( - proto.STRING, - number=8, - ) - dataplex_entry: 'Entry' = proto.Field( - proto.MESSAGE, - number=9, - message='Entry', - ) - snippets: Snippets = proto.Field( - proto.MESSAGE, - number=12, - message=Snippets, - ) - - -class SearchEntriesResponse(proto.Message): - r""" - - Attributes: - results (MutableSequence[google.cloud.dataplex_v1.types.SearchEntriesResult]): - The results matching the search query. - total_size (int): - The estimated total number of matching - entries. This number isn't guaranteed to be - accurate. - next_page_token (str): - Token to retrieve the next page of results, - or empty if there are no more results in the - list. - unreachable (MutableSequence[str]): - Locations that the service couldn't reach. - Search results don't include data from these - locations. 
- """ - - @property - def raw_page(self): - return self - - results: MutableSequence['SearchEntriesResult'] = proto.RepeatedField( - proto.MESSAGE, - number=1, - message='SearchEntriesResult', - ) - total_size: int = proto.Field( - proto.INT32, - number=2, - ) - next_page_token: str = proto.Field( - proto.STRING, - number=3, - ) - unreachable: MutableSequence[str] = proto.RepeatedField( - proto.STRING, - number=4, - ) - - -class ImportItem(proto.Message): - r"""An object that describes the values that you want to set for an - entry and its attached aspects when you import metadata. Used when - you run a metadata import job. See - [CreateMetadataJob][google.cloud.dataplex.v1.CatalogService.CreateMetadataJob]. - - You provide a collection of import items in a metadata import file. - For more information about how to create a metadata import file, see - `Metadata import - file `__. - - Attributes: - entry (google.cloud.dataplex_v1.types.Entry): - Information about an entry and its attached - aspects. - update_mask (google.protobuf.field_mask_pb2.FieldMask): - The fields to update, in paths that are relative to the - ``Entry`` resource. Separate each field with a comma. - - In ``FULL`` entry sync mode, Dataplex includes the paths of - all of the fields for an entry that can be modified, - including aspects. This means that Dataplex replaces the - existing entry with the entry in the metadata import file. - All modifiable fields are updated, regardless of the fields - that are listed in the update mask, and regardless of - whether a field is present in the ``entry`` object. - - The ``update_mask`` field is ignored when an entry is - created or re-created. - - Dataplex also determines which entries and aspects to modify - by comparing the values and timestamps that you provide in - the metadata import file with the values and timestamps that - exist in your project. For more information, see `Comparison - logic `__. - aspect_keys (MutableSequence[str]): - The aspects to modify. Supports the following syntaxes: - - - ``{aspect_type_reference}``: matches aspects that belong - to the specified aspect type and are attached directly to - the entry. - - ``{aspect_type_reference}@{path}``: matches aspects that - belong to the specified aspect type and path. - - ``{aspect_type_reference}@*``: matches aspects that - belong to the specified aspect type for all paths. - - Replace ``{aspect_type_reference}`` with a reference to the - aspect type, in the format - ``{project_id_or_number}.{location_id}.{aspect_type_id}``. - - If you leave this field empty, it is treated as specifying - exactly those aspects that are present within the specified - entry. - - In ``FULL`` entry sync mode, Dataplex implicitly adds the - keys for all of the required aspects of an entry. - """ - - entry: 'Entry' = proto.Field( - proto.MESSAGE, - number=1, - message='Entry', - ) - update_mask: field_mask_pb2.FieldMask = proto.Field( - proto.MESSAGE, - number=2, - message=field_mask_pb2.FieldMask, - ) - aspect_keys: MutableSequence[str] = proto.RepeatedField( - proto.STRING, - number=3, - ) - - -class CreateMetadataJobRequest(proto.Message): - r"""Create metadata job request. - - Attributes: - parent (str): - Required. The resource name of the parent location, in the - format - ``projects/{project_id_or_number}/locations/{location_id}`` - metadata_job (google.cloud.dataplex_v1.types.MetadataJob): - Required. The metadata job resource. - metadata_job_id (str): - Optional. The metadata job ID. 
If not provided, a unique ID - is generated with the prefix ``metadata-job-``. - validate_only (bool): - Optional. The service validates the request - without performing any mutations. The default is - false. - """ - - parent: str = proto.Field( - proto.STRING, - number=1, - ) - metadata_job: 'MetadataJob' = proto.Field( - proto.MESSAGE, - number=2, - message='MetadataJob', - ) - metadata_job_id: str = proto.Field( - proto.STRING, - number=3, - ) - validate_only: bool = proto.Field( - proto.BOOL, - number=4, - ) - - -class GetMetadataJobRequest(proto.Message): - r"""Get metadata job request. - - Attributes: - name (str): - Required. The resource name of the metadata job, in the - format - ``projects/{project_id_or_number}/locations/{location_id}/metadataJobs/{metadata_job_id}``. - """ - - name: str = proto.Field( - proto.STRING, - number=1, - ) - - -class ListMetadataJobsRequest(proto.Message): - r"""List metadata jobs request. - - Attributes: - parent (str): - Required. The resource name of the parent location, in the - format - ``projects/{project_id_or_number}/locations/{location_id}`` - page_size (int): - Optional. The maximum number of metadata jobs - to return. The service might return fewer jobs - than this value. If unspecified, at most 10 jobs - are returned. The maximum value is 1,000. - page_token (str): - Optional. The page token received from a previous - ``ListMetadataJobs`` call. Provide this token to retrieve - the subsequent page of results. When paginating, all other - parameters that are provided to the ``ListMetadataJobs`` - request must match the call that provided the page token. - filter (str): - Optional. Filter request. Filters are case-sensitive. The - service supports the following formats: - - - ``labels.key1 = "value1"`` - - ``labels:key1`` - - ``name = "value"`` - - You can combine filters with ``AND``, ``OR``, and ``NOT`` - operators. - order_by (str): - Optional. The field to sort the results by, either ``name`` - or ``create_time``. If not specified, the ordering is - undefined. - """ - - parent: str = proto.Field( - proto.STRING, - number=1, - ) - page_size: int = proto.Field( - proto.INT32, - number=2, - ) - page_token: str = proto.Field( - proto.STRING, - number=3, - ) - filter: str = proto.Field( - proto.STRING, - number=4, - ) - order_by: str = proto.Field( - proto.STRING, - number=5, - ) - - -class ListMetadataJobsResponse(proto.Message): - r"""List metadata jobs response. - - Attributes: - metadata_jobs (MutableSequence[google.cloud.dataplex_v1.types.MetadataJob]): - Metadata jobs under the specified parent - location. - next_page_token (str): - A token to retrieve the next page of results. - If there are no more results in the list, the - value is empty. - unreachable_locations (MutableSequence[str]): - Locations that the service couldn't reach. - """ - - @property - def raw_page(self): - return self - - metadata_jobs: MutableSequence['MetadataJob'] = proto.RepeatedField( - proto.MESSAGE, - number=1, - message='MetadataJob', - ) - next_page_token: str = proto.Field( - proto.STRING, - number=2, - ) - unreachable_locations: MutableSequence[str] = proto.RepeatedField( - proto.STRING, - number=3, - ) - - -class CancelMetadataJobRequest(proto.Message): - r"""Cancel metadata job request. - - Attributes: - name (str): - Required. 
The resource name of the job, in the format - ``projects/{project_id_or_number}/locations/{location_id}/metadataJobs/{metadata_job_id}`` - """ - - name: str = proto.Field( - proto.STRING, - number=1, - ) - - -class MetadataJob(proto.Message): - r"""A metadata job resource. - - .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields - - Attributes: - name (str): - Output only. Identifier. The name of the resource that the - configuration is applied to, in the format - ``projects/{project_number}/locations/{location_id}/metadataJobs/{metadata_job_id}``. - uid (str): - Output only. A system-generated, globally - unique ID for the metadata job. If the metadata - job is deleted and then re-created with the same - name, this ID is different. - create_time (google.protobuf.timestamp_pb2.Timestamp): - Output only. The time when the metadata job - was created. - update_time (google.protobuf.timestamp_pb2.Timestamp): - Output only. The time when the metadata job - was updated. - labels (MutableMapping[str, str]): - Optional. User-defined labels. - type_ (google.cloud.dataplex_v1.types.MetadataJob.Type): - Required. Metadata job type. - import_spec (google.cloud.dataplex_v1.types.MetadataJob.ImportJobSpec): - Import job specification. - - This field is a member of `oneof`_ ``spec``. - import_result (google.cloud.dataplex_v1.types.MetadataJob.ImportJobResult): - Output only. Import job result. - - This field is a member of `oneof`_ ``result``. - status (google.cloud.dataplex_v1.types.MetadataJob.Status): - Output only. Metadata job status. - """ - class Type(proto.Enum): - r"""Metadata job type. - - Values: - TYPE_UNSPECIFIED (0): - Unspecified. - IMPORT (1): - Import job. - """ - TYPE_UNSPECIFIED = 0 - IMPORT = 1 - - class ImportJobResult(proto.Message): - r"""Results from a metadata import job. - - Attributes: - deleted_entries (int): - Output only. The total number of entries that - were deleted. - updated_entries (int): - Output only. The total number of entries that - were updated. - created_entries (int): - Output only. The total number of entries that - were created. - unchanged_entries (int): - Output only. The total number of entries that - were unchanged. - recreated_entries (int): - Output only. The total number of entries that - were recreated. - update_time (google.protobuf.timestamp_pb2.Timestamp): - Output only. The time when the status was - updated. - """ - - deleted_entries: int = proto.Field( - proto.INT64, - number=1, - ) - updated_entries: int = proto.Field( - proto.INT64, - number=2, - ) - created_entries: int = proto.Field( - proto.INT64, - number=3, - ) - unchanged_entries: int = proto.Field( - proto.INT64, - number=4, - ) - recreated_entries: int = proto.Field( - proto.INT64, - number=6, - ) - update_time: timestamp_pb2.Timestamp = proto.Field( - proto.MESSAGE, - number=5, - message=timestamp_pb2.Timestamp, - ) - - class ImportJobSpec(proto.Message): - r"""Job specification for a metadata import job - - Attributes: - source_storage_uri (str): - Optional. The URI of a Cloud Storage bucket or folder - (beginning with ``gs://`` and ending with ``/``) that - contains the metadata import files for this job. - - A metadata import file defines the values to set for each of - the entries and aspects in a metadata job. For more - information about how to create a metadata import file and - the file requirements, see `Metadata import - file `__. - - You can provide multiple metadata import files in the same - metadata job. 
The bucket or folder must contain at least one - metadata import file, in JSON Lines format (either ``.json`` - or ``.jsonl`` file extension). - - In ``FULL`` entry sync mode, don't save the metadata import - file in a folder named ``SOURCE_STORAGE_URI/deletions/``. - - **Caution**: If the metadata import file contains no data, - all entries and aspects that belong to the job's scope are - deleted. - source_create_time (google.protobuf.timestamp_pb2.Timestamp): - Optional. The time when the process that - created the metadata import files began. - scope (google.cloud.dataplex_v1.types.MetadataJob.ImportJobSpec.ImportJobScope): - Required. A boundary on the scope of impact - that the metadata import job can have. - entry_sync_mode (google.cloud.dataplex_v1.types.MetadataJob.ImportJobSpec.SyncMode): - Required. The sync mode for entries. Only ``FULL`` mode is - supported for entries. All entries in the job's scope are - modified. If an entry exists in Dataplex but isn't included - in the metadata import file, the entry is deleted when you - run the metadata job. - aspect_sync_mode (google.cloud.dataplex_v1.types.MetadataJob.ImportJobSpec.SyncMode): - Required. The sync mode for aspects. Only ``INCREMENTAL`` - mode is supported for aspects. An aspect is modified only if - the metadata import file includes a reference to the aspect - in the ``update_mask`` field and the ``aspect_keys`` field. - log_level (google.cloud.dataplex_v1.types.MetadataJob.ImportJobSpec.LogLevel): - Optional. The level of logs to write to Cloud Logging for - this job. - - Debug-level logs provide highly-detailed information for - troubleshooting, but their increased verbosity could incur - `additional - costs `__ that - might not be merited for all jobs. - - If unspecified, defaults to ``INFO``. - """ - class SyncMode(proto.Enum): - r"""Specifies how the entries and aspects in a metadata job are - updated. - - Values: - SYNC_MODE_UNSPECIFIED (0): - Sync mode unspecified. - FULL (1): - All resources in the job's scope are - modified. If a resource exists in Dataplex but - isn't included in the metadata import file, the - resource is deleted when you run the metadata - job. Use this mode to perform a full sync of the - set of entries in the job scope. - INCREMENTAL (2): - Only the entries and aspects that are - explicitly included in the metadata import file - are modified. Use this mode to modify a subset - of resources while leaving unreferenced - resources unchanged. - """ - SYNC_MODE_UNSPECIFIED = 0 - FULL = 1 - INCREMENTAL = 2 - - class LogLevel(proto.Enum): - r"""The level of logs to write to Cloud Logging for this job. - - Values: - LOG_LEVEL_UNSPECIFIED (0): - Log level unspecified. - DEBUG (1): - Debug-level logging. Captures detailed logs for each import - item. Use debug-level logging to troubleshoot issues with - specific import items. For example, use debug-level logging - to identify resources that are missing from the job scope, - entries or aspects that don't conform to the associated - entry type or aspect type, or other misconfigurations with - the metadata import file. - - Depending on the size of your metadata job and the number of - logs that are generated, debug-level logging might incur - `additional - costs `__. - INFO (2): - Info-level logging. Captures logs at the - overall job level. Includes aggregate logs about - import items, but doesn't specify which import - item has an error. 
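For example, an import job along these lines could be assembled from the
generated types (a minimal sketch; the project, location, bucket, entry group,
and entry type names are placeholders, and the create call is assumed to
return the standard long-running operation surface):

    from google.cloud import dataplex_v1

    spec = dataplex_v1.MetadataJob.ImportJobSpec(
        # The folder must end with "/" and contain JSON Lines import files.
        source_storage_uri="gs://example-bucket/import/",
        entry_sync_mode=dataplex_v1.MetadataJob.ImportJobSpec.SyncMode.FULL,
        aspect_sync_mode=dataplex_v1.MetadataJob.ImportJobSpec.SyncMode.INCREMENTAL,
        log_level=dataplex_v1.MetadataJob.ImportJobSpec.LogLevel.DEBUG,
        scope=dataplex_v1.MetadataJob.ImportJobSpec.ImportJobScope(
            entry_groups=["projects/123/locations/us-central1/entryGroups/example-group"],
            entry_types=["projects/123/locations/us-central1/entryTypes/example-type"],
        ),
    )
    job = dataplex_v1.MetadataJob(
        type_=dataplex_v1.MetadataJob.Type.IMPORT,
        import_spec=spec,
    )

    client = dataplex_v1.CatalogServiceClient()
    operation = client.create_metadata_job(
        parent="projects/123/locations/us-central1",
        metadata_job=job,
    )
    result = operation.result()  # blocks until the metadata job finishes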
- """ - LOG_LEVEL_UNSPECIFIED = 0 - DEBUG = 1 - INFO = 2 - - class ImportJobScope(proto.Message): - r"""A boundary on the scope of impact that the metadata import - job can have. - - Attributes: - entry_groups (MutableSequence[str]): - Required. The entry group that is in scope for the import - job, specified as a relative resource name in the format - ``projects/{project_number_or_id}/locations/{location_id}/entryGroups/{entry_group_id}``. - Only entries that belong to the specified entry group are - affected by the job. - - Must contain exactly one element. The entry group and the - job must be in the same location. - entry_types (MutableSequence[str]): - Required. The entry types that are in scope for the import - job, specified as relative resource names in the format - ``projects/{project_number_or_id}/locations/{location_id}/entryTypes/{entry_type_id}``. - The job modifies only the entries that belong to these entry - types. - - If the metadata import file attempts to modify an entry - whose type isn't included in this list, the import job is - halted before modifying any entries or aspects. - - The location of an entry type must either match the location - of the job, or the entry type must be global. - aspect_types (MutableSequence[str]): - Optional. The aspect types that are in scope for the import - job, specified as relative resource names in the format - ``projects/{project_number_or_id}/locations/{location_id}/aspectTypes/{aspect_type_id}``. - The job modifies only the aspects that belong to these - aspect types. - - If the metadata import file attempts to modify an aspect - whose type isn't included in this list, the import job is - halted before modifying any entries or aspects. - - The location of an aspect type must either match the - location of the job, or the aspect type must be global. - """ - - entry_groups: MutableSequence[str] = proto.RepeatedField( - proto.STRING, - number=1, - ) - entry_types: MutableSequence[str] = proto.RepeatedField( - proto.STRING, - number=2, - ) - aspect_types: MutableSequence[str] = proto.RepeatedField( - proto.STRING, - number=3, - ) - - source_storage_uri: str = proto.Field( - proto.STRING, - number=1, - ) - source_create_time: timestamp_pb2.Timestamp = proto.Field( - proto.MESSAGE, - number=5, - message=timestamp_pb2.Timestamp, - ) - scope: 'MetadataJob.ImportJobSpec.ImportJobScope' = proto.Field( - proto.MESSAGE, - number=2, - message='MetadataJob.ImportJobSpec.ImportJobScope', - ) - entry_sync_mode: 'MetadataJob.ImportJobSpec.SyncMode' = proto.Field( - proto.ENUM, - number=3, - enum='MetadataJob.ImportJobSpec.SyncMode', - ) - aspect_sync_mode: 'MetadataJob.ImportJobSpec.SyncMode' = proto.Field( - proto.ENUM, - number=4, - enum='MetadataJob.ImportJobSpec.SyncMode', - ) - log_level: 'MetadataJob.ImportJobSpec.LogLevel' = proto.Field( - proto.ENUM, - number=6, - enum='MetadataJob.ImportJobSpec.LogLevel', - ) - - class Status(proto.Message): - r"""Metadata job status. - - Attributes: - state (google.cloud.dataplex_v1.types.MetadataJob.Status.State): - Output only. State of the metadata job. - message (str): - Output only. Message relating to the - progression of a metadata job. - completion_percent (int): - Output only. Progress tracking. - update_time (google.protobuf.timestamp_pb2.Timestamp): - Output only. The time when the status was - updated. - """ - class State(proto.Enum): - r"""State of a metadata job. - - Values: - STATE_UNSPECIFIED (0): - State unspecified. - QUEUED (1): - The job is queued. - RUNNING (2): - The job is running. 
- CANCELING (3): - The job is being canceled. - CANCELED (4): - The job is canceled. - SUCCEEDED (5): - The job succeeded. - FAILED (6): - The job failed. - SUCCEEDED_WITH_ERRORS (7): - The job completed with some errors. - """ - STATE_UNSPECIFIED = 0 - QUEUED = 1 - RUNNING = 2 - CANCELING = 3 - CANCELED = 4 - SUCCEEDED = 5 - FAILED = 6 - SUCCEEDED_WITH_ERRORS = 7 - - state: 'MetadataJob.Status.State' = proto.Field( - proto.ENUM, - number=1, - enum='MetadataJob.Status.State', - ) - message: str = proto.Field( - proto.STRING, - number=2, - ) - completion_percent: int = proto.Field( - proto.INT32, - number=3, - ) - update_time: timestamp_pb2.Timestamp = proto.Field( - proto.MESSAGE, - number=4, - message=timestamp_pb2.Timestamp, - ) - - name: str = proto.Field( - proto.STRING, - number=1, - ) - uid: str = proto.Field( - proto.STRING, - number=2, - ) - create_time: timestamp_pb2.Timestamp = proto.Field( - proto.MESSAGE, - number=3, - message=timestamp_pb2.Timestamp, - ) - update_time: timestamp_pb2.Timestamp = proto.Field( - proto.MESSAGE, - number=4, - message=timestamp_pb2.Timestamp, - ) - labels: MutableMapping[str, str] = proto.MapField( - proto.STRING, - proto.STRING, - number=5, - ) - type_: Type = proto.Field( - proto.ENUM, - number=6, - enum=Type, - ) - import_spec: ImportJobSpec = proto.Field( - proto.MESSAGE, - number=100, - oneof='spec', - message=ImportJobSpec, - ) - import_result: ImportJobResult = proto.Field( - proto.MESSAGE, - number=200, - oneof='result', - message=ImportJobResult, - ) - status: Status = proto.Field( - proto.MESSAGE, - number=7, - message=Status, - ) - - -__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/types/content.py b/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/types/content.py deleted file mode 100644 index d78845393886..000000000000 --- a/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/types/content.py +++ /dev/null @@ -1,227 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -from __future__ import annotations - -from typing import MutableMapping, MutableSequence - -import proto # type: ignore - -from google.cloud.dataplex_v1.types import analyze -from google.protobuf import field_mask_pb2 # type: ignore - - -__protobuf__ = proto.module( - package='google.cloud.dataplex.v1', - manifest={ - 'CreateContentRequest', - 'UpdateContentRequest', - 'DeleteContentRequest', - 'ListContentRequest', - 'ListContentResponse', - 'GetContentRequest', - }, -) - - -class CreateContentRequest(proto.Message): - r"""Create content request. - - Attributes: - parent (str): - Required. The resource name of the parent lake: - projects/{project_id}/locations/{location_id}/lakes/{lake_id} - content (google.cloud.dataplex_v1.types.Content): - Required. Content resource. - validate_only (bool): - Optional. Only validate the request, but do - not perform mutations. The default is false. 
- """ - - parent: str = proto.Field( - proto.STRING, - number=1, - ) - content: analyze.Content = proto.Field( - proto.MESSAGE, - number=2, - message=analyze.Content, - ) - validate_only: bool = proto.Field( - proto.BOOL, - number=3, - ) - - -class UpdateContentRequest(proto.Message): - r"""Update content request. - - Attributes: - update_mask (google.protobuf.field_mask_pb2.FieldMask): - Required. Mask of fields to update. - content (google.cloud.dataplex_v1.types.Content): - Required. Update description. Only fields specified in - ``update_mask`` are updated. - validate_only (bool): - Optional. Only validate the request, but do - not perform mutations. The default is false. - """ - - update_mask: field_mask_pb2.FieldMask = proto.Field( - proto.MESSAGE, - number=1, - message=field_mask_pb2.FieldMask, - ) - content: analyze.Content = proto.Field( - proto.MESSAGE, - number=2, - message=analyze.Content, - ) - validate_only: bool = proto.Field( - proto.BOOL, - number=3, - ) - - -class DeleteContentRequest(proto.Message): - r"""Delete content request. - - Attributes: - name (str): - Required. The resource name of the content: - projects/{project_id}/locations/{location_id}/lakes/{lake_id}/content/{content_id} - """ - - name: str = proto.Field( - proto.STRING, - number=1, - ) - - -class ListContentRequest(proto.Message): - r"""List content request. Returns the BASIC Content view. - - Attributes: - parent (str): - Required. The resource name of the parent lake: - projects/{project_id}/locations/{location_id}/lakes/{lake_id} - page_size (int): - Optional. Maximum number of content to - return. The service may return fewer than this - value. If unspecified, at most 10 content will - be returned. The maximum value is 1000; values - above 1000 will be coerced to 1000. - page_token (str): - Optional. Page token received from a previous - ``ListContent`` call. Provide this to retrieve the - subsequent page. When paginating, all other parameters - provided to ``ListContent`` must match the call that - provided the page token. - filter (str): - Optional. Filter request. Filters are case-sensitive. The - following formats are supported: - - labels.key1 = "value1" labels:key1 type = "NOTEBOOK" type = - "SQL_SCRIPT" - - These restrictions can be coinjoined with AND, OR and NOT - conjunctions. - """ - - parent: str = proto.Field( - proto.STRING, - number=1, - ) - page_size: int = proto.Field( - proto.INT32, - number=2, - ) - page_token: str = proto.Field( - proto.STRING, - number=3, - ) - filter: str = proto.Field( - proto.STRING, - number=4, - ) - - -class ListContentResponse(proto.Message): - r"""List content response. - - Attributes: - content (MutableSequence[google.cloud.dataplex_v1.types.Content]): - Content under the given parent lake. - next_page_token (str): - Token to retrieve the next page of results, - or empty if there are no more results in the - list. - """ - - @property - def raw_page(self): - return self - - content: MutableSequence[analyze.Content] = proto.RepeatedField( - proto.MESSAGE, - number=1, - message=analyze.Content, - ) - next_page_token: str = proto.Field( - proto.STRING, - number=2, - ) - - -class GetContentRequest(proto.Message): - r"""Get content request. - - Attributes: - name (str): - Required. The resource name of the content: - projects/{project_id}/locations/{location_id}/lakes/{lake_id}/content/{content_id} - view (google.cloud.dataplex_v1.types.GetContentRequest.ContentView): - Optional. Specify content view to make a - partial request. 
- """ - class ContentView(proto.Enum): - r"""Specifies whether the request should return the full or the - partial representation. - - Values: - CONTENT_VIEW_UNSPECIFIED (0): - Content view not specified. Defaults to - BASIC. The API will default to the BASIC view. - BASIC (1): - Will not return the ``data_text`` field. - FULL (2): - Returns the complete proto. - """ - CONTENT_VIEW_UNSPECIFIED = 0 - BASIC = 1 - FULL = 2 - - name: str = proto.Field( - proto.STRING, - number=1, - ) - view: ContentView = proto.Field( - proto.ENUM, - number=2, - enum=ContentView, - ) - - -__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/types/data_profile.py b/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/types/data_profile.py deleted file mode 100644 index 1e5f3aad0d9f..000000000000 --- a/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/types/data_profile.py +++ /dev/null @@ -1,540 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -from __future__ import annotations - -from typing import MutableMapping, MutableSequence - -import proto # type: ignore - -from google.cloud.dataplex_v1.types import processing - - -__protobuf__ = proto.module( - package='google.cloud.dataplex.v1', - manifest={ - 'DataProfileSpec', - 'DataProfileResult', - }, -) - - -class DataProfileSpec(proto.Message): - r"""DataProfileScan related setting. - - Attributes: - sampling_percent (float): - Optional. The percentage of the records to be selected from - the dataset for DataScan. - - - Value can range between 0.0 and 100.0 with up to 3 - significant decimal digits. - - Sampling is not applied if ``sampling_percent`` is not - specified, 0 or - - 100. - row_filter (str): - Optional. A filter applied to all rows in a - single DataScan job. The filter needs to be a - valid SQL expression for a WHERE clause in - BigQuery standard SQL syntax. - Example: col1 >= 0 AND col2 < 10 - post_scan_actions (google.cloud.dataplex_v1.types.DataProfileSpec.PostScanActions): - Optional. Actions to take upon job - completion.. - include_fields (google.cloud.dataplex_v1.types.DataProfileSpec.SelectedFields): - Optional. The fields to include in data profile. - - If not specified, all fields at the time of profile scan job - execution are included, except for ones listed in - ``exclude_fields``. - exclude_fields (google.cloud.dataplex_v1.types.DataProfileSpec.SelectedFields): - Optional. The fields to exclude from data profile. - - If specified, the fields will be excluded from data profile, - regardless of ``include_fields`` value. - """ - - class PostScanActions(proto.Message): - r"""The configuration of post scan actions of DataProfileScan - job. - - Attributes: - bigquery_export (google.cloud.dataplex_v1.types.DataProfileSpec.PostScanActions.BigQueryExport): - Optional. If set, results will be exported to - the provided BigQuery table. 
- """ - - class BigQueryExport(proto.Message): - r"""The configuration of BigQuery export post scan action. - - Attributes: - results_table (str): - Optional. The BigQuery table to export DataProfileScan - results to. Format: - //bigquery.googleapis.com/projects/PROJECT_ID/datasets/DATASET_ID/tables/TABLE_ID - """ - - results_table: str = proto.Field( - proto.STRING, - number=1, - ) - - bigquery_export: 'DataProfileSpec.PostScanActions.BigQueryExport' = proto.Field( - proto.MESSAGE, - number=1, - message='DataProfileSpec.PostScanActions.BigQueryExport', - ) - - class SelectedFields(proto.Message): - r"""The specification for fields to include or exclude in data - profile scan. - - Attributes: - field_names (MutableSequence[str]): - Optional. Expected input is a list of fully - qualified names of fields as in the schema. - - Only top-level field names for nested fields are - supported. For instance, if 'x' is of nested - field type, listing 'x' is supported but 'x.y.z' - is not supported. Here 'y' and 'y.z' are nested - fields of 'x'. - """ - - field_names: MutableSequence[str] = proto.RepeatedField( - proto.STRING, - number=1, - ) - - sampling_percent: float = proto.Field( - proto.FLOAT, - number=2, - ) - row_filter: str = proto.Field( - proto.STRING, - number=3, - ) - post_scan_actions: PostScanActions = proto.Field( - proto.MESSAGE, - number=4, - message=PostScanActions, - ) - include_fields: SelectedFields = proto.Field( - proto.MESSAGE, - number=5, - message=SelectedFields, - ) - exclude_fields: SelectedFields = proto.Field( - proto.MESSAGE, - number=6, - message=SelectedFields, - ) - - -class DataProfileResult(proto.Message): - r"""DataProfileResult defines the output of DataProfileScan. Each - field of the table will have field type specific profile result. - - Attributes: - row_count (int): - The count of rows scanned. - profile (google.cloud.dataplex_v1.types.DataProfileResult.Profile): - The profile information per field. - scanned_data (google.cloud.dataplex_v1.types.ScannedData): - The data scanned for this result. - post_scan_actions_result (google.cloud.dataplex_v1.types.DataProfileResult.PostScanActionsResult): - Output only. The result of post scan actions. - """ - - class Profile(proto.Message): - r"""Contains name, type, mode and field type specific profile - information. - - Attributes: - fields (MutableSequence[google.cloud.dataplex_v1.types.DataProfileResult.Profile.Field]): - List of fields with structural and profile - information for each field. - """ - - class Field(proto.Message): - r"""A field within a table. - - Attributes: - name (str): - The name of the field. - type_ (str): - The data type retrieved from the schema of the data source. - For instance, for a BigQuery native table, it is the - `BigQuery Table - Schema `__. - For a Dataplex Entity, it is the `Entity - Schema `__. - mode (str): - The mode of the field. Possible values include: - - - REQUIRED, if it is a required field. - - NULLABLE, if it is an optional field. - - REPEATED, if it is a repeated field. - profile (google.cloud.dataplex_v1.types.DataProfileResult.Profile.Field.ProfileInfo): - Profile information for the corresponding - field. - """ - - class ProfileInfo(proto.Message): - r"""The profile information for each field type. - - This message has `oneof`_ fields (mutually exclusive fields). - For each oneof, at most one member field can be set at the same time. - Setting any member of the oneof automatically clears all other - members. - - .. 
_oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields - - Attributes: - null_ratio (float): - Ratio of rows with null value against total - scanned rows. - distinct_ratio (float): - Ratio of rows with distinct values against - total scanned rows. Not available for complex - non-groupable field type, including RECORD, - ARRAY, GEOGRAPHY, and JSON, as well as fields - with REPEATABLE mode. - top_n_values (MutableSequence[google.cloud.dataplex_v1.types.DataProfileResult.Profile.Field.ProfileInfo.TopNValue]): - The list of top N non-null values, frequency - and ratio with which they occur in the scanned - data. N is 10 or equal to the number of distinct - values in the field, whichever is smaller. Not - available for complex non-groupable field type, - including RECORD, ARRAY, GEOGRAPHY, and JSON, as - well as fields with REPEATABLE mode. - string_profile (google.cloud.dataplex_v1.types.DataProfileResult.Profile.Field.ProfileInfo.StringFieldInfo): - String type field information. - - This field is a member of `oneof`_ ``field_info``. - integer_profile (google.cloud.dataplex_v1.types.DataProfileResult.Profile.Field.ProfileInfo.IntegerFieldInfo): - Integer type field information. - - This field is a member of `oneof`_ ``field_info``. - double_profile (google.cloud.dataplex_v1.types.DataProfileResult.Profile.Field.ProfileInfo.DoubleFieldInfo): - Double type field information. - - This field is a member of `oneof`_ ``field_info``. - """ - - class StringFieldInfo(proto.Message): - r"""The profile information for a string type field. - - Attributes: - min_length (int): - Minimum length of non-null values in the - scanned data. - max_length (int): - Maximum length of non-null values in the - scanned data. - average_length (float): - Average length of non-null values in the - scanned data. - """ - - min_length: int = proto.Field( - proto.INT64, - number=1, - ) - max_length: int = proto.Field( - proto.INT64, - number=2, - ) - average_length: float = proto.Field( - proto.DOUBLE, - number=3, - ) - - class IntegerFieldInfo(proto.Message): - r"""The profile information for an integer type field. - - Attributes: - average (float): - Average of non-null values in the scanned - data. NaN, if the field has a NaN. - standard_deviation (float): - Standard deviation of non-null values in the - scanned data. NaN, if the field has a NaN. - min_ (int): - Minimum of non-null values in the scanned - data. NaN, if the field has a NaN. - quartiles (MutableSequence[int]): - A quartile divides the number of data points - into four parts, or quarters, of more-or-less - equal size. Three main quartiles used are: The - first quartile (Q1) splits off the lowest 25% of - data from the highest 75%. It is also known as - the lower or 25th empirical quartile, as 25% of - the data is below this point. The second - quartile (Q2) is the median of a data set. So, - 50% of the data lies below this point. The third - quartile (Q3) splits off the highest 25% of data - from the lowest 75%. It is known as the upper or - 75th empirical quartile, as 75% of the data lies - below this point. Here, the quartiles is - provided as an ordered list of approximate - quartile values for the scanned data, occurring - in order Q1, median, Q3. - max_ (int): - Maximum of non-null values in the scanned - data. NaN, if the field has a NaN. 
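Reading these results back means dispatching on the ``field_info`` oneof; a
sketch, assuming a completed scan whose DataProfileResult is available as
``result`` and that proto-plus ``in`` membership tests are used to detect the
populated oneof member:

    for field in result.profile.fields:
        info = field.profile
        if "integer_profile" in info:
            # quartiles is an ordered list of approximate values: Q1, median, Q3.
            q1, median, q3 = info.integer_profile.quartiles
            print(field.name, info.integer_profile.min_, median, info.integer_profile.max_)
        elif "string_profile" in info:
            print(field.name, info.string_profile.average_length)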
- """ - - average: float = proto.Field( - proto.DOUBLE, - number=1, - ) - standard_deviation: float = proto.Field( - proto.DOUBLE, - number=3, - ) - min_: int = proto.Field( - proto.INT64, - number=4, - ) - quartiles: MutableSequence[int] = proto.RepeatedField( - proto.INT64, - number=6, - ) - max_: int = proto.Field( - proto.INT64, - number=5, - ) - - class DoubleFieldInfo(proto.Message): - r"""The profile information for a double type field. - - Attributes: - average (float): - Average of non-null values in the scanned - data. NaN, if the field has a NaN. - standard_deviation (float): - Standard deviation of non-null values in the - scanned data. NaN, if the field has a NaN. - min_ (float): - Minimum of non-null values in the scanned - data. NaN, if the field has a NaN. - quartiles (MutableSequence[float]): - A quartile divides the number of data points - into four parts, or quarters, of more-or-less - equal size. Three main quartiles used are: The - first quartile (Q1) splits off the lowest 25% of - data from the highest 75%. It is also known as - the lower or 25th empirical quartile, as 25% of - the data is below this point. The second - quartile (Q2) is the median of a data set. So, - 50% of the data lies below this point. The third - quartile (Q3) splits off the highest 25% of data - from the lowest 75%. It is known as the upper or - 75th empirical quartile, as 75% of the data lies - below this point. Here, the quartiles is - provided as an ordered list of quartile values - for the scanned data, occurring in order Q1, - median, Q3. - max_ (float): - Maximum of non-null values in the scanned - data. NaN, if the field has a NaN. - """ - - average: float = proto.Field( - proto.DOUBLE, - number=1, - ) - standard_deviation: float = proto.Field( - proto.DOUBLE, - number=3, - ) - min_: float = proto.Field( - proto.DOUBLE, - number=4, - ) - quartiles: MutableSequence[float] = proto.RepeatedField( - proto.DOUBLE, - number=6, - ) - max_: float = proto.Field( - proto.DOUBLE, - number=5, - ) - - class TopNValue(proto.Message): - r"""Top N non-null values in the scanned data. - - Attributes: - value (str): - String value of a top N non-null value. - count (int): - Count of the corresponding value in the - scanned data. - ratio (float): - Ratio of the corresponding value in the field - against the total number of rows in the scanned - data. 
- """ - - value: str = proto.Field( - proto.STRING, - number=1, - ) - count: int = proto.Field( - proto.INT64, - number=2, - ) - ratio: float = proto.Field( - proto.DOUBLE, - number=3, - ) - - null_ratio: float = proto.Field( - proto.DOUBLE, - number=2, - ) - distinct_ratio: float = proto.Field( - proto.DOUBLE, - number=3, - ) - top_n_values: MutableSequence['DataProfileResult.Profile.Field.ProfileInfo.TopNValue'] = proto.RepeatedField( - proto.MESSAGE, - number=4, - message='DataProfileResult.Profile.Field.ProfileInfo.TopNValue', - ) - string_profile: 'DataProfileResult.Profile.Field.ProfileInfo.StringFieldInfo' = proto.Field( - proto.MESSAGE, - number=101, - oneof='field_info', - message='DataProfileResult.Profile.Field.ProfileInfo.StringFieldInfo', - ) - integer_profile: 'DataProfileResult.Profile.Field.ProfileInfo.IntegerFieldInfo' = proto.Field( - proto.MESSAGE, - number=102, - oneof='field_info', - message='DataProfileResult.Profile.Field.ProfileInfo.IntegerFieldInfo', - ) - double_profile: 'DataProfileResult.Profile.Field.ProfileInfo.DoubleFieldInfo' = proto.Field( - proto.MESSAGE, - number=103, - oneof='field_info', - message='DataProfileResult.Profile.Field.ProfileInfo.DoubleFieldInfo', - ) - - name: str = proto.Field( - proto.STRING, - number=1, - ) - type_: str = proto.Field( - proto.STRING, - number=2, - ) - mode: str = proto.Field( - proto.STRING, - number=3, - ) - profile: 'DataProfileResult.Profile.Field.ProfileInfo' = proto.Field( - proto.MESSAGE, - number=4, - message='DataProfileResult.Profile.Field.ProfileInfo', - ) - - fields: MutableSequence['DataProfileResult.Profile.Field'] = proto.RepeatedField( - proto.MESSAGE, - number=2, - message='DataProfileResult.Profile.Field', - ) - - class PostScanActionsResult(proto.Message): - r"""The result of post scan actions of DataProfileScan job. - - Attributes: - bigquery_export_result (google.cloud.dataplex_v1.types.DataProfileResult.PostScanActionsResult.BigQueryExportResult): - Output only. The result of BigQuery export - post scan action. - """ - - class BigQueryExportResult(proto.Message): - r"""The result of BigQuery export post scan action. - - Attributes: - state (google.cloud.dataplex_v1.types.DataProfileResult.PostScanActionsResult.BigQueryExportResult.State): - Output only. Execution state for the BigQuery - exporting. - message (str): - Output only. Additional information about the - BigQuery exporting. - """ - class State(proto.Enum): - r"""Execution state for the exporting. - - Values: - STATE_UNSPECIFIED (0): - The exporting state is unspecified. - SUCCEEDED (1): - The exporting completed successfully. - FAILED (2): - The exporting is no longer running due to an - error. - SKIPPED (3): - The exporting is skipped due to no valid scan - result to export (usually caused by scan - failed). 
- """ - STATE_UNSPECIFIED = 0 - SUCCEEDED = 1 - FAILED = 2 - SKIPPED = 3 - - state: 'DataProfileResult.PostScanActionsResult.BigQueryExportResult.State' = proto.Field( - proto.ENUM, - number=1, - enum='DataProfileResult.PostScanActionsResult.BigQueryExportResult.State', - ) - message: str = proto.Field( - proto.STRING, - number=2, - ) - - bigquery_export_result: 'DataProfileResult.PostScanActionsResult.BigQueryExportResult' = proto.Field( - proto.MESSAGE, - number=1, - message='DataProfileResult.PostScanActionsResult.BigQueryExportResult', - ) - - row_count: int = proto.Field( - proto.INT64, - number=3, - ) - profile: Profile = proto.Field( - proto.MESSAGE, - number=4, - message=Profile, - ) - scanned_data: processing.ScannedData = proto.Field( - proto.MESSAGE, - number=5, - message=processing.ScannedData, - ) - post_scan_actions_result: PostScanActionsResult = proto.Field( - proto.MESSAGE, - number=6, - message=PostScanActionsResult, - ) - - -__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/types/data_quality.py b/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/types/data_quality.py deleted file mode 100644 index 4f5adfd361ad..000000000000 --- a/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/types/data_quality.py +++ /dev/null @@ -1,912 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -from __future__ import annotations - -from typing import MutableMapping, MutableSequence - -import proto # type: ignore - -from google.cloud.dataplex_v1.types import processing - - -__protobuf__ = proto.module( - package='google.cloud.dataplex.v1', - manifest={ - 'DataQualitySpec', - 'DataQualityResult', - 'DataQualityRuleResult', - 'DataQualityDimensionResult', - 'DataQualityDimension', - 'DataQualityRule', - 'DataQualityColumnResult', - }, -) - - -class DataQualitySpec(proto.Message): - r"""DataQualityScan related setting. - - Attributes: - rules (MutableSequence[google.cloud.dataplex_v1.types.DataQualityRule]): - Required. The list of rules to evaluate - against a data source. At least one rule is - required. - sampling_percent (float): - Optional. The percentage of the records to be selected from - the dataset for DataScan. - - - Value can range between 0.0 and 100.0 with up to 3 - significant decimal digits. - - Sampling is not applied if ``sampling_percent`` is not - specified, 0 or - - 100. - row_filter (str): - Optional. A filter applied to all rows in a - single DataScan job. The filter needs to be a - valid SQL expression for a WHERE clause in - BigQuery standard SQL syntax. - Example: col1 >= 0 AND col2 < 10 - post_scan_actions (google.cloud.dataplex_v1.types.DataQualitySpec.PostScanActions): - Optional. Actions to take upon job - completion. - """ - - class PostScanActions(proto.Message): - r"""The configuration of post scan actions of DataQualityScan. 
- - Attributes: - bigquery_export (google.cloud.dataplex_v1.types.DataQualitySpec.PostScanActions.BigQueryExport): - Optional. If set, results will be exported to - the provided BigQuery table. - notification_report (google.cloud.dataplex_v1.types.DataQualitySpec.PostScanActions.NotificationReport): - Optional. If set, results will be sent to the - provided notification receipts upon triggers. - """ - - class BigQueryExport(proto.Message): - r"""The configuration of BigQuery export post scan action. - - Attributes: - results_table (str): - Optional. The BigQuery table to export DataQualityScan - results to. Format: - //bigquery.googleapis.com/projects/PROJECT_ID/datasets/DATASET_ID/tables/TABLE_ID - """ - - results_table: str = proto.Field( - proto.STRING, - number=1, - ) - - class Recipients(proto.Message): - r"""The individuals or groups who are designated to receive - notifications upon triggers. - - Attributes: - emails (MutableSequence[str]): - Optional. The email recipients who will - receive the DataQualityScan results report. - """ - - emails: MutableSequence[str] = proto.RepeatedField( - proto.STRING, - number=1, - ) - - class ScoreThresholdTrigger(proto.Message): - r"""This trigger is triggered when the DQ score in the job result - is less than a specified input score. - - Attributes: - score_threshold (float): - Optional. The score range is in [0,100]. - """ - - score_threshold: float = proto.Field( - proto.FLOAT, - number=2, - ) - - class JobFailureTrigger(proto.Message): - r"""This trigger is triggered when the scan job itself fails, - regardless of the result. - - """ - - class JobEndTrigger(proto.Message): - r"""This trigger is triggered whenever a scan job run ends, - regardless of the result. - - """ - - class NotificationReport(proto.Message): - r"""The configuration of notification report post scan action. - - Attributes: - recipients (google.cloud.dataplex_v1.types.DataQualitySpec.PostScanActions.Recipients): - Required. The recipients who will receive the - notification report. - score_threshold_trigger (google.cloud.dataplex_v1.types.DataQualitySpec.PostScanActions.ScoreThresholdTrigger): - Optional. If set, report will be sent when - score threshold is met. - job_failure_trigger (google.cloud.dataplex_v1.types.DataQualitySpec.PostScanActions.JobFailureTrigger): - Optional. If set, report will be sent when a - scan job fails. - job_end_trigger (google.cloud.dataplex_v1.types.DataQualitySpec.PostScanActions.JobEndTrigger): - Optional. If set, report will be sent when a - scan job ends. 
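Wiring the triggers and recipients together might look like this (a minimal
sketch; the email address is a placeholder):

    from google.cloud import dataplex_v1

    actions = dataplex_v1.DataQualitySpec.PostScanActions(
        notification_report=dataplex_v1.DataQualitySpec.PostScanActions.NotificationReport(
            recipients=dataplex_v1.DataQualitySpec.PostScanActions.Recipients(
                emails=["data-team@example.com"],
            ),
            # Send the report when the score drops below 90, or the job fails.
            score_threshold_trigger=dataplex_v1.DataQualitySpec.PostScanActions.ScoreThresholdTrigger(
                score_threshold=90.0,
            ),
            job_failure_trigger=dataplex_v1.DataQualitySpec.PostScanActions.JobFailureTrigger(),
        ),
    )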
- """ - - recipients: 'DataQualitySpec.PostScanActions.Recipients' = proto.Field( - proto.MESSAGE, - number=1, - message='DataQualitySpec.PostScanActions.Recipients', - ) - score_threshold_trigger: 'DataQualitySpec.PostScanActions.ScoreThresholdTrigger' = proto.Field( - proto.MESSAGE, - number=2, - message='DataQualitySpec.PostScanActions.ScoreThresholdTrigger', - ) - job_failure_trigger: 'DataQualitySpec.PostScanActions.JobFailureTrigger' = proto.Field( - proto.MESSAGE, - number=4, - message='DataQualitySpec.PostScanActions.JobFailureTrigger', - ) - job_end_trigger: 'DataQualitySpec.PostScanActions.JobEndTrigger' = proto.Field( - proto.MESSAGE, - number=5, - message='DataQualitySpec.PostScanActions.JobEndTrigger', - ) - - bigquery_export: 'DataQualitySpec.PostScanActions.BigQueryExport' = proto.Field( - proto.MESSAGE, - number=1, - message='DataQualitySpec.PostScanActions.BigQueryExport', - ) - notification_report: 'DataQualitySpec.PostScanActions.NotificationReport' = proto.Field( - proto.MESSAGE, - number=2, - message='DataQualitySpec.PostScanActions.NotificationReport', - ) - - rules: MutableSequence['DataQualityRule'] = proto.RepeatedField( - proto.MESSAGE, - number=1, - message='DataQualityRule', - ) - sampling_percent: float = proto.Field( - proto.FLOAT, - number=4, - ) - row_filter: str = proto.Field( - proto.STRING, - number=5, - ) - post_scan_actions: PostScanActions = proto.Field( - proto.MESSAGE, - number=6, - message=PostScanActions, - ) - - -class DataQualityResult(proto.Message): - r"""The output of a DataQualityScan. - - .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields - - Attributes: - passed (bool): - Overall data quality result -- ``true`` if all rules passed. - score (float): - Output only. The overall data quality score. - - The score ranges between [0, 100] (up to two decimal - points). - - This field is a member of `oneof`_ ``_score``. - dimensions (MutableSequence[google.cloud.dataplex_v1.types.DataQualityDimensionResult]): - A list of results at the dimension level. - - A dimension will have a corresponding - ``DataQualityDimensionResult`` if and only if there is at - least one rule with the 'dimension' field set to it. - columns (MutableSequence[google.cloud.dataplex_v1.types.DataQualityColumnResult]): - Output only. A list of results at the column level. - - A column will have a corresponding - ``DataQualityColumnResult`` if and only if there is at least - one rule with the 'column' field set to it. - rules (MutableSequence[google.cloud.dataplex_v1.types.DataQualityRuleResult]): - A list of all the rules in a job, and their - results. - row_count (int): - The count of rows processed. - scanned_data (google.cloud.dataplex_v1.types.ScannedData): - The data scanned for this result. - post_scan_actions_result (google.cloud.dataplex_v1.types.DataQualityResult.PostScanActionsResult): - Output only. The result of post scan actions. - """ - - class PostScanActionsResult(proto.Message): - r"""The result of post scan actions of DataQualityScan job. - - Attributes: - bigquery_export_result (google.cloud.dataplex_v1.types.DataQualityResult.PostScanActionsResult.BigQueryExportResult): - Output only. The result of BigQuery export - post scan action. - """ - - class BigQueryExportResult(proto.Message): - r"""The result of BigQuery export post scan action. - - Attributes: - state (google.cloud.dataplex_v1.types.DataQualityResult.PostScanActionsResult.BigQueryExportResult.State): - Output only. 
Execution state for the BigQuery - exporting. - message (str): - Output only. Additional information about the - BigQuery exporting. - """ - class State(proto.Enum): - r"""Execution state for the exporting. - - Values: - STATE_UNSPECIFIED (0): - The exporting state is unspecified. - SUCCEEDED (1): - The exporting completed successfully. - FAILED (2): - The exporting is no longer running due to an - error. - SKIPPED (3): - The exporting is skipped due to no valid scan - result to export (usually caused by scan - failed). - """ - STATE_UNSPECIFIED = 0 - SUCCEEDED = 1 - FAILED = 2 - SKIPPED = 3 - - state: 'DataQualityResult.PostScanActionsResult.BigQueryExportResult.State' = proto.Field( - proto.ENUM, - number=1, - enum='DataQualityResult.PostScanActionsResult.BigQueryExportResult.State', - ) - message: str = proto.Field( - proto.STRING, - number=2, - ) - - bigquery_export_result: 'DataQualityResult.PostScanActionsResult.BigQueryExportResult' = proto.Field( - proto.MESSAGE, - number=1, - message='DataQualityResult.PostScanActionsResult.BigQueryExportResult', - ) - - passed: bool = proto.Field( - proto.BOOL, - number=5, - ) - score: float = proto.Field( - proto.FLOAT, - number=9, - optional=True, - ) - dimensions: MutableSequence['DataQualityDimensionResult'] = proto.RepeatedField( - proto.MESSAGE, - number=2, - message='DataQualityDimensionResult', - ) - columns: MutableSequence['DataQualityColumnResult'] = proto.RepeatedField( - proto.MESSAGE, - number=10, - message='DataQualityColumnResult', - ) - rules: MutableSequence['DataQualityRuleResult'] = proto.RepeatedField( - proto.MESSAGE, - number=3, - message='DataQualityRuleResult', - ) - row_count: int = proto.Field( - proto.INT64, - number=4, - ) - scanned_data: processing.ScannedData = proto.Field( - proto.MESSAGE, - number=7, - message=processing.ScannedData, - ) - post_scan_actions_result: PostScanActionsResult = proto.Field( - proto.MESSAGE, - number=8, - message=PostScanActionsResult, - ) - - -class DataQualityRuleResult(proto.Message): - r"""DataQualityRuleResult provides a more detailed, per-rule view - of the results. - - Attributes: - rule (google.cloud.dataplex_v1.types.DataQualityRule): - The rule specified in the DataQualitySpec, as - is. - passed (bool): - Whether the rule passed or failed. - evaluated_count (int): - The number of rows a rule was evaluated against. - - This field is only valid for row-level type rules. - - Evaluated count can be configured to either - - - include all rows (default) - with ``null`` rows - automatically failing rule evaluation, or - - exclude ``null`` rows from the ``evaluated_count``, by - setting ``ignore_nulls = true``. - passed_count (int): - The number of rows which passed a rule - evaluation. - This field is only valid for row-level type - rules. - null_count (int): - The number of rows with null values in the - specified column. - pass_ratio (float): - The ratio of **passed_count / evaluated_count**. - - This field is only valid for row-level type rules. - failing_rows_query (str): - The query to find rows that did not pass this - rule. - This field is only valid for row-level type - rules. - assertion_row_count (int): - Output only. The number of rows returned by - the SQL statement in a SQL assertion rule. - - This field is only valid for SQL assertion - rules. 
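Consuming these results typically means checking the overall verdict and then
drilling into failing rules; a sketch, assuming ``result`` holds a completed
DataQualityResult:

    print("passed:", result.passed, "score:", result.score)
    for rule_result in result.rules:
        if not rule_result.passed:
            print(rule_result.rule.name, "pass ratio:", rule_result.pass_ratio)
            # For row-level rules, this query selects the offending rows.
            print(rule_result.failing_rows_query)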
- """ - - rule: 'DataQualityRule' = proto.Field( - proto.MESSAGE, - number=1, - message='DataQualityRule', - ) - passed: bool = proto.Field( - proto.BOOL, - number=7, - ) - evaluated_count: int = proto.Field( - proto.INT64, - number=9, - ) - passed_count: int = proto.Field( - proto.INT64, - number=8, - ) - null_count: int = proto.Field( - proto.INT64, - number=5, - ) - pass_ratio: float = proto.Field( - proto.DOUBLE, - number=6, - ) - failing_rows_query: str = proto.Field( - proto.STRING, - number=10, - ) - assertion_row_count: int = proto.Field( - proto.INT64, - number=11, - ) - - -class DataQualityDimensionResult(proto.Message): - r"""DataQualityDimensionResult provides a more detailed, - per-dimension view of the results. - - - .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields - - Attributes: - dimension (google.cloud.dataplex_v1.types.DataQualityDimension): - Output only. The dimension config specified - in the DataQualitySpec, as is. - passed (bool): - Whether the dimension passed or failed. - score (float): - Output only. The dimension-level data quality score for this - data scan job if and only if the 'dimension' field is set. - - The score ranges between [0, 100] (up to two decimal - points). - - This field is a member of `oneof`_ ``_score``. - """ - - dimension: 'DataQualityDimension' = proto.Field( - proto.MESSAGE, - number=1, - message='DataQualityDimension', - ) - passed: bool = proto.Field( - proto.BOOL, - number=3, - ) - score: float = proto.Field( - proto.FLOAT, - number=4, - optional=True, - ) - - -class DataQualityDimension(proto.Message): - r"""A dimension captures data quality intent about a defined - subset of the rules specified. - - Attributes: - name (str): - The dimension name a rule belongs to. Supported dimensions - are ["COMPLETENESS", "ACCURACY", "CONSISTENCY", "VALIDITY", - "UNIQUENESS", "FRESHNESS", "VOLUME"] - """ - - name: str = proto.Field( - proto.STRING, - number=1, - ) - - -class DataQualityRule(proto.Message): - r"""A rule captures data quality intent about a data source. - - This message has `oneof`_ fields (mutually exclusive fields). - For each oneof, at most one member field can be set at the same time. - Setting any member of the oneof automatically clears all other - members. - - .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields - - Attributes: - range_expectation (google.cloud.dataplex_v1.types.DataQualityRule.RangeExpectation): - Row-level rule which evaluates whether each - column value lies between a specified range. - - This field is a member of `oneof`_ ``rule_type``. - non_null_expectation (google.cloud.dataplex_v1.types.DataQualityRule.NonNullExpectation): - Row-level rule which evaluates whether each - column value is null. - - This field is a member of `oneof`_ ``rule_type``. - set_expectation (google.cloud.dataplex_v1.types.DataQualityRule.SetExpectation): - Row-level rule which evaluates whether each - column value is contained by a specified set. - - This field is a member of `oneof`_ ``rule_type``. - regex_expectation (google.cloud.dataplex_v1.types.DataQualityRule.RegexExpectation): - Row-level rule which evaluates whether each - column value matches a specified regex. - - This field is a member of `oneof`_ ``rule_type``. - uniqueness_expectation (google.cloud.dataplex_v1.types.DataQualityRule.UniquenessExpectation): - Row-level rule which evaluates whether each - column value is unique. 
- - This field is a member of `oneof`_ ``rule_type``. - statistic_range_expectation (google.cloud.dataplex_v1.types.DataQualityRule.StatisticRangeExpectation): - Aggregate rule which evaluates whether the - column aggregate statistic lies between a - specified range. - - This field is a member of `oneof`_ ``rule_type``. - row_condition_expectation (google.cloud.dataplex_v1.types.DataQualityRule.RowConditionExpectation): - Row-level rule which evaluates whether each - row in a table passes the specified condition. - - This field is a member of `oneof`_ ``rule_type``. - table_condition_expectation (google.cloud.dataplex_v1.types.DataQualityRule.TableConditionExpectation): - Aggregate rule which evaluates whether the - provided expression is true for a table. - - This field is a member of `oneof`_ ``rule_type``. - sql_assertion (google.cloud.dataplex_v1.types.DataQualityRule.SqlAssertion): - Aggregate rule which evaluates the number of - rows returned for the provided statement. If any - rows are returned, this rule fails. - - This field is a member of `oneof`_ ``rule_type``. - column (str): - Optional. The unnested column which this rule - is evaluated against. - ignore_null (bool): - Optional. Rows with ``null`` values will automatically fail - a rule, unless ``ignore_null`` is ``true``. In that case, - such ``null`` rows are trivially considered passing. - - This field is only valid for the following type of rules: - - - RangeExpectation - - RegexExpectation - - SetExpectation - - UniquenessExpectation - dimension (str): - Required. The dimension a rule belongs to. Results are also - aggregated at the dimension level. Supported dimensions are - **["COMPLETENESS", "ACCURACY", "CONSISTENCY", "VALIDITY", - "UNIQUENESS", "FRESHNESS", "VOLUME"]** - threshold (float): - Optional. The minimum ratio of **passing_rows / total_rows** - required to pass this rule, with a range of [0.0, 1.0]. - - 0 indicates default value (i.e. 1.0). - - This field is only valid for row-level type rules. - name (str): - Optional. A mutable name for the rule. - - - The name must contain only letters (a-z, A-Z), numbers - (0-9), or hyphens (-). - - The maximum length is 63 characters. - - Must start with a letter. - - Must end with a number or a letter. - description (str): - Optional. Description of the rule. - - - The maximum length is 1,024 characters. - suspended (bool): - Optional. Whether the Rule is active or - suspended. Default is false. - """ - - class RangeExpectation(proto.Message): - r"""Evaluates whether each column value lies between a specified - range. - - Attributes: - min_value (str): - Optional. The minimum column value allowed for a row to pass - this validation. At least one of ``min_value`` and - ``max_value`` need to be provided. - max_value (str): - Optional. The maximum column value allowed for a row to pass - this validation. At least one of ``min_value`` and - ``max_value`` need to be provided. - strict_min_enabled (bool): - Optional. Whether each value needs to be strictly greater - than ('>') the minimum, or if equality is allowed. - - Only relevant if a ``min_value`` has been defined. Default = - false. - strict_max_enabled (bool): - Optional. Whether each value needs to be strictly lesser - than ('<') the maximum, or if equality is allowed. - - Only relevant if a ``max_value`` has been defined. Default = - false. 
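Putting the pieces together, a rule list mixing a row-level range check and an
aggregate SQL assertion might read (a minimal sketch; the column name is a
placeholder):

    from google.cloud import dataplex_v1

    rules = [
        dataplex_v1.DataQualityRule(
            column="age",
            dimension="VALIDITY",
            threshold=0.99,   # at least 99% of rows must pass
            ignore_null=True,
            range_expectation=dataplex_v1.DataQualityRule.RangeExpectation(
                min_value="0",
                max_value="120",
                strict_max_enabled=True,  # value must be strictly < 120
            ),
        ),
        dataplex_v1.DataQualityRule(
            dimension="VALIDITY",
            sql_assertion=dataplex_v1.DataQualityRule.SqlAssertion(
                sql_statement="SELECT * FROM ${data()} WHERE price < 0",
            ),
        ),
    ]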
- """ - - min_value: str = proto.Field( - proto.STRING, - number=1, - ) - max_value: str = proto.Field( - proto.STRING, - number=2, - ) - strict_min_enabled: bool = proto.Field( - proto.BOOL, - number=3, - ) - strict_max_enabled: bool = proto.Field( - proto.BOOL, - number=4, - ) - - class NonNullExpectation(proto.Message): - r"""Evaluates whether each column value is null. - """ - - class SetExpectation(proto.Message): - r"""Evaluates whether each column value is contained by a - specified set. - - Attributes: - values (MutableSequence[str]): - Optional. Expected values for the column - value. - """ - - values: MutableSequence[str] = proto.RepeatedField( - proto.STRING, - number=1, - ) - - class RegexExpectation(proto.Message): - r"""Evaluates whether each column value matches a specified - regex. - - Attributes: - regex (str): - Optional. A regular expression the column - value is expected to match. - """ - - regex: str = proto.Field( - proto.STRING, - number=1, - ) - - class UniquenessExpectation(proto.Message): - r"""Evaluates whether the column has duplicates. - """ - - class StatisticRangeExpectation(proto.Message): - r"""Evaluates whether the column aggregate statistic lies between - a specified range. - - Attributes: - statistic (google.cloud.dataplex_v1.types.DataQualityRule.StatisticRangeExpectation.ColumnStatistic): - Optional. The aggregate metric to evaluate. - min_value (str): - Optional. The minimum column statistic value allowed for a - row to pass this validation. - - At least one of ``min_value`` and ``max_value`` need to be - provided. - max_value (str): - Optional. The maximum column statistic value allowed for a - row to pass this validation. - - At least one of ``min_value`` and ``max_value`` need to be - provided. - strict_min_enabled (bool): - Optional. Whether column statistic needs to be strictly - greater than ('>') the minimum, or if equality is allowed. - - Only relevant if a ``min_value`` has been defined. Default = - false. - strict_max_enabled (bool): - Optional. Whether column statistic needs to be strictly - lesser than ('<') the maximum, or if equality is allowed. - - Only relevant if a ``max_value`` has been defined. Default = - false. - """ - class ColumnStatistic(proto.Enum): - r"""The list of aggregate metrics a rule can be evaluated - against. - - Values: - STATISTIC_UNDEFINED (0): - Unspecified statistic type - MEAN (1): - Evaluate the column mean - MIN (2): - Evaluate the column min - MAX (3): - Evaluate the column max - """ - STATISTIC_UNDEFINED = 0 - MEAN = 1 - MIN = 2 - MAX = 3 - - statistic: 'DataQualityRule.StatisticRangeExpectation.ColumnStatistic' = proto.Field( - proto.ENUM, - number=1, - enum='DataQualityRule.StatisticRangeExpectation.ColumnStatistic', - ) - min_value: str = proto.Field( - proto.STRING, - number=2, - ) - max_value: str = proto.Field( - proto.STRING, - number=3, - ) - strict_min_enabled: bool = proto.Field( - proto.BOOL, - number=4, - ) - strict_max_enabled: bool = proto.Field( - proto.BOOL, - number=5, - ) - - class RowConditionExpectation(proto.Message): - r"""Evaluates whether each row passes the specified condition. - - The SQL expression needs to use BigQuery standard SQL syntax and - should produce a boolean value per row as the result. - - Example: col1 >= 0 AND col2 < 10 - - Attributes: - sql_expression (str): - Optional. The SQL expression. 
- """ - - sql_expression: str = proto.Field( - proto.STRING, - number=1, - ) - - class TableConditionExpectation(proto.Message): - r"""Evaluates whether the provided expression is true. - - The SQL expression needs to use BigQuery standard SQL syntax and - should produce a scalar boolean result. - - Example: MIN(col1) >= 0 - - Attributes: - sql_expression (str): - Optional. The SQL expression. - """ - - sql_expression: str = proto.Field( - proto.STRING, - number=1, - ) - - class SqlAssertion(proto.Message): - r"""A SQL statement that is evaluated to return rows that match an - invalid state. If any rows are are returned, this rule fails. - - The SQL statement must use BigQuery standard SQL syntax, and must - not contain any semicolons. - - You can use the data reference parameter ``${data()}`` to reference - the source table with all of its precondition filters applied. - Examples of precondition filters include row filters, incremental - data filters, and sampling. For more information, see `Data - reference - parameter `__. - - Example: ``SELECT * FROM ${data()} WHERE price < 0`` - - Attributes: - sql_statement (str): - Optional. The SQL statement. - """ - - sql_statement: str = proto.Field( - proto.STRING, - number=1, - ) - - range_expectation: RangeExpectation = proto.Field( - proto.MESSAGE, - number=1, - oneof='rule_type', - message=RangeExpectation, - ) - non_null_expectation: NonNullExpectation = proto.Field( - proto.MESSAGE, - number=2, - oneof='rule_type', - message=NonNullExpectation, - ) - set_expectation: SetExpectation = proto.Field( - proto.MESSAGE, - number=3, - oneof='rule_type', - message=SetExpectation, - ) - regex_expectation: RegexExpectation = proto.Field( - proto.MESSAGE, - number=4, - oneof='rule_type', - message=RegexExpectation, - ) - uniqueness_expectation: UniquenessExpectation = proto.Field( - proto.MESSAGE, - number=100, - oneof='rule_type', - message=UniquenessExpectation, - ) - statistic_range_expectation: StatisticRangeExpectation = proto.Field( - proto.MESSAGE, - number=101, - oneof='rule_type', - message=StatisticRangeExpectation, - ) - row_condition_expectation: RowConditionExpectation = proto.Field( - proto.MESSAGE, - number=200, - oneof='rule_type', - message=RowConditionExpectation, - ) - table_condition_expectation: TableConditionExpectation = proto.Field( - proto.MESSAGE, - number=201, - oneof='rule_type', - message=TableConditionExpectation, - ) - sql_assertion: SqlAssertion = proto.Field( - proto.MESSAGE, - number=202, - oneof='rule_type', - message=SqlAssertion, - ) - column: str = proto.Field( - proto.STRING, - number=500, - ) - ignore_null: bool = proto.Field( - proto.BOOL, - number=501, - ) - dimension: str = proto.Field( - proto.STRING, - number=502, - ) - threshold: float = proto.Field( - proto.DOUBLE, - number=503, - ) - name: str = proto.Field( - proto.STRING, - number=504, - ) - description: str = proto.Field( - proto.STRING, - number=505, - ) - suspended: bool = proto.Field( - proto.BOOL, - number=506, - ) - - -class DataQualityColumnResult(proto.Message): - r"""DataQualityColumnResult provides a more detailed, per-column - view of the results. - - - .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields - - Attributes: - column (str): - Output only. The column specified in the - DataQualityRule. - score (float): - Output only. The column-level data quality score for this - data scan job if and only if the 'column' field is set. 
-
-            The score ranges between [0, 100] (up to two decimal
-            points).
-
-            This field is a member of `oneof`_ ``_score``.
-    """
-
-    column: str = proto.Field(
-        proto.STRING,
-        number=1,
-    )
-    score: float = proto.Field(
-        proto.FLOAT,
-        number=2,
-        optional=True,
-    )
-
-
-__all__ = tuple(sorted(__protobuf__.manifest))
diff --git a/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/types/data_taxonomy.py b/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/types/data_taxonomy.py
deleted file mode 100644
index e837b4b01d37..000000000000
--- a/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/types/data_taxonomy.py
+++ /dev/null
@@ -1,976 +0,0 @@
-# -*- coding: utf-8 -*-
-# Copyright 2024 Google LLC
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-from __future__ import annotations
-
-from typing import MutableMapping, MutableSequence
-
-import proto  # type: ignore
-
-from google.cloud.dataplex_v1.types import security
-from google.protobuf import field_mask_pb2  # type: ignore
-from google.protobuf import timestamp_pb2  # type: ignore
-
-
-__protobuf__ = proto.module(
-    package='google.cloud.dataplex.v1',
-    manifest={
-        'DataTaxonomy',
-        'DataAttribute',
-        'DataAttributeBinding',
-        'CreateDataTaxonomyRequest',
-        'UpdateDataTaxonomyRequest',
-        'GetDataTaxonomyRequest',
-        'ListDataTaxonomiesRequest',
-        'ListDataTaxonomiesResponse',
-        'DeleteDataTaxonomyRequest',
-        'CreateDataAttributeRequest',
-        'UpdateDataAttributeRequest',
-        'GetDataAttributeRequest',
-        'ListDataAttributesRequest',
-        'ListDataAttributesResponse',
-        'DeleteDataAttributeRequest',
-        'CreateDataAttributeBindingRequest',
-        'UpdateDataAttributeBindingRequest',
-        'GetDataAttributeBindingRequest',
-        'ListDataAttributeBindingsRequest',
-        'ListDataAttributeBindingsResponse',
-        'DeleteDataAttributeBindingRequest',
-    },
-)
-
-
-class DataTaxonomy(proto.Message):
-    r"""DataTaxonomy represents a set of hierarchical DataAttributes
-    resources, grouped with a common theme, e.g. a
-    'SensitiveDataTaxonomy' can have attributes to manage PII data.
-    It is defined at the project level.
-
-    Attributes:
-        name (str):
-            Output only. The relative resource name of the DataTaxonomy,
-            of the form:
-            projects/{project_number}/locations/{location_id}/dataTaxonomies/{data_taxonomy_id}.
-        uid (str):
-            Output only. System generated globally unique
-            ID for the dataTaxonomy. This ID will be
-            different if the DataTaxonomy is deleted and
-            re-created with the same name.
-        create_time (google.protobuf.timestamp_pb2.Timestamp):
-            Output only. The time when the DataTaxonomy
-            was created.
-        update_time (google.protobuf.timestamp_pb2.Timestamp):
-            Output only. The time when the DataTaxonomy
-            was last updated.
-        description (str):
-            Optional. Description of the DataTaxonomy.
-        display_name (str):
-            Optional. User friendly display name.
-        labels (MutableMapping[str, str]):
-            Optional. User-defined labels for the
-            DataTaxonomy.
-        attribute_count (int):
-            Output only. The number of attributes in the
-            DataTaxonomy.
- etag (str): - This checksum is computed by the server based - on the value of other fields, and may be sent on - update and delete requests to ensure the client - has an up-to-date value before proceeding. - class_count (int): - Output only. The number of classes in the - DataTaxonomy. - """ - - name: str = proto.Field( - proto.STRING, - number=1, - ) - uid: str = proto.Field( - proto.STRING, - number=2, - ) - create_time: timestamp_pb2.Timestamp = proto.Field( - proto.MESSAGE, - number=3, - message=timestamp_pb2.Timestamp, - ) - update_time: timestamp_pb2.Timestamp = proto.Field( - proto.MESSAGE, - number=4, - message=timestamp_pb2.Timestamp, - ) - description: str = proto.Field( - proto.STRING, - number=5, - ) - display_name: str = proto.Field( - proto.STRING, - number=6, - ) - labels: MutableMapping[str, str] = proto.MapField( - proto.STRING, - proto.STRING, - number=8, - ) - attribute_count: int = proto.Field( - proto.INT32, - number=9, - ) - etag: str = proto.Field( - proto.STRING, - number=10, - ) - class_count: int = proto.Field( - proto.INT32, - number=11, - ) - - -class DataAttribute(proto.Message): - r"""Denotes one dataAttribute in a dataTaxonomy, for example, PII. - DataAttribute resources can be defined in a hierarchy. A single - dataAttribute resource can contain specs of multiple types - - :: - - PII - - ResourceAccessSpec : - - readers :foo@bar.com - - DataAccessSpec : - - readers :bar@foo.com - - Attributes: - name (str): - Output only. The relative resource name of the - dataAttribute, of the form: - projects/{project_number}/locations/{location_id}/dataTaxonomies/{dataTaxonomy}/attributes/{data_attribute_id}. - uid (str): - Output only. System generated globally unique - ID for the DataAttribute. This ID will be - different if the DataAttribute is deleted and - re-created with the same name. - create_time (google.protobuf.timestamp_pb2.Timestamp): - Output only. The time when the DataAttribute - was created. - update_time (google.protobuf.timestamp_pb2.Timestamp): - Output only. The time when the DataAttribute - was last updated. - description (str): - Optional. Description of the DataAttribute. - display_name (str): - Optional. User friendly display name. - labels (MutableMapping[str, str]): - Optional. User-defined labels for the - DataAttribute. - parent_id (str): - Optional. The ID of the parent DataAttribute resource, - should belong to the same data taxonomy. Circular dependency - in parent chain is not valid. Maximum depth of the hierarchy - allowed is 4. [a -> b -> c -> d -> e, depth = 4] - attribute_count (int): - Output only. The number of child attributes - present for this attribute. - etag (str): - This checksum is computed by the server based - on the value of other fields, and may be sent on - update and delete requests to ensure the client - has an up-to-date value before proceeding. - resource_access_spec (google.cloud.dataplex_v1.types.ResourceAccessSpec): - Optional. Specified when applied to a - resource (eg: Cloud Storage bucket, BigQuery - dataset, BigQuery table). - data_access_spec (google.cloud.dataplex_v1.types.DataAccessSpec): - Optional. Specified when applied to data - stored on the resource (eg: rows, columns in - BigQuery Tables). 
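
A short, hedged sketch of a ``DataAttribute`` carrying both spec types described above; the principals are placeholders in the same style as the docstring's ``PII`` example, and ``ResourceAccessSpec``/``DataAccessSpec`` come from this package's security types.

```python
# Sketch under assumptions: reader principals are placeholders.
from google.cloud import dataplex_v1

pii_attribute = dataplex_v1.DataAttribute(
    description="Columns holding PII",
    resource_access_spec=dataplex_v1.ResourceAccessSpec(
        readers=["foo@bar.com"],      # access to the resource itself
    ),
    data_access_spec=dataplex_v1.DataAccessSpec(
        readers=["bar@foo.com"],      # access to data stored on the resource
    ),
)
```
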
- """ - - name: str = proto.Field( - proto.STRING, - number=1, - ) - uid: str = proto.Field( - proto.STRING, - number=2, - ) - create_time: timestamp_pb2.Timestamp = proto.Field( - proto.MESSAGE, - number=3, - message=timestamp_pb2.Timestamp, - ) - update_time: timestamp_pb2.Timestamp = proto.Field( - proto.MESSAGE, - number=4, - message=timestamp_pb2.Timestamp, - ) - description: str = proto.Field( - proto.STRING, - number=5, - ) - display_name: str = proto.Field( - proto.STRING, - number=6, - ) - labels: MutableMapping[str, str] = proto.MapField( - proto.STRING, - proto.STRING, - number=7, - ) - parent_id: str = proto.Field( - proto.STRING, - number=8, - ) - attribute_count: int = proto.Field( - proto.INT32, - number=9, - ) - etag: str = proto.Field( - proto.STRING, - number=10, - ) - resource_access_spec: security.ResourceAccessSpec = proto.Field( - proto.MESSAGE, - number=100, - message=security.ResourceAccessSpec, - ) - data_access_spec: security.DataAccessSpec = proto.Field( - proto.MESSAGE, - number=101, - message=security.DataAccessSpec, - ) - - -class DataAttributeBinding(proto.Message): - r"""DataAttributeBinding represents binding of attributes to - resources. Eg: Bind 'CustomerInfo' entity with 'PII' attribute. - - - .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields - - Attributes: - name (str): - Output only. The relative resource name of the Data - Attribute Binding, of the form: - projects/{project_number}/locations/{location}/dataAttributeBindings/{data_attribute_binding_id} - uid (str): - Output only. System generated globally unique - ID for the DataAttributeBinding. This ID will be - different if the DataAttributeBinding is deleted - and re-created with the same name. - create_time (google.protobuf.timestamp_pb2.Timestamp): - Output only. The time when the - DataAttributeBinding was created. - update_time (google.protobuf.timestamp_pb2.Timestamp): - Output only. The time when the - DataAttributeBinding was last updated. - description (str): - Optional. Description of the - DataAttributeBinding. - display_name (str): - Optional. User friendly display name. - labels (MutableMapping[str, str]): - Optional. User-defined labels for the - DataAttributeBinding. - etag (str): - This checksum is computed by the server based - on the value of other fields, and may be sent on - update and delete requests to ensure the client - has an up-to-date value before proceeding. Etags - must be used when calling the - DeleteDataAttributeBinding and the - UpdateDataAttributeBinding method. - resource (str): - Optional. Immutable. The resource name of the resource that - is associated to attributes. Presently, only entity resource - is supported in the form: - projects/{project}/locations/{location}/lakes/{lake}/zones/{zone}/entities/{entity_id} - Must belong in the same project and region as the attribute - binding, and there can only exist one active binding for a - resource. - - This field is a member of `oneof`_ ``resource_reference``. - attributes (MutableSequence[str]): - Optional. List of attributes to be associated with the - resource, provided in the form: - projects/{project}/locations/{location}/dataTaxonomies/{dataTaxonomy}/attributes/{data_attribute_id} - paths (MutableSequence[google.cloud.dataplex_v1.types.DataAttributeBinding.Path]): - Optional. The list of paths for items within - the associated resource (eg. columns and - partitions within a table) along with attribute - bindings. 
- """ - - class Path(proto.Message): - r"""Represents a subresource of the given resource, and - associated bindings with it. Currently supported subresources - are column and partition schema fields within a table. - - Attributes: - name (str): - Required. The name identifier of the path. - Nested columns should be of the form: - 'address.city'. - attributes (MutableSequence[str]): - Optional. List of attributes to be associated with the path - of the resource, provided in the form: - projects/{project}/locations/{location}/dataTaxonomies/{dataTaxonomy}/attributes/{data_attribute_id} - """ - - name: str = proto.Field( - proto.STRING, - number=1, - ) - attributes: MutableSequence[str] = proto.RepeatedField( - proto.STRING, - number=2, - ) - - name: str = proto.Field( - proto.STRING, - number=1, - ) - uid: str = proto.Field( - proto.STRING, - number=2, - ) - create_time: timestamp_pb2.Timestamp = proto.Field( - proto.MESSAGE, - number=3, - message=timestamp_pb2.Timestamp, - ) - update_time: timestamp_pb2.Timestamp = proto.Field( - proto.MESSAGE, - number=4, - message=timestamp_pb2.Timestamp, - ) - description: str = proto.Field( - proto.STRING, - number=5, - ) - display_name: str = proto.Field( - proto.STRING, - number=6, - ) - labels: MutableMapping[str, str] = proto.MapField( - proto.STRING, - proto.STRING, - number=7, - ) - etag: str = proto.Field( - proto.STRING, - number=8, - ) - resource: str = proto.Field( - proto.STRING, - number=100, - oneof='resource_reference', - ) - attributes: MutableSequence[str] = proto.RepeatedField( - proto.STRING, - number=110, - ) - paths: MutableSequence[Path] = proto.RepeatedField( - proto.MESSAGE, - number=120, - message=Path, - ) - - -class CreateDataTaxonomyRequest(proto.Message): - r"""Create DataTaxonomy request. - - Attributes: - parent (str): - Required. The resource name of the data taxonomy location, - of the form: - projects/{project_number}/locations/{location_id} where - ``location_id`` refers to a GCP region. - data_taxonomy_id (str): - Required. DataTaxonomy identifier. - - - Must contain only lowercase letters, numbers and hyphens. - - Must start with a letter. - - Must be between 1-63 characters. - - Must end with a number or a letter. - - Must be unique within the Project. - data_taxonomy (google.cloud.dataplex_v1.types.DataTaxonomy): - Required. DataTaxonomy resource. - validate_only (bool): - Optional. Only validate the request, but do - not perform mutations. The default is false. - """ - - parent: str = proto.Field( - proto.STRING, - number=1, - ) - data_taxonomy_id: str = proto.Field( - proto.STRING, - number=2, - ) - data_taxonomy: 'DataTaxonomy' = proto.Field( - proto.MESSAGE, - number=3, - message='DataTaxonomy', - ) - validate_only: bool = proto.Field( - proto.BOOL, - number=4, - ) - - -class UpdateDataTaxonomyRequest(proto.Message): - r"""Update DataTaxonomy request. - - Attributes: - update_mask (google.protobuf.field_mask_pb2.FieldMask): - Required. Mask of fields to update. - data_taxonomy (google.cloud.dataplex_v1.types.DataTaxonomy): - Required. Only fields specified in ``update_mask`` are - updated. - validate_only (bool): - Optional. Only validate the request, but do - not perform mutations. The default is false. 
- """ - - update_mask: field_mask_pb2.FieldMask = proto.Field( - proto.MESSAGE, - number=1, - message=field_mask_pb2.FieldMask, - ) - data_taxonomy: 'DataTaxonomy' = proto.Field( - proto.MESSAGE, - number=2, - message='DataTaxonomy', - ) - validate_only: bool = proto.Field( - proto.BOOL, - number=3, - ) - - -class GetDataTaxonomyRequest(proto.Message): - r"""Get DataTaxonomy request. - - Attributes: - name (str): - Required. The resource name of the DataTaxonomy: - projects/{project_number}/locations/{location_id}/dataTaxonomies/{data_taxonomy_id} - """ - - name: str = proto.Field( - proto.STRING, - number=1, - ) - - -class ListDataTaxonomiesRequest(proto.Message): - r"""List DataTaxonomies request. - - Attributes: - parent (str): - Required. The resource name of the DataTaxonomy location, of - the form: projects/{project_number}/locations/{location_id} - where ``location_id`` refers to a GCP region. - page_size (int): - Optional. Maximum number of DataTaxonomies to - return. The service may return fewer than this - value. If unspecified, at most 10 DataTaxonomies - will be returned. The maximum value is 1000; - values above 1000 will be coerced to 1000. - page_token (str): - Optional. Page token received from a previous - ``ListDataTaxonomies`` call. Provide this to retrieve the - subsequent page. When paginating, all other parameters - provided to ``ListDataTaxonomies`` must match the call that - provided the page token. - filter (str): - Optional. Filter request. - order_by (str): - Optional. Order by fields for the result. - """ - - parent: str = proto.Field( - proto.STRING, - number=1, - ) - page_size: int = proto.Field( - proto.INT32, - number=2, - ) - page_token: str = proto.Field( - proto.STRING, - number=3, - ) - filter: str = proto.Field( - proto.STRING, - number=4, - ) - order_by: str = proto.Field( - proto.STRING, - number=5, - ) - - -class ListDataTaxonomiesResponse(proto.Message): - r"""List DataTaxonomies response. - - Attributes: - data_taxonomies (MutableSequence[google.cloud.dataplex_v1.types.DataTaxonomy]): - DataTaxonomies under the given parent - location. - next_page_token (str): - Token to retrieve the next page of results, - or empty if there are no more results in the - list. - unreachable_locations (MutableSequence[str]): - Locations that could not be reached. - """ - - @property - def raw_page(self): - return self - - data_taxonomies: MutableSequence['DataTaxonomy'] = proto.RepeatedField( - proto.MESSAGE, - number=1, - message='DataTaxonomy', - ) - next_page_token: str = proto.Field( - proto.STRING, - number=2, - ) - unreachable_locations: MutableSequence[str] = proto.RepeatedField( - proto.STRING, - number=3, - ) - - -class DeleteDataTaxonomyRequest(proto.Message): - r"""Delete DataTaxonomy request. - - Attributes: - name (str): - Required. The resource name of the DataTaxonomy: - projects/{project_number}/locations/{location_id}/dataTaxonomies/{data_taxonomy_id} - etag (str): - Optional. If the client provided etag value - does not match the current etag value,the - DeleteDataTaxonomy method returns an ABORTED - error. - """ - - name: str = proto.Field( - proto.STRING, - number=1, - ) - etag: str = proto.Field( - proto.STRING, - number=2, - ) - - -class CreateDataAttributeRequest(proto.Message): - r"""Create DataAttribute request. - - Attributes: - parent (str): - Required. The resource name of the parent data taxonomy - projects/{project_number}/locations/{location_id}/dataTaxonomies/{data_taxonomy_id} - data_attribute_id (str): - Required. 
DataAttribute identifier. - - - Must contain only lowercase letters, numbers and hyphens. - - Must start with a letter. - - Must be between 1-63 characters. - - Must end with a number or a letter. - - Must be unique within the DataTaxonomy. - data_attribute (google.cloud.dataplex_v1.types.DataAttribute): - Required. DataAttribute resource. - validate_only (bool): - Optional. Only validate the request, but do - not perform mutations. The default is false. - """ - - parent: str = proto.Field( - proto.STRING, - number=1, - ) - data_attribute_id: str = proto.Field( - proto.STRING, - number=2, - ) - data_attribute: 'DataAttribute' = proto.Field( - proto.MESSAGE, - number=3, - message='DataAttribute', - ) - validate_only: bool = proto.Field( - proto.BOOL, - number=4, - ) - - -class UpdateDataAttributeRequest(proto.Message): - r"""Update DataAttribute request. - - Attributes: - update_mask (google.protobuf.field_mask_pb2.FieldMask): - Required. Mask of fields to update. - data_attribute (google.cloud.dataplex_v1.types.DataAttribute): - Required. Only fields specified in ``update_mask`` are - updated. - validate_only (bool): - Optional. Only validate the request, but do - not perform mutations. The default is false. - """ - - update_mask: field_mask_pb2.FieldMask = proto.Field( - proto.MESSAGE, - number=1, - message=field_mask_pb2.FieldMask, - ) - data_attribute: 'DataAttribute' = proto.Field( - proto.MESSAGE, - number=2, - message='DataAttribute', - ) - validate_only: bool = proto.Field( - proto.BOOL, - number=3, - ) - - -class GetDataAttributeRequest(proto.Message): - r"""Get DataAttribute request. - - Attributes: - name (str): - Required. The resource name of the dataAttribute: - projects/{project_number}/locations/{location_id}/dataTaxonomies/{dataTaxonomy}/attributes/{data_attribute_id} - """ - - name: str = proto.Field( - proto.STRING, - number=1, - ) - - -class ListDataAttributesRequest(proto.Message): - r"""List DataAttributes request. - - Attributes: - parent (str): - Required. The resource name of the DataTaxonomy: - projects/{project_number}/locations/{location_id}/dataTaxonomies/{data_taxonomy_id} - page_size (int): - Optional. Maximum number of DataAttributes to - return. The service may return fewer than this - value. If unspecified, at most 10 dataAttributes - will be returned. The maximum value is 1000; - values above 1000 will be coerced to 1000. - page_token (str): - Optional. Page token received from a previous - ``ListDataAttributes`` call. Provide this to retrieve the - subsequent page. When paginating, all other parameters - provided to ``ListDataAttributes`` must match the call that - provided the page token. - filter (str): - Optional. Filter request. - order_by (str): - Optional. Order by fields for the result. - """ - - parent: str = proto.Field( - proto.STRING, - number=1, - ) - page_size: int = proto.Field( - proto.INT32, - number=2, - ) - page_token: str = proto.Field( - proto.STRING, - number=3, - ) - filter: str = proto.Field( - proto.STRING, - number=4, - ) - order_by: str = proto.Field( - proto.STRING, - number=5, - ) - - -class ListDataAttributesResponse(proto.Message): - r"""List DataAttributes response. - - Attributes: - data_attributes (MutableSequence[google.cloud.dataplex_v1.types.DataAttribute]): - DataAttributes under the given parent - DataTaxonomy. - next_page_token (str): - Token to retrieve the next page of results, - or empty if there are no more results in the - list. 
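
A sketch of the pagination contract documented above: the client's list method returns a pager that follows ``next_page_token`` for you. The parent value is a placeholder.

```python
from google.cloud import dataplex_v1

client = dataplex_v1.DataTaxonomyServiceClient()
request = dataplex_v1.ListDataAttributesRequest(
    parent="projects/my-project/locations/us-central1/dataTaxonomies/sensitive-data",
    page_size=100,                 # the service caps this at 1000
)
for attribute in client.list_data_attributes(request=request):
    print(attribute.name)          # iterates across pages transparently
```
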
- unreachable_locations (MutableSequence[str]): - Locations that could not be reached. - """ - - @property - def raw_page(self): - return self - - data_attributes: MutableSequence['DataAttribute'] = proto.RepeatedField( - proto.MESSAGE, - number=1, - message='DataAttribute', - ) - next_page_token: str = proto.Field( - proto.STRING, - number=2, - ) - unreachable_locations: MutableSequence[str] = proto.RepeatedField( - proto.STRING, - number=3, - ) - - -class DeleteDataAttributeRequest(proto.Message): - r"""Delete DataAttribute request. - - Attributes: - name (str): - Required. The resource name of the DataAttribute: - projects/{project_number}/locations/{location_id}/dataTaxonomies/{dataTaxonomy}/attributes/{data_attribute_id} - etag (str): - Optional. If the client provided etag value - does not match the current etag value, the - DeleteDataAttribute method returns an ABORTED - error response. - """ - - name: str = proto.Field( - proto.STRING, - number=1, - ) - etag: str = proto.Field( - proto.STRING, - number=2, - ) - - -class CreateDataAttributeBindingRequest(proto.Message): - r"""Create DataAttributeBinding request. - - Attributes: - parent (str): - Required. The resource name of the parent data taxonomy - projects/{project_number}/locations/{location_id} - data_attribute_binding_id (str): - Required. DataAttributeBinding identifier. - - - Must contain only lowercase letters, numbers and hyphens. - - Must start with a letter. - - Must be between 1-63 characters. - - Must end with a number or a letter. - - Must be unique within the Location. - data_attribute_binding (google.cloud.dataplex_v1.types.DataAttributeBinding): - Required. DataAttributeBinding resource. - validate_only (bool): - Optional. Only validate the request, but do - not perform mutations. The default is false. - """ - - parent: str = proto.Field( - proto.STRING, - number=1, - ) - data_attribute_binding_id: str = proto.Field( - proto.STRING, - number=2, - ) - data_attribute_binding: 'DataAttributeBinding' = proto.Field( - proto.MESSAGE, - number=3, - message='DataAttributeBinding', - ) - validate_only: bool = proto.Field( - proto.BOOL, - number=4, - ) - - -class UpdateDataAttributeBindingRequest(proto.Message): - r"""Update DataAttributeBinding request. - - Attributes: - update_mask (google.protobuf.field_mask_pb2.FieldMask): - Required. Mask of fields to update. - data_attribute_binding (google.cloud.dataplex_v1.types.DataAttributeBinding): - Required. Only fields specified in ``update_mask`` are - updated. - validate_only (bool): - Optional. Only validate the request, but do - not perform mutations. The default is false. - """ - - update_mask: field_mask_pb2.FieldMask = proto.Field( - proto.MESSAGE, - number=1, - message=field_mask_pb2.FieldMask, - ) - data_attribute_binding: 'DataAttributeBinding' = proto.Field( - proto.MESSAGE, - number=2, - message='DataAttributeBinding', - ) - validate_only: bool = proto.Field( - proto.BOOL, - number=3, - ) - - -class GetDataAttributeBindingRequest(proto.Message): - r"""Get DataAttributeBinding request. - - Attributes: - name (str): - Required. The resource name of the DataAttributeBinding: - projects/{project_number}/locations/{location_id}/dataAttributeBindings/{data_attribute_binding_id} - """ - - name: str = proto.Field( - proto.STRING, - number=1, - ) - - -class ListDataAttributeBindingsRequest(proto.Message): - r"""List DataAttributeBindings request. - - Attributes: - parent (str): - Required. 
The resource name of the Location: - projects/{project_number}/locations/{location_id} - page_size (int): - Optional. Maximum number of - DataAttributeBindings to return. The service may - return fewer than this value. If unspecified, at - most 10 DataAttributeBindings will be returned. - The maximum value is 1000; values above 1000 - will be coerced to 1000. - page_token (str): - Optional. Page token received from a previous - ``ListDataAttributeBindings`` call. Provide this to retrieve - the subsequent page. When paginating, all other parameters - provided to ``ListDataAttributeBindings`` must match the - call that provided the page token. - filter (str): - Optional. Filter request. - Filter using resource: - filter=resource:"resource-name" Filter using - attribute: filter=attributes:"attribute-name" - Filter using attribute in paths list: - - filter=paths.attributes:"attribute-name". - order_by (str): - Optional. Order by fields for the result. - """ - - parent: str = proto.Field( - proto.STRING, - number=1, - ) - page_size: int = proto.Field( - proto.INT32, - number=2, - ) - page_token: str = proto.Field( - proto.STRING, - number=3, - ) - filter: str = proto.Field( - proto.STRING, - number=4, - ) - order_by: str = proto.Field( - proto.STRING, - number=5, - ) - - -class ListDataAttributeBindingsResponse(proto.Message): - r"""List DataAttributeBindings response. - - Attributes: - data_attribute_bindings (MutableSequence[google.cloud.dataplex_v1.types.DataAttributeBinding]): - DataAttributeBindings under the given parent - Location. - next_page_token (str): - Token to retrieve the next page of results, - or empty if there are no more results in the - list. - unreachable_locations (MutableSequence[str]): - Locations that could not be reached. - """ - - @property - def raw_page(self): - return self - - data_attribute_bindings: MutableSequence['DataAttributeBinding'] = proto.RepeatedField( - proto.MESSAGE, - number=1, - message='DataAttributeBinding', - ) - next_page_token: str = proto.Field( - proto.STRING, - number=2, - ) - unreachable_locations: MutableSequence[str] = proto.RepeatedField( - proto.STRING, - number=3, - ) - - -class DeleteDataAttributeBindingRequest(proto.Message): - r"""Delete DataAttributeBinding request. - - Attributes: - name (str): - Required. The resource name of the DataAttributeBinding: - projects/{project_number}/locations/{location_id}/dataAttributeBindings/{data_attribute_binding_id} - etag (str): - Required. If the client provided etag value - does not match the current etag value, the - DeleteDataAttributeBindingRequest method returns - an ABORTED error response. Etags must be used - when calling the DeleteDataAttributeBinding. - """ - - name: str = proto.Field( - proto.STRING, - number=1, - ) - etag: str = proto.Field( - proto.STRING, - number=2, - ) - - -__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/types/datascans.py b/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/types/datascans.py deleted file mode 100644 index 5dc0236e91fd..000000000000 --- a/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/types/datascans.py +++ /dev/null @@ -1,905 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -from __future__ import annotations - -from typing import MutableMapping, MutableSequence - -import proto # type: ignore - -from google.cloud.dataplex_v1.types import data_discovery -from google.cloud.dataplex_v1.types import data_profile -from google.cloud.dataplex_v1.types import data_quality -from google.cloud.dataplex_v1.types import processing -from google.cloud.dataplex_v1.types import resources -from google.protobuf import field_mask_pb2 # type: ignore -from google.protobuf import timestamp_pb2 # type: ignore - - -__protobuf__ = proto.module( - package='google.cloud.dataplex.v1', - manifest={ - 'DataScanType', - 'CreateDataScanRequest', - 'UpdateDataScanRequest', - 'DeleteDataScanRequest', - 'GetDataScanRequest', - 'ListDataScansRequest', - 'ListDataScansResponse', - 'RunDataScanRequest', - 'RunDataScanResponse', - 'GetDataScanJobRequest', - 'ListDataScanJobsRequest', - 'ListDataScanJobsResponse', - 'GenerateDataQualityRulesRequest', - 'GenerateDataQualityRulesResponse', - 'DataScan', - 'DataScanJob', - }, -) - - -class DataScanType(proto.Enum): - r"""The type of data scan. - - Values: - DATA_SCAN_TYPE_UNSPECIFIED (0): - The data scan type is unspecified. - DATA_QUALITY (1): - Data quality scan. - DATA_PROFILE (2): - Data profile scan. - DATA_DISCOVERY (3): - Data discovery scan. - """ - DATA_SCAN_TYPE_UNSPECIFIED = 0 - DATA_QUALITY = 1 - DATA_PROFILE = 2 - DATA_DISCOVERY = 3 - - -class CreateDataScanRequest(proto.Message): - r"""Create dataScan request. - - Attributes: - parent (str): - Required. The resource name of the parent location: - ``projects/{project}/locations/{location_id}`` where - ``project`` refers to a *project_id* or *project_number* and - ``location_id`` refers to a GCP region. - data_scan (google.cloud.dataplex_v1.types.DataScan): - Required. DataScan resource. - data_scan_id (str): - Required. DataScan identifier. - - - Must contain only lowercase letters, numbers and hyphens. - - Must start with a letter. - - Must end with a number or a letter. - - Must be between 1-63 characters. - - Must be unique within the customer project / location. - validate_only (bool): - Optional. Only validate the request, but do not perform - mutations. The default is ``false``. - """ - - parent: str = proto.Field( - proto.STRING, - number=1, - ) - data_scan: 'DataScan' = proto.Field( - proto.MESSAGE, - number=2, - message='DataScan', - ) - data_scan_id: str = proto.Field( - proto.STRING, - number=3, - ) - validate_only: bool = proto.Field( - proto.BOOL, - number=4, - ) - - -class UpdateDataScanRequest(proto.Message): - r"""Update dataScan request. - - Attributes: - data_scan (google.cloud.dataplex_v1.types.DataScan): - Required. DataScan resource to be updated. - - Only fields specified in ``update_mask`` are updated. - update_mask (google.protobuf.field_mask_pb2.FieldMask): - Required. Mask of fields to update. - validate_only (bool): - Optional. Only validate the request, but do not perform - mutations. The default is ``false``. 
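
A hedged sketch of ``CreateDataScan`` with a data quality spec, per the request message above; the BigQuery resource string and the rule are placeholders, and the call is assumed to return a long-running operation as other creates in this package do.

```python
from google.cloud import dataplex_v1

client = dataplex_v1.DataScanServiceClient()
scan = dataplex_v1.DataScan(
    data=dataplex_v1.DataSource(
        resource="//bigquery.googleapis.com/projects/my-project/datasets/d/tables/t",
    ),
    data_quality_spec=dataplex_v1.DataQualitySpec(
        rules=[
            dataplex_v1.DataQualityRule(
                column="id",
                dimension="COMPLETENESS",   # assumed dimension value
                non_null_expectation=dataplex_v1.DataQualityRule.NonNullExpectation(),
            ),
        ],
    ),
)
operation = client.create_data_scan(
    parent="projects/my-project/locations/us-central1",
    data_scan=scan,
    data_scan_id="my-quality-scan",         # 1-63 chars, per the rules above
)
print(operation.result().name)
```
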
- """ - - data_scan: 'DataScan' = proto.Field( - proto.MESSAGE, - number=1, - message='DataScan', - ) - update_mask: field_mask_pb2.FieldMask = proto.Field( - proto.MESSAGE, - number=2, - message=field_mask_pb2.FieldMask, - ) - validate_only: bool = proto.Field( - proto.BOOL, - number=3, - ) - - -class DeleteDataScanRequest(proto.Message): - r"""Delete dataScan request. - - Attributes: - name (str): - Required. The resource name of the dataScan: - ``projects/{project}/locations/{location_id}/dataScans/{data_scan_id}`` - where ``project`` refers to a *project_id* or - *project_number* and ``location_id`` refers to a GCP region. - """ - - name: str = proto.Field( - proto.STRING, - number=1, - ) - - -class GetDataScanRequest(proto.Message): - r"""Get dataScan request. - - Attributes: - name (str): - Required. The resource name of the dataScan: - ``projects/{project}/locations/{location_id}/dataScans/{data_scan_id}`` - where ``project`` refers to a *project_id* or - *project_number* and ``location_id`` refers to a GCP region. - view (google.cloud.dataplex_v1.types.GetDataScanRequest.DataScanView): - Optional. Select the DataScan view to return. Defaults to - ``BASIC``. - """ - class DataScanView(proto.Enum): - r"""DataScan view options. - - Values: - DATA_SCAN_VIEW_UNSPECIFIED (0): - The API will default to the ``BASIC`` view. - BASIC (1): - Basic view that does not include *spec* and *result*. - FULL (10): - Include everything. - """ - DATA_SCAN_VIEW_UNSPECIFIED = 0 - BASIC = 1 - FULL = 10 - - name: str = proto.Field( - proto.STRING, - number=1, - ) - view: DataScanView = proto.Field( - proto.ENUM, - number=2, - enum=DataScanView, - ) - - -class ListDataScansRequest(proto.Message): - r"""List dataScans request. - - Attributes: - parent (str): - Required. The resource name of the parent location: - ``projects/{project}/locations/{location_id}`` where - ``project`` refers to a *project_id* or *project_number* and - ``location_id`` refers to a GCP region. - page_size (int): - Optional. Maximum number of dataScans to - return. The service may return fewer than this - value. If unspecified, at most 500 scans will be - returned. The maximum value is 1000; values - above 1000 will be coerced to 1000. - page_token (str): - Optional. Page token received from a previous - ``ListDataScans`` call. Provide this to retrieve the - subsequent page. When paginating, all other parameters - provided to ``ListDataScans`` must match the call that - provided the page token. - filter (str): - Optional. Filter request. - order_by (str): - Optional. Order by fields (``name`` or ``create_time``) for - the result. If not specified, the ordering is undefined. - """ - - parent: str = proto.Field( - proto.STRING, - number=1, - ) - page_size: int = proto.Field( - proto.INT32, - number=2, - ) - page_token: str = proto.Field( - proto.STRING, - number=3, - ) - filter: str = proto.Field( - proto.STRING, - number=4, - ) - order_by: str = proto.Field( - proto.STRING, - number=5, - ) - - -class ListDataScansResponse(proto.Message): - r"""List dataScans response. - - Attributes: - data_scans (MutableSequence[google.cloud.dataplex_v1.types.DataScan]): - DataScans (``BASIC`` view only) under the given parent - location. - next_page_token (str): - Token to retrieve the next page of results, - or empty if there are no more results in the - list. - unreachable (MutableSequence[str]): - Locations that could not be reached. 
- """ - - @property - def raw_page(self): - return self - - data_scans: MutableSequence['DataScan'] = proto.RepeatedField( - proto.MESSAGE, - number=1, - message='DataScan', - ) - next_page_token: str = proto.Field( - proto.STRING, - number=2, - ) - unreachable: MutableSequence[str] = proto.RepeatedField( - proto.STRING, - number=3, - ) - - -class RunDataScanRequest(proto.Message): - r"""Run DataScan Request - - Attributes: - name (str): - Required. The resource name of the DataScan: - ``projects/{project}/locations/{location_id}/dataScans/{data_scan_id}``. - where ``project`` refers to a *project_id* or - *project_number* and ``location_id`` refers to a GCP region. - - Only **OnDemand** data scans are allowed. - """ - - name: str = proto.Field( - proto.STRING, - number=1, - ) - - -class RunDataScanResponse(proto.Message): - r"""Run DataScan Response. - - Attributes: - job (google.cloud.dataplex_v1.types.DataScanJob): - DataScanJob created by RunDataScan request. - """ - - job: 'DataScanJob' = proto.Field( - proto.MESSAGE, - number=1, - message='DataScanJob', - ) - - -class GetDataScanJobRequest(proto.Message): - r"""Get DataScanJob request. - - Attributes: - name (str): - Required. The resource name of the DataScanJob: - ``projects/{project}/locations/{location_id}/dataScans/{data_scan_id}/jobs/{data_scan_job_id}`` - where ``project`` refers to a *project_id* or - *project_number* and ``location_id`` refers to a GCP region. - view (google.cloud.dataplex_v1.types.GetDataScanJobRequest.DataScanJobView): - Optional. Select the DataScanJob view to return. Defaults to - ``BASIC``. - """ - class DataScanJobView(proto.Enum): - r"""DataScanJob view options. - - Values: - DATA_SCAN_JOB_VIEW_UNSPECIFIED (0): - The API will default to the ``BASIC`` view. - BASIC (1): - Basic view that does not include *spec* and *result*. - FULL (10): - Include everything. - """ - DATA_SCAN_JOB_VIEW_UNSPECIFIED = 0 - BASIC = 1 - FULL = 10 - - name: str = proto.Field( - proto.STRING, - number=1, - ) - view: DataScanJobView = proto.Field( - proto.ENUM, - number=2, - enum=DataScanJobView, - ) - - -class ListDataScanJobsRequest(proto.Message): - r"""List DataScanJobs request. - - Attributes: - parent (str): - Required. The resource name of the parent environment: - ``projects/{project}/locations/{location_id}/dataScans/{data_scan_id}`` - where ``project`` refers to a *project_id* or - *project_number* and ``location_id`` refers to a GCP region. - page_size (int): - Optional. Maximum number of DataScanJobs to - return. The service may return fewer than this - value. If unspecified, at most 10 DataScanJobs - will be returned. The maximum value is 1000; - values above 1000 will be coerced to 1000. - page_token (str): - Optional. Page token received from a previous - ``ListDataScanJobs`` call. Provide this to retrieve the - subsequent page. When paginating, all other parameters - provided to ``ListDataScanJobs`` must match the call that - provided the page token. - filter (str): - Optional. An expression for filtering the results of the - ListDataScanJobs request. - - If unspecified, all datascan jobs will be returned. Multiple - filters can be applied (with ``AND``, ``OR`` logical - operators). Filters are case-sensitive. - - Allowed fields are: - - - ``start_time`` - - ``end_time`` - - ``start_time`` and ``end_time`` expect RFC-3339 formatted - strings (e.g. 2018-10-08T18:30:00-07:00). 
- - For instance, 'start_time > 2018-10-08T00:00:00.123456789Z - AND end_time < 2018-10-09T00:00:00.123456789Z' limits - results to DataScanJobs between specified start and end - times. - """ - - parent: str = proto.Field( - proto.STRING, - number=1, - ) - page_size: int = proto.Field( - proto.INT32, - number=2, - ) - page_token: str = proto.Field( - proto.STRING, - number=3, - ) - filter: str = proto.Field( - proto.STRING, - number=4, - ) - - -class ListDataScanJobsResponse(proto.Message): - r"""List DataScanJobs response. - - Attributes: - data_scan_jobs (MutableSequence[google.cloud.dataplex_v1.types.DataScanJob]): - DataScanJobs (``BASIC`` view only) under a given dataScan. - next_page_token (str): - Token to retrieve the next page of results, - or empty if there are no more results in the - list. - """ - - @property - def raw_page(self): - return self - - data_scan_jobs: MutableSequence['DataScanJob'] = proto.RepeatedField( - proto.MESSAGE, - number=1, - message='DataScanJob', - ) - next_page_token: str = proto.Field( - proto.STRING, - number=2, - ) - - -class GenerateDataQualityRulesRequest(proto.Message): - r"""Request details for generating data quality rule - recommendations. - - Attributes: - name (str): - Required. The name must be one of the following: - - - The name of a data scan with at least one successful, - completed data profiling job - - The name of a successful, completed data profiling job (a - data scan job where the job type is data profiling) - """ - - name: str = proto.Field( - proto.STRING, - number=1, - ) - - -class GenerateDataQualityRulesResponse(proto.Message): - r"""Response details for data quality rule recommendations. - - Attributes: - rule (MutableSequence[google.cloud.dataplex_v1.types.DataQualityRule]): - The data quality rules that Dataplex - generates based on the results of a data - profiling scan. - """ - - rule: MutableSequence[data_quality.DataQualityRule] = proto.RepeatedField( - proto.MESSAGE, - number=1, - message=data_quality.DataQualityRule, - ) - - -class DataScan(proto.Message): - r"""Represents a user-visible job which provides the insights for the - related data source. - - For example: - - - Data Quality: generates queries based on the rules and runs - against the data to get data quality check results. - - Data Profile: analyzes the data in table(s) and generates - insights about the structure, content and relationships (such as - null percent, cardinality, min/max/mean, etc). - - This message has `oneof`_ fields (mutually exclusive fields). - For each oneof, at most one member field can be set at the same time. - Setting any member of the oneof automatically clears all other - members. - - .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields - - Attributes: - name (str): - Output only. The relative resource name of the scan, of the - form: - ``projects/{project}/locations/{location_id}/dataScans/{datascan_id}``, - where ``project`` refers to a *project_id* or - *project_number* and ``location_id`` refers to a GCP region. - uid (str): - Output only. System generated globally unique - ID for the scan. This ID will be different if - the scan is deleted and re-created with the same - name. - description (str): - Optional. Description of the scan. - - - Must be between 1-1024 characters. - display_name (str): - Optional. User friendly display name. - - - Must be between 1-256 characters. - labels (MutableMapping[str, str]): - Optional. User-defined labels for the scan. 
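
A hedged sketch of ``GenerateDataQualityRules``, per the request and response messages just above; the scan name is a placeholder and must point at a scan (or scan job) with a completed profiling run.

```python
from google.cloud import dataplex_v1

client = dataplex_v1.DataScanServiceClient()
response = client.generate_data_quality_rules(
    name="projects/my-project/locations/us-central1/dataScans/my-profile-scan",
)
for rule in response.rule:        # note: the repeated field is named ``rule``
    print(rule.column, rule.dimension)
```
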
- state (google.cloud.dataplex_v1.types.State): - Output only. Current state of the DataScan. - create_time (google.protobuf.timestamp_pb2.Timestamp): - Output only. The time when the scan was - created. - update_time (google.protobuf.timestamp_pb2.Timestamp): - Output only. The time when the scan was last - updated. - data (google.cloud.dataplex_v1.types.DataSource): - Required. The data source for DataScan. - execution_spec (google.cloud.dataplex_v1.types.DataScan.ExecutionSpec): - Optional. DataScan execution settings. - - If not specified, the fields in it will use - their default values. - execution_status (google.cloud.dataplex_v1.types.DataScan.ExecutionStatus): - Output only. Status of the data scan - execution. - type_ (google.cloud.dataplex_v1.types.DataScanType): - Output only. The type of DataScan. - data_quality_spec (google.cloud.dataplex_v1.types.DataQualitySpec): - Settings for a data quality scan. - - This field is a member of `oneof`_ ``spec``. - data_profile_spec (google.cloud.dataplex_v1.types.DataProfileSpec): - Settings for a data profile scan. - - This field is a member of `oneof`_ ``spec``. - data_discovery_spec (google.cloud.dataplex_v1.types.DataDiscoverySpec): - Settings for a data discovery scan. - - This field is a member of `oneof`_ ``spec``. - data_quality_result (google.cloud.dataplex_v1.types.DataQualityResult): - Output only. The result of a data quality - scan. - - This field is a member of `oneof`_ ``result``. - data_profile_result (google.cloud.dataplex_v1.types.DataProfileResult): - Output only. The result of a data profile - scan. - - This field is a member of `oneof`_ ``result``. - data_discovery_result (google.cloud.dataplex_v1.types.DataDiscoveryResult): - Output only. The result of a data discovery - scan. - - This field is a member of `oneof`_ ``result``. - """ - - class ExecutionSpec(proto.Message): - r"""DataScan execution settings. - - .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields - - Attributes: - trigger (google.cloud.dataplex_v1.types.Trigger): - Optional. Spec related to how often and when a scan should - be triggered. - - If not specified, the default is ``OnDemand``, which means - the scan will not run until the user calls ``RunDataScan`` - API. - field (str): - Immutable. The unnested field (of type *Date* or - *Timestamp*) that contains values which monotonically - increase over time. - - If not specified, a data scan will run for all data in the - table. - - This field is a member of `oneof`_ ``incremental``. - """ - - trigger: processing.Trigger = proto.Field( - proto.MESSAGE, - number=1, - message=processing.Trigger, - ) - field: str = proto.Field( - proto.STRING, - number=100, - oneof='incremental', - ) - - class ExecutionStatus(proto.Message): - r"""Status of the data scan execution. - - Attributes: - latest_job_start_time (google.protobuf.timestamp_pb2.Timestamp): - The time when the latest DataScanJob started. - latest_job_end_time (google.protobuf.timestamp_pb2.Timestamp): - The time when the latest DataScanJob ended. - latest_job_create_time (google.protobuf.timestamp_pb2.Timestamp): - Optional. The time when the DataScanJob - execution was created. 
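
A sketch of the ``ExecutionSpec`` oneof documented above: a scheduled trigger plus the incremental ``field``. ``Trigger`` comes from this package's processing types; the cron expression and column name are placeholders.

```python
from google.cloud import dataplex_v1

spec = dataplex_v1.DataScan.ExecutionSpec(
    trigger=dataplex_v1.Trigger(
        schedule=dataplex_v1.Trigger.Schedule(cron="0 3 * * *"),  # daily at 03:00
    ),
    field="event_timestamp",  # unnested Date/Timestamp column => incremental scans
)
```
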
- """ - - latest_job_start_time: timestamp_pb2.Timestamp = proto.Field( - proto.MESSAGE, - number=4, - message=timestamp_pb2.Timestamp, - ) - latest_job_end_time: timestamp_pb2.Timestamp = proto.Field( - proto.MESSAGE, - number=5, - message=timestamp_pb2.Timestamp, - ) - latest_job_create_time: timestamp_pb2.Timestamp = proto.Field( - proto.MESSAGE, - number=6, - message=timestamp_pb2.Timestamp, - ) - - name: str = proto.Field( - proto.STRING, - number=1, - ) - uid: str = proto.Field( - proto.STRING, - number=2, - ) - description: str = proto.Field( - proto.STRING, - number=3, - ) - display_name: str = proto.Field( - proto.STRING, - number=4, - ) - labels: MutableMapping[str, str] = proto.MapField( - proto.STRING, - proto.STRING, - number=5, - ) - state: resources.State = proto.Field( - proto.ENUM, - number=6, - enum=resources.State, - ) - create_time: timestamp_pb2.Timestamp = proto.Field( - proto.MESSAGE, - number=7, - message=timestamp_pb2.Timestamp, - ) - update_time: timestamp_pb2.Timestamp = proto.Field( - proto.MESSAGE, - number=8, - message=timestamp_pb2.Timestamp, - ) - data: processing.DataSource = proto.Field( - proto.MESSAGE, - number=9, - message=processing.DataSource, - ) - execution_spec: ExecutionSpec = proto.Field( - proto.MESSAGE, - number=10, - message=ExecutionSpec, - ) - execution_status: ExecutionStatus = proto.Field( - proto.MESSAGE, - number=11, - message=ExecutionStatus, - ) - type_: 'DataScanType' = proto.Field( - proto.ENUM, - number=12, - enum='DataScanType', - ) - data_quality_spec: data_quality.DataQualitySpec = proto.Field( - proto.MESSAGE, - number=100, - oneof='spec', - message=data_quality.DataQualitySpec, - ) - data_profile_spec: data_profile.DataProfileSpec = proto.Field( - proto.MESSAGE, - number=101, - oneof='spec', - message=data_profile.DataProfileSpec, - ) - data_discovery_spec: data_discovery.DataDiscoverySpec = proto.Field( - proto.MESSAGE, - number=102, - oneof='spec', - message=data_discovery.DataDiscoverySpec, - ) - data_quality_result: data_quality.DataQualityResult = proto.Field( - proto.MESSAGE, - number=200, - oneof='result', - message=data_quality.DataQualityResult, - ) - data_profile_result: data_profile.DataProfileResult = proto.Field( - proto.MESSAGE, - number=201, - oneof='result', - message=data_profile.DataProfileResult, - ) - data_discovery_result: data_discovery.DataDiscoveryResult = proto.Field( - proto.MESSAGE, - number=202, - oneof='result', - message=data_discovery.DataDiscoveryResult, - ) - - -class DataScanJob(proto.Message): - r"""A DataScanJob represents an instance of DataScan execution. - - This message has `oneof`_ fields (mutually exclusive fields). - For each oneof, at most one member field can be set at the same time. - Setting any member of the oneof automatically clears all other - members. - - .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields - - Attributes: - name (str): - Output only. The relative resource name of the DataScanJob, - of the form: - ``projects/{project}/locations/{location_id}/dataScans/{datascan_id}/jobs/{job_id}``, - where ``project`` refers to a *project_id* or - *project_number* and ``location_id`` refers to a GCP region. - uid (str): - Output only. System generated globally unique - ID for the DataScanJob. - create_time (google.protobuf.timestamp_pb2.Timestamp): - Output only. The time when the DataScanJob - was created. - start_time (google.protobuf.timestamp_pb2.Timestamp): - Output only. The time when the DataScanJob - was started. 
- end_time (google.protobuf.timestamp_pb2.Timestamp): - Output only. The time when the DataScanJob - ended. - state (google.cloud.dataplex_v1.types.DataScanJob.State): - Output only. Execution state for the - DataScanJob. - message (str): - Output only. Additional information about the - current state. - type_ (google.cloud.dataplex_v1.types.DataScanType): - Output only. The type of the parent DataScan. - data_quality_spec (google.cloud.dataplex_v1.types.DataQualitySpec): - Output only. Settings for a data quality - scan. - - This field is a member of `oneof`_ ``spec``. - data_profile_spec (google.cloud.dataplex_v1.types.DataProfileSpec): - Output only. Settings for a data profile - scan. - - This field is a member of `oneof`_ ``spec``. - data_discovery_spec (google.cloud.dataplex_v1.types.DataDiscoverySpec): - Output only. Settings for a data discovery - scan. - - This field is a member of `oneof`_ ``spec``. - data_quality_result (google.cloud.dataplex_v1.types.DataQualityResult): - Output only. The result of a data quality - scan. - - This field is a member of `oneof`_ ``result``. - data_profile_result (google.cloud.dataplex_v1.types.DataProfileResult): - Output only. The result of a data profile - scan. - - This field is a member of `oneof`_ ``result``. - data_discovery_result (google.cloud.dataplex_v1.types.DataDiscoveryResult): - Output only. The result of a data discovery - scan. - - This field is a member of `oneof`_ ``result``. - """ - class State(proto.Enum): - r"""Execution state for the DataScanJob. - - Values: - STATE_UNSPECIFIED (0): - The DataScanJob state is unspecified. - RUNNING (1): - The DataScanJob is running. - CANCELING (2): - The DataScanJob is canceling. - CANCELLED (3): - The DataScanJob cancellation was successful. - SUCCEEDED (4): - The DataScanJob completed successfully. - FAILED (5): - The DataScanJob is no longer running due to - an error. - PENDING (7): - The DataScanJob has been created but not - started to run yet. 
- """ - STATE_UNSPECIFIED = 0 - RUNNING = 1 - CANCELING = 2 - CANCELLED = 3 - SUCCEEDED = 4 - FAILED = 5 - PENDING = 7 - - name: str = proto.Field( - proto.STRING, - number=1, - ) - uid: str = proto.Field( - proto.STRING, - number=2, - ) - create_time: timestamp_pb2.Timestamp = proto.Field( - proto.MESSAGE, - number=8, - message=timestamp_pb2.Timestamp, - ) - start_time: timestamp_pb2.Timestamp = proto.Field( - proto.MESSAGE, - number=3, - message=timestamp_pb2.Timestamp, - ) - end_time: timestamp_pb2.Timestamp = proto.Field( - proto.MESSAGE, - number=4, - message=timestamp_pb2.Timestamp, - ) - state: State = proto.Field( - proto.ENUM, - number=5, - enum=State, - ) - message: str = proto.Field( - proto.STRING, - number=6, - ) - type_: 'DataScanType' = proto.Field( - proto.ENUM, - number=7, - enum='DataScanType', - ) - data_quality_spec: data_quality.DataQualitySpec = proto.Field( - proto.MESSAGE, - number=100, - oneof='spec', - message=data_quality.DataQualitySpec, - ) - data_profile_spec: data_profile.DataProfileSpec = proto.Field( - proto.MESSAGE, - number=101, - oneof='spec', - message=data_profile.DataProfileSpec, - ) - data_discovery_spec: data_discovery.DataDiscoverySpec = proto.Field( - proto.MESSAGE, - number=102, - oneof='spec', - message=data_discovery.DataDiscoverySpec, - ) - data_quality_result: data_quality.DataQualityResult = proto.Field( - proto.MESSAGE, - number=200, - oneof='result', - message=data_quality.DataQualityResult, - ) - data_profile_result: data_profile.DataProfileResult = proto.Field( - proto.MESSAGE, - number=201, - oneof='result', - message=data_profile.DataProfileResult, - ) - data_discovery_result: data_discovery.DataDiscoveryResult = proto.Field( - proto.MESSAGE, - number=202, - oneof='result', - message=data_discovery.DataDiscoveryResult, - ) - - -__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/types/logs.py b/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/types/logs.py deleted file mode 100644 index 6e4c2bbb0ddb..000000000000 --- a/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/types/logs.py +++ /dev/null @@ -1,1352 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -from __future__ import annotations - -from typing import MutableMapping, MutableSequence - -import proto # type: ignore - -from google.protobuf import duration_pb2 # type: ignore -from google.protobuf import timestamp_pb2 # type: ignore - - -__protobuf__ = proto.module( - package='google.cloud.dataplex.v1', - manifest={ - 'DiscoveryEvent', - 'JobEvent', - 'SessionEvent', - 'GovernanceEvent', - 'DataScanEvent', - 'DataQualityScanRuleResult', - }, -) - - -class DiscoveryEvent(proto.Message): - r"""The payload associated with Discovery data processing. - - This message has `oneof`_ fields (mutually exclusive fields). - For each oneof, at most one member field can be set at the same time. 
- Setting any member of the oneof automatically clears all other - members. - - .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields - - Attributes: - message (str): - The log message. - lake_id (str): - The id of the associated lake. - zone_id (str): - The id of the associated zone. - asset_id (str): - The id of the associated asset. - data_location (str): - The data location associated with the event. - datascan_id (str): - The id of the associated datascan for - standalone discovery. - type_ (google.cloud.dataplex_v1.types.DiscoveryEvent.EventType): - The type of the event being logged. - config (google.cloud.dataplex_v1.types.DiscoveryEvent.ConfigDetails): - Details about discovery configuration in - effect. - - This field is a member of `oneof`_ ``details``. - entity (google.cloud.dataplex_v1.types.DiscoveryEvent.EntityDetails): - Details about the entity associated with the - event. - - This field is a member of `oneof`_ ``details``. - partition (google.cloud.dataplex_v1.types.DiscoveryEvent.PartitionDetails): - Details about the partition associated with - the event. - - This field is a member of `oneof`_ ``details``. - action (google.cloud.dataplex_v1.types.DiscoveryEvent.ActionDetails): - Details about the action associated with the - event. - - This field is a member of `oneof`_ ``details``. - table (google.cloud.dataplex_v1.types.DiscoveryEvent.TableDetails): - Details about the BigQuery table publishing - associated with the event. - - This field is a member of `oneof`_ ``details``. - """ - class EventType(proto.Enum): - r"""The type of the event. - - Values: - EVENT_TYPE_UNSPECIFIED (0): - An unspecified event type. - CONFIG (1): - An event representing discovery configuration - in effect. - ENTITY_CREATED (2): - An event representing a metadata entity being - created. - ENTITY_UPDATED (3): - An event representing a metadata entity being - updated. - ENTITY_DELETED (4): - An event representing a metadata entity being - deleted. - PARTITION_CREATED (5): - An event representing a partition being - created. - PARTITION_UPDATED (6): - An event representing a partition being - updated. - PARTITION_DELETED (7): - An event representing a partition being - deleted. - TABLE_PUBLISHED (10): - An event representing a table being - published. - TABLE_UPDATED (11): - An event representing a table being updated. - TABLE_IGNORED (12): - An event representing a table being skipped - in publishing. - TABLE_DELETED (13): - An event representing a table being deleted. - """ - EVENT_TYPE_UNSPECIFIED = 0 - CONFIG = 1 - ENTITY_CREATED = 2 - ENTITY_UPDATED = 3 - ENTITY_DELETED = 4 - PARTITION_CREATED = 5 - PARTITION_UPDATED = 6 - PARTITION_DELETED = 7 - TABLE_PUBLISHED = 10 - TABLE_UPDATED = 11 - TABLE_IGNORED = 12 - TABLE_DELETED = 13 - - class EntityType(proto.Enum): - r"""The type of the entity. - - Values: - ENTITY_TYPE_UNSPECIFIED (0): - An unspecified event type. - TABLE (1): - Entities representing structured data. - FILESET (2): - Entities representing unstructured data. - """ - ENTITY_TYPE_UNSPECIFIED = 0 - TABLE = 1 - FILESET = 2 - - class TableType(proto.Enum): - r"""The type of the published table. - - Values: - TABLE_TYPE_UNSPECIFIED (0): - An unspecified table type. - EXTERNAL_TABLE (1): - External table type. - BIGLAKE_TABLE (2): - BigLake table type. - OBJECT_TABLE (3): - Object table type for unstructured data. 
- """ - TABLE_TYPE_UNSPECIFIED = 0 - EXTERNAL_TABLE = 1 - BIGLAKE_TABLE = 2 - OBJECT_TABLE = 3 - - class ConfigDetails(proto.Message): - r"""Details about configuration events. - - Attributes: - parameters (MutableMapping[str, str]): - A list of discovery configuration parameters - in effect. The keys are the field paths within - DiscoverySpec. Eg. includePatterns, - excludePatterns, - csvOptions.disableTypeInference, etc. - """ - - parameters: MutableMapping[str, str] = proto.MapField( - proto.STRING, - proto.STRING, - number=1, - ) - - class EntityDetails(proto.Message): - r"""Details about the entity. - - Attributes: - entity (str): - The name of the entity resource. - The name is the fully-qualified resource name. - type_ (google.cloud.dataplex_v1.types.DiscoveryEvent.EntityType): - The type of the entity resource. - """ - - entity: str = proto.Field( - proto.STRING, - number=1, - ) - type_: 'DiscoveryEvent.EntityType' = proto.Field( - proto.ENUM, - number=2, - enum='DiscoveryEvent.EntityType', - ) - - class TableDetails(proto.Message): - r"""Details about the published table. - - Attributes: - table (str): - The fully-qualified resource name of the - table resource. - type_ (google.cloud.dataplex_v1.types.DiscoveryEvent.TableType): - The type of the table resource. - """ - - table: str = proto.Field( - proto.STRING, - number=1, - ) - type_: 'DiscoveryEvent.TableType' = proto.Field( - proto.ENUM, - number=2, - enum='DiscoveryEvent.TableType', - ) - - class PartitionDetails(proto.Message): - r"""Details about the partition. - - Attributes: - partition (str): - The name to the partition resource. - The name is the fully-qualified resource name. - entity (str): - The name to the containing entity resource. - The name is the fully-qualified resource name. - type_ (google.cloud.dataplex_v1.types.DiscoveryEvent.EntityType): - The type of the containing entity resource. - sampled_data_locations (MutableSequence[str]): - The locations of the data items (e.g., a - Cloud Storage objects) sampled for metadata - inference. - """ - - partition: str = proto.Field( - proto.STRING, - number=1, - ) - entity: str = proto.Field( - proto.STRING, - number=2, - ) - type_: 'DiscoveryEvent.EntityType' = proto.Field( - proto.ENUM, - number=3, - enum='DiscoveryEvent.EntityType', - ) - sampled_data_locations: MutableSequence[str] = proto.RepeatedField( - proto.STRING, - number=4, - ) - - class ActionDetails(proto.Message): - r"""Details about the action. - - Attributes: - type_ (str): - The type of action. - Eg. IncompatibleDataSchema, InvalidDataFormat - issue (str): - The human readable issue associated with the - action. 
- """ - - type_: str = proto.Field( - proto.STRING, - number=1, - ) - issue: str = proto.Field( - proto.STRING, - number=2, - ) - - message: str = proto.Field( - proto.STRING, - number=1, - ) - lake_id: str = proto.Field( - proto.STRING, - number=2, - ) - zone_id: str = proto.Field( - proto.STRING, - number=3, - ) - asset_id: str = proto.Field( - proto.STRING, - number=4, - ) - data_location: str = proto.Field( - proto.STRING, - number=5, - ) - datascan_id: str = proto.Field( - proto.STRING, - number=6, - ) - type_: EventType = proto.Field( - proto.ENUM, - number=10, - enum=EventType, - ) - config: ConfigDetails = proto.Field( - proto.MESSAGE, - number=20, - oneof='details', - message=ConfigDetails, - ) - entity: EntityDetails = proto.Field( - proto.MESSAGE, - number=21, - oneof='details', - message=EntityDetails, - ) - partition: PartitionDetails = proto.Field( - proto.MESSAGE, - number=22, - oneof='details', - message=PartitionDetails, - ) - action: ActionDetails = proto.Field( - proto.MESSAGE, - number=23, - oneof='details', - message=ActionDetails, - ) - table: TableDetails = proto.Field( - proto.MESSAGE, - number=24, - oneof='details', - message=TableDetails, - ) - - -class JobEvent(proto.Message): - r"""The payload associated with Job logs that contains events - describing jobs that have run within a Lake. - - Attributes: - message (str): - The log message. - job_id (str): - The unique id identifying the job. - start_time (google.protobuf.timestamp_pb2.Timestamp): - The time when the job started running. - end_time (google.protobuf.timestamp_pb2.Timestamp): - The time when the job ended running. - state (google.cloud.dataplex_v1.types.JobEvent.State): - The job state on completion. - retries (int): - The number of retries. - type_ (google.cloud.dataplex_v1.types.JobEvent.Type): - The type of the job. - service (google.cloud.dataplex_v1.types.JobEvent.Service): - The service used to execute the job. - service_job (str): - The reference to the job within the service. - execution_trigger (google.cloud.dataplex_v1.types.JobEvent.ExecutionTrigger): - Job execution trigger. - """ - class Type(proto.Enum): - r"""The type of the job. - - Values: - TYPE_UNSPECIFIED (0): - Unspecified job type. - SPARK (1): - Spark jobs. - NOTEBOOK (2): - Notebook jobs. - """ - TYPE_UNSPECIFIED = 0 - SPARK = 1 - NOTEBOOK = 2 - - class State(proto.Enum): - r"""The completion status of the job. - - Values: - STATE_UNSPECIFIED (0): - Unspecified job state. - SUCCEEDED (1): - Job successfully completed. - FAILED (2): - Job was unsuccessful. - CANCELLED (3): - Job was cancelled by the user. - ABORTED (4): - Job was cancelled or aborted via the service - executing the job. - """ - STATE_UNSPECIFIED = 0 - SUCCEEDED = 1 - FAILED = 2 - CANCELLED = 3 - ABORTED = 4 - - class Service(proto.Enum): - r"""The service used to execute the job. - - Values: - SERVICE_UNSPECIFIED (0): - Unspecified service. - DATAPROC (1): - Cloud Dataproc. - """ - SERVICE_UNSPECIFIED = 0 - DATAPROC = 1 - - class ExecutionTrigger(proto.Enum): - r"""Job Execution trigger. - - Values: - EXECUTION_TRIGGER_UNSPECIFIED (0): - The job execution trigger is unspecified. - TASK_CONFIG (1): - The job was triggered by Dataplex based on - trigger spec from task definition. - RUN_REQUEST (2): - The job was triggered by the explicit call of - Task API. 
- """ - EXECUTION_TRIGGER_UNSPECIFIED = 0 - TASK_CONFIG = 1 - RUN_REQUEST = 2 - - message: str = proto.Field( - proto.STRING, - number=1, - ) - job_id: str = proto.Field( - proto.STRING, - number=2, - ) - start_time: timestamp_pb2.Timestamp = proto.Field( - proto.MESSAGE, - number=3, - message=timestamp_pb2.Timestamp, - ) - end_time: timestamp_pb2.Timestamp = proto.Field( - proto.MESSAGE, - number=4, - message=timestamp_pb2.Timestamp, - ) - state: State = proto.Field( - proto.ENUM, - number=5, - enum=State, - ) - retries: int = proto.Field( - proto.INT32, - number=6, - ) - type_: Type = proto.Field( - proto.ENUM, - number=7, - enum=Type, - ) - service: Service = proto.Field( - proto.ENUM, - number=8, - enum=Service, - ) - service_job: str = proto.Field( - proto.STRING, - number=9, - ) - execution_trigger: ExecutionTrigger = proto.Field( - proto.ENUM, - number=11, - enum=ExecutionTrigger, - ) - - -class SessionEvent(proto.Message): - r"""These messages contain information about sessions within an - environment. The monitored resource is 'Environment'. - - - .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields - - Attributes: - message (str): - The log message. - user_id (str): - The information about the user that created - the session. It will be the email address of the - user. - session_id (str): - Unique identifier for the session. - type_ (google.cloud.dataplex_v1.types.SessionEvent.EventType): - The type of the event. - query (google.cloud.dataplex_v1.types.SessionEvent.QueryDetail): - The execution details of the query. - - This field is a member of `oneof`_ ``detail``. - event_succeeded (bool): - The status of the event. - fast_startup_enabled (bool): - If the session is associated with an - environment with fast startup enabled, and was - created before being assigned to a user. - unassigned_duration (google.protobuf.duration_pb2.Duration): - The idle duration of a warm pooled session - before it is assigned to user. - """ - class EventType(proto.Enum): - r"""The type of the event. - - Values: - EVENT_TYPE_UNSPECIFIED (0): - An unspecified event type. - START (1): - Event when the session is assigned to a user. - STOP (2): - Event for stop of a session. - QUERY (3): - Query events in the session. - CREATE (4): - Event for creation of a cluster. It is not - yet assigned to a user. This comes before START - in the sequence - """ - EVENT_TYPE_UNSPECIFIED = 0 - START = 1 - STOP = 2 - QUERY = 3 - CREATE = 4 - - class QueryDetail(proto.Message): - r"""Execution details of the query. - - Attributes: - query_id (str): - The unique Query id identifying the query. - query_text (str): - The query text executed. - engine (google.cloud.dataplex_v1.types.SessionEvent.QueryDetail.Engine): - Query Execution engine. - duration (google.protobuf.duration_pb2.Duration): - Time taken for execution of the query. - result_size_bytes (int): - The size of results the query produced. - data_processed_bytes (int): - The data processed by the query. - """ - class Engine(proto.Enum): - r"""Query Execution engine. - - Values: - ENGINE_UNSPECIFIED (0): - An unspecified Engine type. - SPARK_SQL (1): - Spark-sql engine is specified in Query. - BIGQUERY (2): - BigQuery engine is specified in Query. 
- """ - ENGINE_UNSPECIFIED = 0 - SPARK_SQL = 1 - BIGQUERY = 2 - - query_id: str = proto.Field( - proto.STRING, - number=1, - ) - query_text: str = proto.Field( - proto.STRING, - number=2, - ) - engine: 'SessionEvent.QueryDetail.Engine' = proto.Field( - proto.ENUM, - number=3, - enum='SessionEvent.QueryDetail.Engine', - ) - duration: duration_pb2.Duration = proto.Field( - proto.MESSAGE, - number=4, - message=duration_pb2.Duration, - ) - result_size_bytes: int = proto.Field( - proto.INT64, - number=5, - ) - data_processed_bytes: int = proto.Field( - proto.INT64, - number=6, - ) - - message: str = proto.Field( - proto.STRING, - number=1, - ) - user_id: str = proto.Field( - proto.STRING, - number=2, - ) - session_id: str = proto.Field( - proto.STRING, - number=3, - ) - type_: EventType = proto.Field( - proto.ENUM, - number=4, - enum=EventType, - ) - query: QueryDetail = proto.Field( - proto.MESSAGE, - number=5, - oneof='detail', - message=QueryDetail, - ) - event_succeeded: bool = proto.Field( - proto.BOOL, - number=6, - ) - fast_startup_enabled: bool = proto.Field( - proto.BOOL, - number=7, - ) - unassigned_duration: duration_pb2.Duration = proto.Field( - proto.MESSAGE, - number=8, - message=duration_pb2.Duration, - ) - - -class GovernanceEvent(proto.Message): - r"""Payload associated with Governance related log events. - - .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields - - Attributes: - message (str): - The log message. - event_type (google.cloud.dataplex_v1.types.GovernanceEvent.EventType): - The type of the event. - entity (google.cloud.dataplex_v1.types.GovernanceEvent.Entity): - Entity resource information if the log event - is associated with a specific entity. - - This field is a member of `oneof`_ ``_entity``. - """ - class EventType(proto.Enum): - r"""Type of governance log event. - - Values: - EVENT_TYPE_UNSPECIFIED (0): - An unspecified event type. - RESOURCE_IAM_POLICY_UPDATE (1): - Resource IAM policy update event. - BIGQUERY_TABLE_CREATE (2): - BigQuery table create event. - BIGQUERY_TABLE_UPDATE (3): - BigQuery table update event. - BIGQUERY_TABLE_DELETE (4): - BigQuery table delete event. - BIGQUERY_CONNECTION_CREATE (5): - BigQuery connection create event. - BIGQUERY_CONNECTION_UPDATE (6): - BigQuery connection update event. - BIGQUERY_CONNECTION_DELETE (7): - BigQuery connection delete event. - BIGQUERY_TAXONOMY_CREATE (10): - BigQuery taxonomy created. - BIGQUERY_POLICY_TAG_CREATE (11): - BigQuery policy tag created. - BIGQUERY_POLICY_TAG_DELETE (12): - BigQuery policy tag deleted. - BIGQUERY_POLICY_TAG_SET_IAM_POLICY (13): - BigQuery set iam policy for policy tag. - ACCESS_POLICY_UPDATE (14): - Access policy update event. - GOVERNANCE_RULE_MATCHED_RESOURCES (15): - Number of resources matched with particular - Query. - GOVERNANCE_RULE_SEARCH_LIMIT_EXCEEDS (16): - Rule processing exceeds the allowed limit. - GOVERNANCE_RULE_ERRORS (17): - Rule processing errors. - GOVERNANCE_RULE_PROCESSING (18): - Governance rule processing Event. 
- """ - EVENT_TYPE_UNSPECIFIED = 0 - RESOURCE_IAM_POLICY_UPDATE = 1 - BIGQUERY_TABLE_CREATE = 2 - BIGQUERY_TABLE_UPDATE = 3 - BIGQUERY_TABLE_DELETE = 4 - BIGQUERY_CONNECTION_CREATE = 5 - BIGQUERY_CONNECTION_UPDATE = 6 - BIGQUERY_CONNECTION_DELETE = 7 - BIGQUERY_TAXONOMY_CREATE = 10 - BIGQUERY_POLICY_TAG_CREATE = 11 - BIGQUERY_POLICY_TAG_DELETE = 12 - BIGQUERY_POLICY_TAG_SET_IAM_POLICY = 13 - ACCESS_POLICY_UPDATE = 14 - GOVERNANCE_RULE_MATCHED_RESOURCES = 15 - GOVERNANCE_RULE_SEARCH_LIMIT_EXCEEDS = 16 - GOVERNANCE_RULE_ERRORS = 17 - GOVERNANCE_RULE_PROCESSING = 18 - - class Entity(proto.Message): - r"""Information about Entity resource that the log event is - associated with. - - Attributes: - entity (str): - The Entity resource the log event is associated with. - Format: - ``projects/{project_number}/locations/{location_id}/lakes/{lake_id}/zones/{zone_id}/entities/{entity_id}`` - entity_type (google.cloud.dataplex_v1.types.GovernanceEvent.Entity.EntityType): - Type of entity. - """ - class EntityType(proto.Enum): - r"""Type of entity. - - Values: - ENTITY_TYPE_UNSPECIFIED (0): - An unspecified Entity type. - TABLE (1): - Table entity type. - FILESET (2): - Fileset entity type. - """ - ENTITY_TYPE_UNSPECIFIED = 0 - TABLE = 1 - FILESET = 2 - - entity: str = proto.Field( - proto.STRING, - number=1, - ) - entity_type: 'GovernanceEvent.Entity.EntityType' = proto.Field( - proto.ENUM, - number=2, - enum='GovernanceEvent.Entity.EntityType', - ) - - message: str = proto.Field( - proto.STRING, - number=1, - ) - event_type: EventType = proto.Field( - proto.ENUM, - number=2, - enum=EventType, - ) - entity: Entity = proto.Field( - proto.MESSAGE, - number=3, - optional=True, - message=Entity, - ) - - -class DataScanEvent(proto.Message): - r"""These messages contain information about the execution of a - datascan. The monitored resource is 'DataScan' - - This message has `oneof`_ fields (mutually exclusive fields). - For each oneof, at most one member field can be set at the same time. - Setting any member of the oneof automatically clears all other - members. - - .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields - - Attributes: - data_source (str): - The data source of the data scan - job_id (str): - The identifier of the specific data scan job - this log entry is for. - create_time (google.protobuf.timestamp_pb2.Timestamp): - The time when the data scan job was created. - start_time (google.protobuf.timestamp_pb2.Timestamp): - The time when the data scan job started to - run. - end_time (google.protobuf.timestamp_pb2.Timestamp): - The time when the data scan job finished. - type_ (google.cloud.dataplex_v1.types.DataScanEvent.ScanType): - The type of the data scan. - state (google.cloud.dataplex_v1.types.DataScanEvent.State): - The status of the data scan job. - message (str): - The message describing the data scan job - event. - spec_version (str): - A version identifier of the spec which was - used to execute this job. - trigger (google.cloud.dataplex_v1.types.DataScanEvent.Trigger): - The trigger type of the data scan job. - scope (google.cloud.dataplex_v1.types.DataScanEvent.Scope): - The scope of the data scan (e.g. full, - incremental). - data_profile (google.cloud.dataplex_v1.types.DataScanEvent.DataProfileResult): - Data profile result for data profile type - data scan. - - This field is a member of `oneof`_ ``result``. 
- data_quality (google.cloud.dataplex_v1.types.DataScanEvent.DataQualityResult):
- Data quality result for data quality type
- data scan.
-
- This field is a member of `oneof`_ ``result``.
- data_profile_configs (google.cloud.dataplex_v1.types.DataScanEvent.DataProfileAppliedConfigs):
- Applied configs for data profile type data
- scan.
-
- This field is a member of `oneof`_ ``appliedConfigs``.
- data_quality_configs (google.cloud.dataplex_v1.types.DataScanEvent.DataQualityAppliedConfigs):
- Applied configs for data quality type data
- scan.
-
- This field is a member of `oneof`_ ``appliedConfigs``.
- post_scan_actions_result (google.cloud.dataplex_v1.types.DataScanEvent.PostScanActionsResult):
- The result of post scan actions.
- """
- class ScanType(proto.Enum):
- r"""The type of the data scan.
-
- Values:
- SCAN_TYPE_UNSPECIFIED (0):
- An unspecified data scan type.
- DATA_PROFILE (1):
- Data scan for data profile.
- DATA_QUALITY (2):
- Data scan for data quality.
- DATA_DISCOVERY (4):
- Data scan for data discovery.
- """
- SCAN_TYPE_UNSPECIFIED = 0
- DATA_PROFILE = 1
- DATA_QUALITY = 2
- DATA_DISCOVERY = 4
-
- class State(proto.Enum):
- r"""The job state of the data scan.
-
- Values:
- STATE_UNSPECIFIED (0):
- Unspecified job state.
- STARTED (1):
- Data scan job started.
- SUCCEEDED (2):
- Data scan job successfully completed.
- FAILED (3):
- Data scan job was unsuccessful.
- CANCELLED (4):
- Data scan job was cancelled.
- CREATED (5):
- Data scan job was created.
- """
- STATE_UNSPECIFIED = 0
- STARTED = 1
- SUCCEEDED = 2
- FAILED = 3
- CANCELLED = 4
- CREATED = 5
-
- class Trigger(proto.Enum):
- r"""The trigger type for the data scan.
-
- Values:
- TRIGGER_UNSPECIFIED (0):
- An unspecified trigger type.
- ON_DEMAND (1):
- Data scan triggers on demand.
- SCHEDULE (2):
- Data scan triggers as per schedule.
- """
- TRIGGER_UNSPECIFIED = 0
- ON_DEMAND = 1
- SCHEDULE = 2
-
- class Scope(proto.Enum):
- r"""The scope of job for the data scan.
-
- Values:
- SCOPE_UNSPECIFIED (0):
- An unspecified scope type.
- FULL (1):
- Data scan runs on all of the data.
- INCREMENTAL (2):
- Data scan runs on incremental data.
- """
- SCOPE_UNSPECIFIED = 0
- FULL = 1
- INCREMENTAL = 2
-
- class DataProfileResult(proto.Message):
- r"""Data profile result for data scan job.
-
- Attributes:
- row_count (int):
- The count of rows processed in the data scan
- job.
- """
-
- row_count: int = proto.Field(
- proto.INT64,
- number=1,
- )
-
- class DataQualityResult(proto.Message):
- r"""Data quality result for data scan job.
-
- Attributes:
- row_count (int):
- The count of rows processed in the data scan
- job.
- passed (bool):
- Whether the data quality result was ``pass`` or not.
- dimension_passed (MutableMapping[str, bool]):
- The result of each dimension for data quality result. The
- key of the map is the name of the dimension. The value is
- the bool value depicting whether the dimension result was
- ``pass`` or not.
- score (float):
- The table-level data quality score for the data scan job.
-
- The data quality score ranges between [0, 100] (up to two
- decimal points).
- dimension_score (MutableMapping[str, float]):
- The score of each dimension for data quality result. The key
- of the map is the name of the dimension. The value is the
- data quality score for the dimension.
-
- The score ranges between [0, 100] (up to two decimal
- points).
- column_score (MutableMapping[str, float]):
- The score of each column scanned in the data scan job. The
- key of the map is the name of the column. The value is the
- data quality score for the column.
-
- The score ranges between [0, 100] (up to two decimal
- points).
- """
-
- row_count: int = proto.Field(
- proto.INT64,
- number=1,
- )
- passed: bool = proto.Field(
- proto.BOOL,
- number=2,
- )
- dimension_passed: MutableMapping[str, bool] = proto.MapField(
- proto.STRING,
- proto.BOOL,
- number=3,
- )
- score: float = proto.Field(
- proto.FLOAT,
- number=4,
- )
- dimension_score: MutableMapping[str, float] = proto.MapField(
- proto.STRING,
- proto.FLOAT,
- number=5,
- )
- column_score: MutableMapping[str, float] = proto.MapField(
- proto.STRING,
- proto.FLOAT,
- number=6,
- )
-
- class DataProfileAppliedConfigs(proto.Message):
- r"""Applied configs for data profile type data scan job.
-
- Attributes:
- sampling_percent (float):
- The percentage of the records selected from the dataset for
- DataScan.
-
- - Value ranges between 0.0 and 100.0.
- - A value of 0.0 or 100.0 implies that sampling was not applied.
- row_filter_applied (bool):
- Boolean indicating whether a row filter was
- applied in the DataScan job.
- column_filter_applied (bool):
- Boolean indicating whether a column filter
- was applied in the DataScan job.
- """
-
- sampling_percent: float = proto.Field(
- proto.FLOAT,
- number=1,
- )
- row_filter_applied: bool = proto.Field(
- proto.BOOL,
- number=2,
- )
- column_filter_applied: bool = proto.Field(
- proto.BOOL,
- number=3,
- )
-
- class DataQualityAppliedConfigs(proto.Message):
- r"""Applied configs for data quality type data scan job.
-
- Attributes:
- sampling_percent (float):
- The percentage of the records selected from the dataset for
- DataScan.
-
- - Value ranges between 0.0 and 100.0.
- - A value of 0.0 or 100.0 implies that sampling was not applied.
- row_filter_applied (bool):
- Boolean indicating whether a row filter was
- applied in the DataScan job.
- """
-
- sampling_percent: float = proto.Field(
- proto.FLOAT,
- number=1,
- )
- row_filter_applied: bool = proto.Field(
- proto.BOOL,
- number=2,
- )
-
- class PostScanActionsResult(proto.Message):
- r"""Post scan actions result for data scan job.
-
- Attributes:
- bigquery_export_result (google.cloud.dataplex_v1.types.DataScanEvent.PostScanActionsResult.BigQueryExportResult):
- The result of BigQuery export post scan
- action.
- """
-
- class BigQueryExportResult(proto.Message):
- r"""The result of BigQuery export post scan action.
-
- Attributes:
- state (google.cloud.dataplex_v1.types.DataScanEvent.PostScanActionsResult.BigQueryExportResult.State):
- Execution state for the BigQuery exporting.
- message (str):
- Additional information about the BigQuery
- exporting.
- """
- class State(proto.Enum):
- r"""Execution state for the exporting.
-
- Values:
- STATE_UNSPECIFIED (0):
- The exporting state is unspecified.
- SUCCEEDED (1):
- The exporting completed successfully.
- FAILED (2):
- The exporting is no longer running due to an
- error.
- SKIPPED (3):
- The exporting is skipped due to no valid scan
- result to export (usually because the scan
- failed).
- """ - STATE_UNSPECIFIED = 0 - SUCCEEDED = 1 - FAILED = 2 - SKIPPED = 3 - - state: 'DataScanEvent.PostScanActionsResult.BigQueryExportResult.State' = proto.Field( - proto.ENUM, - number=1, - enum='DataScanEvent.PostScanActionsResult.BigQueryExportResult.State', - ) - message: str = proto.Field( - proto.STRING, - number=2, - ) - - bigquery_export_result: 'DataScanEvent.PostScanActionsResult.BigQueryExportResult' = proto.Field( - proto.MESSAGE, - number=1, - message='DataScanEvent.PostScanActionsResult.BigQueryExportResult', - ) - - data_source: str = proto.Field( - proto.STRING, - number=1, - ) - job_id: str = proto.Field( - proto.STRING, - number=2, - ) - create_time: timestamp_pb2.Timestamp = proto.Field( - proto.MESSAGE, - number=12, - message=timestamp_pb2.Timestamp, - ) - start_time: timestamp_pb2.Timestamp = proto.Field( - proto.MESSAGE, - number=3, - message=timestamp_pb2.Timestamp, - ) - end_time: timestamp_pb2.Timestamp = proto.Field( - proto.MESSAGE, - number=4, - message=timestamp_pb2.Timestamp, - ) - type_: ScanType = proto.Field( - proto.ENUM, - number=5, - enum=ScanType, - ) - state: State = proto.Field( - proto.ENUM, - number=6, - enum=State, - ) - message: str = proto.Field( - proto.STRING, - number=7, - ) - spec_version: str = proto.Field( - proto.STRING, - number=8, - ) - trigger: Trigger = proto.Field( - proto.ENUM, - number=9, - enum=Trigger, - ) - scope: Scope = proto.Field( - proto.ENUM, - number=10, - enum=Scope, - ) - data_profile: DataProfileResult = proto.Field( - proto.MESSAGE, - number=101, - oneof='result', - message=DataProfileResult, - ) - data_quality: DataQualityResult = proto.Field( - proto.MESSAGE, - number=102, - oneof='result', - message=DataQualityResult, - ) - data_profile_configs: DataProfileAppliedConfigs = proto.Field( - proto.MESSAGE, - number=201, - oneof='appliedConfigs', - message=DataProfileAppliedConfigs, - ) - data_quality_configs: DataQualityAppliedConfigs = proto.Field( - proto.MESSAGE, - number=202, - oneof='appliedConfigs', - message=DataQualityAppliedConfigs, - ) - post_scan_actions_result: PostScanActionsResult = proto.Field( - proto.MESSAGE, - number=11, - message=PostScanActionsResult, - ) - - -class DataQualityScanRuleResult(proto.Message): - r"""Information about the result of a data quality rule for data - quality scan. The monitored resource is 'DataScan'. - - Attributes: - job_id (str): - Identifier of the specific data scan job this - log entry is for. - data_source (str): - The data source of the data scan (e.g. - BigQuery table name). - column (str): - The column which this rule is evaluated - against. - rule_name (str): - The name of the data quality rule. - rule_type (google.cloud.dataplex_v1.types.DataQualityScanRuleResult.RuleType): - The type of the data quality rule. - evalution_type (google.cloud.dataplex_v1.types.DataQualityScanRuleResult.EvaluationType): - The evaluation type of the data quality rule. - rule_dimension (str): - The dimension of the data quality rule. - threshold_percent (float): - The passing threshold ([0.0, 100.0]) of the data quality - rule. - result (google.cloud.dataplex_v1.types.DataQualityScanRuleResult.Result): - The result of the data quality rule. - evaluated_row_count (int): - The number of rows evaluated against the data quality rule. - This field is only valid for rules of PER_ROW evaluation - type. - passed_row_count (int): - The number of rows which passed a rule evaluation. This - field is only valid for rules of PER_ROW evaluation type. 
- null_row_count (int): - The number of rows with null values in the - specified column. - assertion_row_count (int): - The number of rows returned by the SQL - statement in a SQL assertion rule. This field is - only valid for SQL assertion rules. - """ - class RuleType(proto.Enum): - r"""The type of the data quality rule. - - Values: - RULE_TYPE_UNSPECIFIED (0): - An unspecified rule type. - NON_NULL_EXPECTATION (1): - See - [DataQualityRule.NonNullExpectation][google.cloud.dataplex.v1.DataQualityRule.NonNullExpectation]. - RANGE_EXPECTATION (2): - See - [DataQualityRule.RangeExpectation][google.cloud.dataplex.v1.DataQualityRule.RangeExpectation]. - REGEX_EXPECTATION (3): - See - [DataQualityRule.RegexExpectation][google.cloud.dataplex.v1.DataQualityRule.RegexExpectation]. - ROW_CONDITION_EXPECTATION (4): - See - [DataQualityRule.RowConditionExpectation][google.cloud.dataplex.v1.DataQualityRule.RowConditionExpectation]. - SET_EXPECTATION (5): - See - [DataQualityRule.SetExpectation][google.cloud.dataplex.v1.DataQualityRule.SetExpectation]. - STATISTIC_RANGE_EXPECTATION (6): - See - [DataQualityRule.StatisticRangeExpectation][google.cloud.dataplex.v1.DataQualityRule.StatisticRangeExpectation]. - TABLE_CONDITION_EXPECTATION (7): - See - [DataQualityRule.TableConditionExpectation][google.cloud.dataplex.v1.DataQualityRule.TableConditionExpectation]. - UNIQUENESS_EXPECTATION (8): - See - [DataQualityRule.UniquenessExpectation][google.cloud.dataplex.v1.DataQualityRule.UniquenessExpectation]. - SQL_ASSERTION (9): - See - [DataQualityRule.SqlAssertion][google.cloud.dataplex.v1.DataQualityRule.SqlAssertion]. - """ - RULE_TYPE_UNSPECIFIED = 0 - NON_NULL_EXPECTATION = 1 - RANGE_EXPECTATION = 2 - REGEX_EXPECTATION = 3 - ROW_CONDITION_EXPECTATION = 4 - SET_EXPECTATION = 5 - STATISTIC_RANGE_EXPECTATION = 6 - TABLE_CONDITION_EXPECTATION = 7 - UNIQUENESS_EXPECTATION = 8 - SQL_ASSERTION = 9 - - class EvaluationType(proto.Enum): - r"""The evaluation type of the data quality rule. - - Values: - EVALUATION_TYPE_UNSPECIFIED (0): - An unspecified evaluation type. - PER_ROW (1): - The rule evaluation is done at per row level. - AGGREGATE (2): - The rule evaluation is done for an aggregate - of rows. - """ - EVALUATION_TYPE_UNSPECIFIED = 0 - PER_ROW = 1 - AGGREGATE = 2 - - class Result(proto.Enum): - r"""Whether the data quality rule passed or failed. - - Values: - RESULT_UNSPECIFIED (0): - An unspecified result. - PASSED (1): - The data quality rule passed. - FAILED (2): - The data quality rule failed. 
- """ - RESULT_UNSPECIFIED = 0 - PASSED = 1 - FAILED = 2 - - job_id: str = proto.Field( - proto.STRING, - number=1, - ) - data_source: str = proto.Field( - proto.STRING, - number=2, - ) - column: str = proto.Field( - proto.STRING, - number=3, - ) - rule_name: str = proto.Field( - proto.STRING, - number=4, - ) - rule_type: RuleType = proto.Field( - proto.ENUM, - number=5, - enum=RuleType, - ) - evalution_type: EvaluationType = proto.Field( - proto.ENUM, - number=6, - enum=EvaluationType, - ) - rule_dimension: str = proto.Field( - proto.STRING, - number=7, - ) - threshold_percent: float = proto.Field( - proto.DOUBLE, - number=8, - ) - result: Result = proto.Field( - proto.ENUM, - number=9, - enum=Result, - ) - evaluated_row_count: int = proto.Field( - proto.INT64, - number=10, - ) - passed_row_count: int = proto.Field( - proto.INT64, - number=11, - ) - null_row_count: int = proto.Field( - proto.INT64, - number=12, - ) - assertion_row_count: int = proto.Field( - proto.INT64, - number=13, - ) - - -__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/types/metadata_.py b/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/types/metadata_.py deleted file mode 100644 index 8d977d572bf0..000000000000 --- a/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/types/metadata_.py +++ /dev/null @@ -1,1182 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -from __future__ import annotations - -from typing import MutableMapping, MutableSequence - -import proto # type: ignore - -from google.protobuf import timestamp_pb2 # type: ignore - - -__protobuf__ = proto.module( - package='google.cloud.dataplex.v1', - manifest={ - 'StorageSystem', - 'CreateEntityRequest', - 'UpdateEntityRequest', - 'DeleteEntityRequest', - 'ListEntitiesRequest', - 'ListEntitiesResponse', - 'GetEntityRequest', - 'ListPartitionsRequest', - 'CreatePartitionRequest', - 'DeletePartitionRequest', - 'ListPartitionsResponse', - 'GetPartitionRequest', - 'Entity', - 'Partition', - 'Schema', - 'StorageFormat', - 'StorageAccess', - }, -) - - -class StorageSystem(proto.Enum): - r"""Identifies the cloud system that manages the data storage. - - Values: - STORAGE_SYSTEM_UNSPECIFIED (0): - Storage system unspecified. - CLOUD_STORAGE (1): - The entity data is contained within a Cloud - Storage bucket. - BIGQUERY (2): - The entity data is contained within a - BigQuery dataset. - """ - STORAGE_SYSTEM_UNSPECIFIED = 0 - CLOUD_STORAGE = 1 - BIGQUERY = 2 - - -class CreateEntityRequest(proto.Message): - r"""Create a metadata entity request. - - Attributes: - parent (str): - Required. The resource name of the parent zone: - ``projects/{project_number}/locations/{location_id}/lakes/{lake_id}/zones/{zone_id}``. - entity (google.cloud.dataplex_v1.types.Entity): - Required. Entity resource. - validate_only (bool): - Optional. Only validate the request, but do - not perform mutations. 
The default is false.
- """
-
- parent: str = proto.Field(
- proto.STRING,
- number=1,
- )
- entity: 'Entity' = proto.Field(
- proto.MESSAGE,
- number=3,
- message='Entity',
- )
- validate_only: bool = proto.Field(
- proto.BOOL,
- number=4,
- )
-
-
-class UpdateEntityRequest(proto.Message):
- r"""Update a metadata entity request.
- The existing entity will be fully replaced by the entity in the
- request. The entity ID is mutable. To modify the ID, use the
- current entity ID in the request URL and specify the new ID in
- the request body.
-
- Attributes:
- entity (google.cloud.dataplex_v1.types.Entity):
- Required. Update description.
- validate_only (bool):
- Optional. Only validate the request, but do
- not perform mutations. The default is false.
- """
-
- entity: 'Entity' = proto.Field(
- proto.MESSAGE,
- number=2,
- message='Entity',
- )
- validate_only: bool = proto.Field(
- proto.BOOL,
- number=3,
- )
-
-
-class DeleteEntityRequest(proto.Message):
- r"""Delete a metadata entity request.
-
- Attributes:
- name (str):
- Required. The resource name of the entity:
- ``projects/{project_number}/locations/{location_id}/lakes/{lake_id}/zones/{zone_id}/entities/{entity_id}``.
- etag (str):
- Required. The etag associated with the entity, which can be
- retrieved with a [GetEntity][] request.
- """
-
- name: str = proto.Field(
- proto.STRING,
- number=1,
- )
- etag: str = proto.Field(
- proto.STRING,
- number=2,
- )
-
-
-class ListEntitiesRequest(proto.Message):
- r"""List metadata entities request.
-
- Attributes:
- parent (str):
- Required. The resource name of the parent zone:
- ``projects/{project_number}/locations/{location_id}/lakes/{lake_id}/zones/{zone_id}``.
- view (google.cloud.dataplex_v1.types.ListEntitiesRequest.EntityView):
- Required. Specify the entity view to make a
- partial list request.
- page_size (int):
- Optional. Maximum number of entities to
- return. The service may return fewer than this
- value. If unspecified, 100 entities will be
- returned by default. The maximum value is 500;
- larger values will be truncated to 500.
- page_token (str):
- Optional. Page token received from a previous
- ``ListEntities`` call. Provide this to retrieve the
- subsequent page. When paginating, all other parameters
- provided to ``ListEntities`` must match the call that
- provided the page token.
- filter (str):
- Optional. The following filter parameters can be added to
- the URL to limit the entities returned by the API:
-
- - Entity ID: ?filter="id=entityID"
- - Asset ID: ?filter="asset=assetID"
- - Data path: ?filter="data_path=gs://my-bucket"
- - Is HIVE compatible: ?filter="hive_compatible=true"
- - Is BigQuery compatible:
- ?filter="bigquery_compatible=true".
- """
- class EntityView(proto.Enum):
- r"""Entity views.
-
- Values:
- ENTITY_VIEW_UNSPECIFIED (0):
- The default unset value. Return both table
- and fileset entities if unspecified.
- TABLES (1):
- Only list table entities.
- FILESETS (2):
- Only list fileset entities.
- """
- ENTITY_VIEW_UNSPECIFIED = 0
- TABLES = 1
- FILESETS = 2
-
- parent: str = proto.Field(
- proto.STRING,
- number=1,
- )
- view: EntityView = proto.Field(
- proto.ENUM,
- number=2,
- enum=EntityView,
- )
- page_size: int = proto.Field(
- proto.INT32,
- number=3,
- )
- page_token: str = proto.Field(
- proto.STRING,
- number=4,
- )
- filter: str = proto.Field(
- proto.STRING,
- number=5,
- )
-
-
-class ListEntitiesResponse(proto.Message):
- r"""List metadata entities response.
-
- Attributes:
- entities (MutableSequence[google.cloud.dataplex_v1.types.Entity]):
- Entities in the specified parent zone.
- next_page_token (str):
- Token to retrieve the next page of results,
- or empty if there are no remaining results in
- the list.
- """
-
- @property
- def raw_page(self):
- return self
-
- entities: MutableSequence['Entity'] = proto.RepeatedField(
- proto.MESSAGE,
- number=1,
- message='Entity',
- )
- next_page_token: str = proto.Field(
- proto.STRING,
- number=2,
- )
-
-
-class GetEntityRequest(proto.Message):
- r"""Get metadata entity request.
-
- Attributes:
- name (str):
- Required. The resource name of the entity:
- ``projects/{project_number}/locations/{location_id}/lakes/{lake_id}/zones/{zone_id}/entities/{entity_id}.``
- view (google.cloud.dataplex_v1.types.GetEntityRequest.EntityView):
- Optional. Used to select the subset of entity information to
- return. Defaults to ``BASIC``.
- """
- class EntityView(proto.Enum):
- r"""Entity views for get entity partial result.
-
- Values:
- ENTITY_VIEW_UNSPECIFIED (0):
- The API will default to the ``BASIC`` view.
- BASIC (1):
- Minimal view that does not include the
- schema.
- SCHEMA (2):
- Include basic information and schema.
- FULL (4):
- Include everything. Currently, this is the
- same as the SCHEMA view.
- """
- ENTITY_VIEW_UNSPECIFIED = 0
- BASIC = 1
- SCHEMA = 2
- FULL = 4
-
- name: str = proto.Field(
- proto.STRING,
- number=1,
- )
- view: EntityView = proto.Field(
- proto.ENUM,
- number=2,
- enum=EntityView,
- )
-
-
-class ListPartitionsRequest(proto.Message):
- r"""List metadata partitions request.
-
- Attributes:
- parent (str):
- Required. The resource name of the parent entity:
- ``projects/{project_number}/locations/{location_id}/lakes/{lake_id}/zones/{zone_id}/entities/{entity_id}``.
- page_size (int):
- Optional. Maximum number of partitions to
- return. The service may return fewer than this
- value. If unspecified, 100 partitions will be
- returned by default. The maximum page size is
- 500; larger values will be truncated to
- 500.
- page_token (str):
- Optional. Page token received from a previous
- ``ListPartitions`` call. Provide this to retrieve the
- subsequent page. When paginating, all other parameters
- provided to ``ListPartitions`` must match the call that
- provided the page token.
- filter (str):
- Optional. Filter the partitions returned to the caller using
- a key-value pair expression. Supported operators and syntax:
-
- - logic operators: AND, OR
- - comparison operators: <, >, >=, <=, =, !=
- - LIKE operators:
-
- - The right hand of a LIKE operator supports "." and "*"
- for wildcard searches, for example "value1 LIKE
- '.*oo.*'"
-
- - parenthetical grouping: ( )
-
- Sample filter expression: ``?filter="key1 < value1 OR key2 >
- value2"``
-
- **Notes:**
-
- - Keys to the left of operators are case insensitive.
- - Partition results are sorted first by creation time, then
- by lexicographic order.
- - Up to 20 key-value filter pairs are allowed, but due to
- performance considerations, only the first 10 will be
- used as a filter.
- """
-
- parent: str = proto.Field(
- proto.STRING,
- number=1,
- )
- page_size: int = proto.Field(
- proto.INT32,
- number=2,
- )
- page_token: str = proto.Field(
- proto.STRING,
- number=3,
- )
- filter: str = proto.Field(
- proto.STRING,
- number=4,
- )
-
-
-class CreatePartitionRequest(proto.Message):
- r"""Create metadata partition request.
-
- Attributes:
- parent (str):
- Required.
The resource name of the parent entity:
- ``projects/{project_number}/locations/{location_id}/lakes/{lake_id}/zones/{zone_id}/entities/{entity_id}``.
- partition (google.cloud.dataplex_v1.types.Partition):
- Required. Partition resource.
- validate_only (bool):
- Optional. Only validate the request, but do
- not perform mutations. The default is false.
- """
-
- parent: str = proto.Field(
- proto.STRING,
- number=1,
- )
- partition: 'Partition' = proto.Field(
- proto.MESSAGE,
- number=3,
- message='Partition',
- )
- validate_only: bool = proto.Field(
- proto.BOOL,
- number=4,
- )
-
-
-class DeletePartitionRequest(proto.Message):
- r"""Delete metadata partition request.
-
- Attributes:
- name (str):
- Required. The resource name of the partition. Format:
- ``projects/{project_number}/locations/{location_id}/lakes/{lake_id}/zones/{zone_id}/entities/{entity_id}/partitions/{partition_value_path}``.
- The {partition_value_path} segment consists of an ordered
- sequence of partition values separated by "/". All values
- must be provided.
- etag (str):
- Optional. The etag associated with the
- partition.
- """
-
- name: str = proto.Field(
- proto.STRING,
- number=1,
- )
- etag: str = proto.Field(
- proto.STRING,
- number=2,
- )
-
-
-class ListPartitionsResponse(proto.Message):
- r"""List metadata partitions response.
-
- Attributes:
- partitions (MutableSequence[google.cloud.dataplex_v1.types.Partition]):
- Partitions under the specified parent entity.
- next_page_token (str):
- Token to retrieve the next page of results,
- or empty if there are no remaining results in
- the list.
- """
-
- @property
- def raw_page(self):
- return self
-
- partitions: MutableSequence['Partition'] = proto.RepeatedField(
- proto.MESSAGE,
- number=1,
- message='Partition',
- )
- next_page_token: str = proto.Field(
- proto.STRING,
- number=2,
- )
-
-
-class GetPartitionRequest(proto.Message):
- r"""Get metadata partition request.
-
- Attributes:
- name (str):
- Required. The resource name of the partition:
- ``projects/{project_number}/locations/{location_id}/lakes/{lake_id}/zones/{zone_id}/entities/{entity_id}/partitions/{partition_value_path}``.
- The {partition_value_path} segment consists of an ordered
- sequence of partition values separated by "/". All values
- must be provided.
- """
-
- name: str = proto.Field(
- proto.STRING,
- number=1,
- )
-
-
-class Entity(proto.Message):
- r"""Represents tables and fileset metadata contained within a
- zone.
-
- Attributes:
- name (str):
- Output only. The resource name of the entity, of the form:
- ``projects/{project_number}/locations/{location_id}/lakes/{lake_id}/zones/{zone_id}/entities/{id}``.
- display_name (str):
- Optional. Display name must be shorter than
- or equal to 256 characters.
- description (str):
- Optional. User friendly longer description
- text. Must be shorter than or equal to 1024
- characters.
- create_time (google.protobuf.timestamp_pb2.Timestamp):
- Output only. The time when the entity was
- created.
- update_time (google.protobuf.timestamp_pb2.Timestamp):
- Output only. The time when the entity was
- last updated.
- id (str):
- Required. A user-provided entity ID. It is
- mutable, and will be used as the published table
- name. Specifying a new ID in an update entity
- request will override the existing value.
- The ID must contain only letters (a-z, A-Z),
- numbers (0-9), and underscores, and consist of
- 256 or fewer characters.
- etag (str):
- Optional.
The etag associated with the entity, which can be
- retrieved with a [GetEntity][] request. Required for update
- and delete requests.
- type_ (google.cloud.dataplex_v1.types.Entity.Type):
- Required. Immutable. The type of entity.
- asset (str):
- Required. Immutable. The ID of the asset
- associated with the storage location containing
- the entity data. The entity must be within the
- same zone as the asset.
- data_path (str):
- Required. Immutable. The storage path of the entity data.
- For Cloud Storage data, this is the fully-qualified path to
- the entity, such as ``gs://bucket/path/to/data``. For
- BigQuery data, this is the name of the table resource, such
- as
- ``projects/project_id/datasets/dataset_id/tables/table_id``.
- data_path_pattern (str):
- Optional. The set of items within the data path constituting
- the data in the entity, represented as a glob path. Example:
- ``gs://bucket/path/to/data/**/*.csv``.
- catalog_entry (str):
- Output only. The name of the associated Data
- Catalog entry.
- system (google.cloud.dataplex_v1.types.StorageSystem):
- Required. Immutable. Identifies the storage
- system of the entity data.
- format_ (google.cloud.dataplex_v1.types.StorageFormat):
- Required. Identifies the storage format of
- the entity data. It does not apply to entities
- with data stored in BigQuery.
- compatibility (google.cloud.dataplex_v1.types.Entity.CompatibilityStatus):
- Output only. Metadata stores that the entity
- is compatible with.
- access (google.cloud.dataplex_v1.types.StorageAccess):
- Output only. Identifies the access mechanism
- to the entity. Not user settable.
- uid (str):
- Output only. System generated unique ID for
- the Entity. This ID will be different if the
- Entity is deleted and re-created with the same
- name.
- schema (google.cloud.dataplex_v1.types.Schema):
- Required. The description of the data structure and layout.
- The schema is not included in list responses. It is only
- included in ``SCHEMA`` and ``FULL`` entity views of a
- ``GetEntity`` response.
- """
- class Type(proto.Enum):
- r"""The type of entity.
-
- Values:
- TYPE_UNSPECIFIED (0):
- Type unspecified.
- TABLE (1):
- Structured and semi-structured data.
- FILESET (2):
- Unstructured data.
- """
- TYPE_UNSPECIFIED = 0
- TABLE = 1
- FILESET = 2
-
- class CompatibilityStatus(proto.Message):
- r"""Provides compatibility information for various metadata
- stores.
-
- Attributes:
- hive_metastore (google.cloud.dataplex_v1.types.Entity.CompatibilityStatus.Compatibility):
- Output only. Whether this entity is
- compatible with Hive Metastore.
- bigquery (google.cloud.dataplex_v1.types.Entity.CompatibilityStatus.Compatibility):
- Output only. Whether this entity is
- compatible with BigQuery.
- """
-
- class Compatibility(proto.Message):
- r"""Provides compatibility information for a specific metadata
- store.
-
- Attributes:
- compatible (bool):
- Output only. Whether the entity is compatible
- and can be represented in the metadata store.
- reason (str):
- Output only. Provides additional detail if
- the entity is incompatible with the metadata
- store.
- """
-
- compatible: bool = proto.Field(
- proto.BOOL,
- number=1,
- )
- reason: str = proto.Field(
- proto.STRING,
- number=2,
- )
-
- hive_metastore: 'Entity.CompatibilityStatus.Compatibility' = proto.Field(
- proto.MESSAGE,
- number=1,
- message='Entity.CompatibilityStatus.Compatibility',
- )
- bigquery: 'Entity.CompatibilityStatus.Compatibility' = proto.Field(
- proto.MESSAGE,
- number=2,
- message='Entity.CompatibilityStatus.Compatibility',
- )
-
- name: str = proto.Field(
- proto.STRING,
- number=1,
- )
- display_name: str = proto.Field(
- proto.STRING,
- number=2,
- )
- description: str = proto.Field(
- proto.STRING,
- number=3,
- )
- create_time: timestamp_pb2.Timestamp = proto.Field(
- proto.MESSAGE,
- number=5,
- message=timestamp_pb2.Timestamp,
- )
- update_time: timestamp_pb2.Timestamp = proto.Field(
- proto.MESSAGE,
- number=6,
- message=timestamp_pb2.Timestamp,
- )
- id: str = proto.Field(
- proto.STRING,
- number=7,
- )
- etag: str = proto.Field(
- proto.STRING,
- number=8,
- )
- type_: Type = proto.Field(
- proto.ENUM,
- number=10,
- enum=Type,
- )
- asset: str = proto.Field(
- proto.STRING,
- number=11,
- )
- data_path: str = proto.Field(
- proto.STRING,
- number=12,
- )
- data_path_pattern: str = proto.Field(
- proto.STRING,
- number=13,
- )
- catalog_entry: str = proto.Field(
- proto.STRING,
- number=14,
- )
- system: 'StorageSystem' = proto.Field(
- proto.ENUM,
- number=15,
- enum='StorageSystem',
- )
- format_: 'StorageFormat' = proto.Field(
- proto.MESSAGE,
- number=16,
- message='StorageFormat',
- )
- compatibility: CompatibilityStatus = proto.Field(
- proto.MESSAGE,
- number=19,
- message=CompatibilityStatus,
- )
- access: 'StorageAccess' = proto.Field(
- proto.MESSAGE,
- number=21,
- message='StorageAccess',
- )
- uid: str = proto.Field(
- proto.STRING,
- number=22,
- )
- schema: 'Schema' = proto.Field(
- proto.MESSAGE,
- number=50,
- message='Schema',
- )
-
-
-class Partition(proto.Message):
- r"""Represents partition metadata contained within entity
- instances.
-
- Attributes:
- name (str):
- Output only. Partition values used in the HTTP URL must be
- double encoded. For example,
- ``url_encode(url_encode(value))`` can be used to encode
- "US:CA/CA#Sunnyvale" so that the request URL ends with
- "/partitions/US%253ACA/CA%2523Sunnyvale". The name field in
- the response retains the encoded format.
- values (MutableSequence[str]):
- Required. Immutable. The set of values
- representing the partition, which correspond to
- the partition schema defined in the parent
- entity.
- location (str):
- Required. Immutable. The location of the entity data within
- the partition, for example,
- ``gs://bucket/path/to/entity/key1=value1/key2=value2``. Or
- ``projects/{project_id}/datasets/{dataset_id}/tables/{table_id}``.
- etag (str):
- Optional. The etag for this partition.
- """
-
- name: str = proto.Field(
- proto.STRING,
- number=1,
- )
- values: MutableSequence[str] = proto.RepeatedField(
- proto.STRING,
- number=2,
- )
- location: str = proto.Field(
- proto.STRING,
- number=3,
- )
- etag: str = proto.Field(
- proto.STRING,
- number=4,
- )
-
-
-class Schema(proto.Message):
- r"""Schema information describing the structure and layout of the
- data.
-
- Attributes:
- user_managed (bool):
- Required. Set to ``true`` if user-managed or ``false`` if
- managed by Dataplex. The default is ``false`` (managed by
- Dataplex).
-
- - Set to ``false``\ to enable Dataplex discovery to update
- the schema, including new data discovery, schema
- inference, and schema evolution.
Users retain the ability - to input and edit the schema. Dataplex treats schema - input by the user as though produced by a previous - Dataplex discovery operation, and it will evolve the - schema and take action based on that treatment. - - - Set to ``true`` to fully manage the entity schema. This - setting guarantees that Dataplex will not change schema - fields. - fields (MutableSequence[google.cloud.dataplex_v1.types.Schema.SchemaField]): - Optional. The sequence of fields describing data in table - entities. **Note:** BigQuery SchemaFields are immutable. - partition_fields (MutableSequence[google.cloud.dataplex_v1.types.Schema.PartitionField]): - Optional. The sequence of fields describing - the partition structure in entities. If this - field is empty, there are no partitions within - the data. - partition_style (google.cloud.dataplex_v1.types.Schema.PartitionStyle): - Optional. The structure of paths containing - partition data within the entity. - """ - class Type(proto.Enum): - r"""Type information for fields in schemas and partition schemas. - - Values: - TYPE_UNSPECIFIED (0): - SchemaType unspecified. - BOOLEAN (1): - Boolean field. - BYTE (2): - Single byte numeric field. - INT16 (3): - 16-bit numeric field. - INT32 (4): - 32-bit numeric field. - INT64 (5): - 64-bit numeric field. - FLOAT (6): - Floating point numeric field. - DOUBLE (7): - Double precision numeric field. - DECIMAL (8): - Real value numeric field. - STRING (9): - Sequence of characters field. - BINARY (10): - Sequence of bytes field. - TIMESTAMP (11): - Date and time field. - DATE (12): - Date field. - TIME (13): - Time field. - RECORD (14): - Structured field. Nested fields that define - the structure of the map. If all nested fields - are nullable, this field represents a union. - NULL (100): - Null field that does not have values. - """ - TYPE_UNSPECIFIED = 0 - BOOLEAN = 1 - BYTE = 2 - INT16 = 3 - INT32 = 4 - INT64 = 5 - FLOAT = 6 - DOUBLE = 7 - DECIMAL = 8 - STRING = 9 - BINARY = 10 - TIMESTAMP = 11 - DATE = 12 - TIME = 13 - RECORD = 14 - NULL = 100 - - class Mode(proto.Enum): - r"""Additional qualifiers to define field semantics. - - Values: - MODE_UNSPECIFIED (0): - Mode unspecified. - REQUIRED (1): - The field has required semantics. - NULLABLE (2): - The field has optional semantics, and may be - null. - REPEATED (3): - The field has repeated (0 or more) semantics, - and is a list of values. - """ - MODE_UNSPECIFIED = 0 - REQUIRED = 1 - NULLABLE = 2 - REPEATED = 3 - - class PartitionStyle(proto.Enum): - r"""The structure of paths within the entity, which represent - partitions. - - Values: - PARTITION_STYLE_UNSPECIFIED (0): - PartitionStyle unspecified - HIVE_COMPATIBLE (1): - Partitions are hive-compatible. Examples: - ``gs://bucket/path/to/table/dt=2019-10-31/lang=en``, - ``gs://bucket/path/to/table/dt=2019-10-31/lang=en/late``. - """ - PARTITION_STYLE_UNSPECIFIED = 0 - HIVE_COMPATIBLE = 1 - - class SchemaField(proto.Message): - r"""Represents a column field within a table schema. - - Attributes: - name (str): - Required. The name of the field. Must contain - only letters, numbers and underscores, with a - maximum length of 767 characters, and must begin - with a letter or underscore. - description (str): - Optional. User friendly field description. - Must be less than or equal to 1024 characters. - type_ (google.cloud.dataplex_v1.types.Schema.Type): - Required. The type of field. - mode (google.cloud.dataplex_v1.types.Schema.Mode): - Required. Additional field semantics. 
- fields (MutableSequence[google.cloud.dataplex_v1.types.Schema.SchemaField]):
- Optional. Any nested field for complex types.
- """
-
- name: str = proto.Field(
- proto.STRING,
- number=1,
- )
- description: str = proto.Field(
- proto.STRING,
- number=2,
- )
- type_: 'Schema.Type' = proto.Field(
- proto.ENUM,
- number=3,
- enum='Schema.Type',
- )
- mode: 'Schema.Mode' = proto.Field(
- proto.ENUM,
- number=4,
- enum='Schema.Mode',
- )
- fields: MutableSequence['Schema.SchemaField'] = proto.RepeatedField(
- proto.MESSAGE,
- number=10,
- message='Schema.SchemaField',
- )
-
- class PartitionField(proto.Message):
- r"""Represents a key field within the entity's partition structure. You
- can have up to 20 partition fields, but only the first 10
- can be used for filtering, due to performance
- considerations. **Note:** Partition fields are immutable.
-
- Attributes:
- name (str):
- Required. Partition field name must consist
- of letters, numbers, and underscores only, with
- a maximum length of 256 characters, and must
- begin with a letter or underscore.
- type_ (google.cloud.dataplex_v1.types.Schema.Type):
- Required. Immutable. The type of field.
- """
-
- name: str = proto.Field(
- proto.STRING,
- number=1,
- )
- type_: 'Schema.Type' = proto.Field(
- proto.ENUM,
- number=2,
- enum='Schema.Type',
- )
-
- user_managed: bool = proto.Field(
- proto.BOOL,
- number=1,
- )
- fields: MutableSequence[SchemaField] = proto.RepeatedField(
- proto.MESSAGE,
- number=2,
- message=SchemaField,
- )
- partition_fields: MutableSequence[PartitionField] = proto.RepeatedField(
- proto.MESSAGE,
- number=3,
- message=PartitionField,
- )
- partition_style: PartitionStyle = proto.Field(
- proto.ENUM,
- number=4,
- enum=PartitionStyle,
- )
-
-
-class StorageFormat(proto.Message):
- r"""Describes the format of the data within its storage location.
-
- This message has `oneof`_ fields (mutually exclusive fields).
- For each oneof, at most one member field can be set at the same time.
- Setting any member of the oneof automatically clears all other
- members.
-
- .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields
-
- Attributes:
- format_ (google.cloud.dataplex_v1.types.StorageFormat.Format):
- Output only. The data format associated with
- the stored data, which represents content type
- values. The value is inferred from mime type.
- compression_format (google.cloud.dataplex_v1.types.StorageFormat.CompressionFormat):
- Optional. The compression type associated
- with the stored data. If unspecified, the data
- is uncompressed.
- mime_type (str):
- Required. The mime type descriptor for the
- data. Must match the pattern {type}/{subtype}.
- Supported values:
-
- - application/x-parquet
- - application/x-avro
- - application/x-orc
- - application/x-tfrecord
- - application/x-parquet+iceberg
- - application/x-avro+iceberg
- - application/x-orc+iceberg
- - application/json
- - application/{subtypes}
- - text/csv
- - text/
- - image/{image subtype}
- - video/{video subtype}
- - audio/{audio subtype}
- csv (google.cloud.dataplex_v1.types.StorageFormat.CsvOptions):
- Optional. Additional information about CSV
- formatted data.
-
- This field is a member of `oneof`_ ``options``.
- json (google.cloud.dataplex_v1.types.StorageFormat.JsonOptions):
- Optional. Additional information about JSON
- formatted data.
-
- This field is a member of `oneof`_ ``options``.
- iceberg (google.cloud.dataplex_v1.types.StorageFormat.IcebergOptions):
- Optional.
Additional information about - iceberg tables. - - This field is a member of `oneof`_ ``options``. - """ - class Format(proto.Enum): - r"""The specific file format of the data. - - Values: - FORMAT_UNSPECIFIED (0): - Format unspecified. - PARQUET (1): - Parquet-formatted structured data. - AVRO (2): - Avro-formatted structured data. - ORC (3): - Orc-formatted structured data. - CSV (100): - Csv-formatted semi-structured data. - JSON (101): - Json-formatted semi-structured data. - IMAGE (200): - Image data formats (such as jpg and png). - AUDIO (201): - Audio data formats (such as mp3, and wav). - VIDEO (202): - Video data formats (such as mp4 and mpg). - TEXT (203): - Textual data formats (such as txt and xml). - TFRECORD (204): - TensorFlow record format. - OTHER (1000): - Data that doesn't match a specific format. - UNKNOWN (1001): - Data of an unknown format. - """ - FORMAT_UNSPECIFIED = 0 - PARQUET = 1 - AVRO = 2 - ORC = 3 - CSV = 100 - JSON = 101 - IMAGE = 200 - AUDIO = 201 - VIDEO = 202 - TEXT = 203 - TFRECORD = 204 - OTHER = 1000 - UNKNOWN = 1001 - - class CompressionFormat(proto.Enum): - r"""The specific compressed file format of the data. - - Values: - COMPRESSION_FORMAT_UNSPECIFIED (0): - CompressionFormat unspecified. Implies - uncompressed data. - GZIP (2): - GZip compressed set of files. - BZIP2 (3): - BZip2 compressed set of files. - """ - COMPRESSION_FORMAT_UNSPECIFIED = 0 - GZIP = 2 - BZIP2 = 3 - - class CsvOptions(proto.Message): - r"""Describes CSV and similar semi-structured data formats. - - Attributes: - encoding (str): - Optional. The character encoding of the data. - Accepts "US-ASCII", "UTF-8", and "ISO-8859-1". - Defaults to UTF-8 if unspecified. - header_rows (int): - Optional. The number of rows to interpret as - header rows that should be skipped when reading - data rows. Defaults to 0. - delimiter (str): - Optional. The delimiter used to separate - values. Defaults to ','. - quote (str): - Optional. The character used to quote column - values. Accepts '"' (double quotation mark) or - ''' (single quotation mark). Defaults to '"' - (double quotation mark) if unspecified. - """ - - encoding: str = proto.Field( - proto.STRING, - number=1, - ) - header_rows: int = proto.Field( - proto.INT32, - number=2, - ) - delimiter: str = proto.Field( - proto.STRING, - number=3, - ) - quote: str = proto.Field( - proto.STRING, - number=4, - ) - - class JsonOptions(proto.Message): - r"""Describes JSON data format. - - Attributes: - encoding (str): - Optional. The character encoding of the data. - Accepts "US-ASCII", "UTF-8" and "ISO-8859-1". - Defaults to UTF-8 if not specified. - """ - - encoding: str = proto.Field( - proto.STRING, - number=1, - ) - - class IcebergOptions(proto.Message): - r"""Describes Iceberg data format. - - Attributes: - metadata_location (str): - Optional. 
The location where the Iceberg
- metadata is present. It must be within the
- table path.
- """
-
- metadata_location: str = proto.Field(
- proto.STRING,
- number=1,
- )
-
- format_: Format = proto.Field(
- proto.ENUM,
- number=1,
- enum=Format,
- )
- compression_format: CompressionFormat = proto.Field(
- proto.ENUM,
- number=2,
- enum=CompressionFormat,
- )
- mime_type: str = proto.Field(
- proto.STRING,
- number=3,
- )
- csv: CsvOptions = proto.Field(
- proto.MESSAGE,
- number=10,
- oneof='options',
- message=CsvOptions,
- )
- json: JsonOptions = proto.Field(
- proto.MESSAGE,
- number=11,
- oneof='options',
- message=JsonOptions,
- )
- iceberg: IcebergOptions = proto.Field(
- proto.MESSAGE,
- number=12,
- oneof='options',
- message=IcebergOptions,
- )
-
-
-class StorageAccess(proto.Message):
- r"""Describes the access mechanism of the data within its storage
- location.
-
- Attributes:
- read (google.cloud.dataplex_v1.types.StorageAccess.AccessMode):
- Output only. Describes the read access
- mechanism of the data. Not user settable.
- """
- class AccessMode(proto.Enum):
- r"""Access Mode determines how data stored within the Entity is
- read.
-
- Values:
- ACCESS_MODE_UNSPECIFIED (0):
- Access mode unspecified.
- DIRECT (1):
- Default. Data is accessed directly using
- storage APIs.
- MANAGED (2):
- Data is accessed through a managed interface
- using BigQuery APIs.
- """
- ACCESS_MODE_UNSPECIFIED = 0
- DIRECT = 1
- MANAGED = 2
-
- read: AccessMode = proto.Field(
- proto.ENUM,
- number=21,
- enum=AccessMode,
- )
-
-
-__all__ = tuple(sorted(__protobuf__.manifest))
diff --git a/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/types/processing.py b/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/types/processing.py
deleted file mode 100644
index 4e88739fa5ce..000000000000
--- a/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/types/processing.py
+++ /dev/null
@@ -1,186 +0,0 @@
-# -*- coding: utf-8 -*-
-# Copyright 2024 Google LLC
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-from __future__ import annotations
-
-from typing import MutableMapping, MutableSequence
-
-import proto # type: ignore
-
-
-__protobuf__ = proto.module(
- package='google.cloud.dataplex.v1',
- manifest={
- 'Trigger',
- 'DataSource',
- 'ScannedData',
- },
-)
-
-
-class Trigger(proto.Message):
- r"""DataScan scheduling and trigger settings.
-
- This message has `oneof`_ fields (mutually exclusive fields).
- For each oneof, at most one member field can be set at the same time.
- Setting any member of the oneof automatically clears all other
- members.
-
- .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields
-
- Attributes:
- on_demand (google.cloud.dataplex_v1.types.Trigger.OnDemand):
- The scan runs once via ``RunDataScan`` API.
-
- This field is a member of `oneof`_ ``mode``.
- schedule (google.cloud.dataplex_v1.types.Trigger.Schedule):
- The scan is scheduled to run periodically.
-
- This field is a member of `oneof`_ ``mode``.
- """
-
- class OnDemand(proto.Message):
- r"""The scan runs once via ``RunDataScan`` API.
- """
-
- class Schedule(proto.Message):
- r"""The scan is scheduled to run periodically.
-
- Attributes:
- cron (str):
- Required. `Cron <https://en.wikipedia.org/wiki/Cron>`__
- schedule for running scans periodically.
-
- To explicitly set a timezone in the cron tab, apply a prefix
- in the cron tab: **"CRON_TZ=${IANA_TIME_ZONE}"** or
- **"TZ=${IANA_TIME_ZONE}"**. The **${IANA_TIME_ZONE}** may
- only be a valid string from the IANA time zone database
- (`wikipedia <https://en.wikipedia.org/wiki/List_of_tz_database_time_zones>`__).
- For example, ``CRON_TZ=America/New_York 1 * * * *``, or
- ``TZ=America/New_York 1 * * * *``.
-
- This field is required for Schedule scans.
- """
-
- cron: str = proto.Field(
- proto.STRING,
- number=1,
- )
-
- on_demand: OnDemand = proto.Field(
- proto.MESSAGE,
- number=100,
- oneof='mode',
- message=OnDemand,
- )
- schedule: Schedule = proto.Field(
- proto.MESSAGE,
- number=101,
- oneof='mode',
- message=Schedule,
- )
-
-
-class DataSource(proto.Message):
- r"""The data source for DataScan.
-
- This message has `oneof`_ fields (mutually exclusive fields).
- For each oneof, at most one member field can be set at the same time.
- Setting any member of the oneof automatically clears all other
- members.
-
- .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields
-
- Attributes:
- entity (str):
- Immutable. The Dataplex entity that represents the data
- source (e.g. BigQuery table) for DataScan, of the form:
- ``projects/{project_number}/locations/{location_id}/lakes/{lake_id}/zones/{zone_id}/entities/{entity_id}``.
-
- This field is a member of `oneof`_ ``source``.
- resource (str):
- Immutable. The service-qualified full resource name of the
- cloud resource for a DataScan job to scan against. The field
- could be: BigQuery table of type "TABLE" for
- DataProfileScan/DataQualityScan. Format:
- //bigquery.googleapis.com/projects/PROJECT_ID/datasets/DATASET_ID/tables/TABLE_ID
-
- This field is a member of `oneof`_ ``source``.
- """
-
- entity: str = proto.Field(
- proto.STRING,
- number=100,
- oneof='source',
- )
- resource: str = proto.Field(
- proto.STRING,
- number=101,
- oneof='source',
- )
-
-
-class ScannedData(proto.Message):
- r"""The data scanned during processing (e.g. in an incremental
- DataScan).
-
-
- .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields
-
- Attributes:
- incremental_field (google.cloud.dataplex_v1.types.ScannedData.IncrementalField):
- The range denoted by values of an incremental
- field.
-
- This field is a member of `oneof`_ ``data_range``.
- """
-
- class IncrementalField(proto.Message):
- r"""A data range denoted by a pair of start/end values of a
- field.
-
- Attributes:
- field (str):
- The field that contains values which
- monotonically increase over time (e.g. a
- timestamp column).
- start (str):
- Value that marks the start of the range.
- end (str):
- Value that marks the end of the range.
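# Editor's note: an illustrative sketch (not part of the diff) of how the
# Trigger and DataSource messages above are typically combined when
# configuring a DataScan with the released `google-cloud-dataplex` package.
# The project, location, lake, zone, and entity names are hypothetical.
from google.cloud import dataplex_v1

# A scheduled trigger: hourly at minute 1, evaluated in New York time.
trigger = dataplex_v1.Trigger(
    schedule=dataplex_v1.Trigger.Schedule(
        cron="CRON_TZ=America/New_York 1 * * * *",
    ),
)

# The scan's source, expressed as a Dataplex entity. This sets one arm of
# the `source` oneof; setting `resource` instead would clear `entity`.
source = dataplex_v1.DataSource(
    entity=(
        "projects/123456/locations/us-central1/lakes/my-lake"
        "/zones/my-zone/entities/my-entity"
    ),
)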
- """ - - field: str = proto.Field( - proto.STRING, - number=1, - ) - start: str = proto.Field( - proto.STRING, - number=2, - ) - end: str = proto.Field( - proto.STRING, - number=3, - ) - - incremental_field: IncrementalField = proto.Field( - proto.MESSAGE, - number=1, - oneof='data_range', - message=IncrementalField, - ) - - -__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/types/resources.py b/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/types/resources.py deleted file mode 100644 index d07ae9df21e3..000000000000 --- a/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/types/resources.py +++ /dev/null @@ -1,1444 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -from __future__ import annotations - -from typing import MutableMapping, MutableSequence - -import proto # type: ignore - -from google.protobuf import duration_pb2 # type: ignore -from google.protobuf import timestamp_pb2 # type: ignore - - -__protobuf__ = proto.module( - package='google.cloud.dataplex.v1', - manifest={ - 'State', - 'Lake', - 'AssetStatus', - 'Zone', - 'Action', - 'Asset', - }, -) - - -class State(proto.Enum): - r"""State of a resource. - - Values: - STATE_UNSPECIFIED (0): - State is not specified. - ACTIVE (1): - Resource is active, i.e., ready to use. - CREATING (2): - Resource is under creation. - DELETING (3): - Resource is under deletion. - ACTION_REQUIRED (4): - Resource is active but has unresolved - actions. - """ - STATE_UNSPECIFIED = 0 - ACTIVE = 1 - CREATING = 2 - DELETING = 3 - ACTION_REQUIRED = 4 - - -class Lake(proto.Message): - r"""A lake is a centralized repository for managing enterprise - data across the organization distributed across many cloud - projects, and stored in a variety of storage services such as - Google Cloud Storage and BigQuery. The resources attached to a - lake are referred to as managed resources. Data within these - managed resources can be structured or unstructured. A lake - provides data admins with tools to organize, secure and manage - their data at scale, and provides data scientists and data - engineers an integrated experience to easily search, discover, - analyze and transform data and associated metadata. - - Attributes: - name (str): - Output only. The relative resource name of the lake, of the - form: - ``projects/{project_number}/locations/{location_id}/lakes/{lake_id}``. - display_name (str): - Optional. User friendly display name. - uid (str): - Output only. System generated globally unique - ID for the lake. This ID will be different if - the lake is deleted and re-created with the same - name. - create_time (google.protobuf.timestamp_pb2.Timestamp): - Output only. The time when the lake was - created. - update_time (google.protobuf.timestamp_pb2.Timestamp): - Output only. The time when the lake was last - updated. - labels (MutableMapping[str, str]): - Optional. 
User-defined labels for the lake. - description (str): - Optional. Description of the lake. - state (google.cloud.dataplex_v1.types.State): - Output only. Current state of the lake. - service_account (str): - Output only. Service account associated with - this lake. This service account must be - authorized to access or operate on resources - managed by the lake. - metastore (google.cloud.dataplex_v1.types.Lake.Metastore): - Optional. Settings to manage lake and - Dataproc Metastore service instance association. - asset_status (google.cloud.dataplex_v1.types.AssetStatus): - Output only. Aggregated status of the - underlying assets of the lake. - metastore_status (google.cloud.dataplex_v1.types.Lake.MetastoreStatus): - Output only. Metastore status of the lake. - """ - - class Metastore(proto.Message): - r"""Settings to manage association of Dataproc Metastore with a - lake. - - Attributes: - service (str): - Optional. A relative reference to the Dataproc Metastore - (https://cloud.google.com/dataproc-metastore/docs) service - associated with the lake: - ``projects/{project_id}/locations/{location_id}/services/{service_id}`` - """ - - service: str = proto.Field( - proto.STRING, - number=1, - ) - - class MetastoreStatus(proto.Message): - r"""Status of Lake and Dataproc Metastore service instance - association. - - Attributes: - state (google.cloud.dataplex_v1.types.Lake.MetastoreStatus.State): - Current state of association. - message (str): - Additional information about the current - status. - update_time (google.protobuf.timestamp_pb2.Timestamp): - Last update time of the metastore status of - the lake. - endpoint (str): - The URI of the endpoint used to access the - Metastore service. - """ - class State(proto.Enum): - r"""Current state of association. - - Values: - STATE_UNSPECIFIED (0): - Unspecified. - NONE (1): - A Metastore service instance is not - associated with the lake. - READY (2): - A Metastore service instance is attached to - the lake. - UPDATING (3): - Attach/detach is in progress. - ERROR (4): - Attach/detach could not be done due to - errors. 
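# Editor's note: a minimal sketch (not part of the diff) showing how the
# Lake and Lake.Metastore messages above might be used to create a lake
# with a Dataproc Metastore association; all resource names here are
# hypothetical.
from google.cloud import dataplex_v1

client = dataplex_v1.DataplexServiceClient()
lake = dataplex_v1.Lake(
    display_name="Analytics Lake",
    metastore=dataplex_v1.Lake.Metastore(
        service="projects/my-project/locations/us-central1/services/my-dpms",
    ),
)
# create_lake returns a long-running operation; result() blocks until the
# lake reaches a terminal state and returns the created Lake.
operation = client.create_lake(
    parent="projects/my-project/locations/us-central1",
    lake_id="analytics-lake",
    lake=lake,
)
created = operation.result()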
- """ - STATE_UNSPECIFIED = 0 - NONE = 1 - READY = 2 - UPDATING = 3 - ERROR = 4 - - state: 'Lake.MetastoreStatus.State' = proto.Field( - proto.ENUM, - number=1, - enum='Lake.MetastoreStatus.State', - ) - message: str = proto.Field( - proto.STRING, - number=2, - ) - update_time: timestamp_pb2.Timestamp = proto.Field( - proto.MESSAGE, - number=3, - message=timestamp_pb2.Timestamp, - ) - endpoint: str = proto.Field( - proto.STRING, - number=4, - ) - - name: str = proto.Field( - proto.STRING, - number=1, - ) - display_name: str = proto.Field( - proto.STRING, - number=2, - ) - uid: str = proto.Field( - proto.STRING, - number=3, - ) - create_time: timestamp_pb2.Timestamp = proto.Field( - proto.MESSAGE, - number=4, - message=timestamp_pb2.Timestamp, - ) - update_time: timestamp_pb2.Timestamp = proto.Field( - proto.MESSAGE, - number=5, - message=timestamp_pb2.Timestamp, - ) - labels: MutableMapping[str, str] = proto.MapField( - proto.STRING, - proto.STRING, - number=6, - ) - description: str = proto.Field( - proto.STRING, - number=7, - ) - state: 'State' = proto.Field( - proto.ENUM, - number=8, - enum='State', - ) - service_account: str = proto.Field( - proto.STRING, - number=9, - ) - metastore: Metastore = proto.Field( - proto.MESSAGE, - number=102, - message=Metastore, - ) - asset_status: 'AssetStatus' = proto.Field( - proto.MESSAGE, - number=103, - message='AssetStatus', - ) - metastore_status: MetastoreStatus = proto.Field( - proto.MESSAGE, - number=104, - message=MetastoreStatus, - ) - - -class AssetStatus(proto.Message): - r"""Aggregated status of the underlying assets of a lake or zone. - - Attributes: - update_time (google.protobuf.timestamp_pb2.Timestamp): - Last update time of the status. - active_assets (int): - Number of active assets. - security_policy_applying_assets (int): - Number of assets that are in process of - updating the security policy on attached - resources. - """ - - update_time: timestamp_pb2.Timestamp = proto.Field( - proto.MESSAGE, - number=1, - message=timestamp_pb2.Timestamp, - ) - active_assets: int = proto.Field( - proto.INT32, - number=2, - ) - security_policy_applying_assets: int = proto.Field( - proto.INT32, - number=3, - ) - - -class Zone(proto.Message): - r"""A zone represents a logical group of related assets within a - lake. A zone can be used to map to organizational structure or - represent stages of data readiness from raw to curated. It - provides managing behavior that is shared or inherited by all - contained assets. - - Attributes: - name (str): - Output only. The relative resource name of the zone, of the - form: - ``projects/{project_number}/locations/{location_id}/lakes/{lake_id}/zones/{zone_id}``. - display_name (str): - Optional. User friendly display name. - uid (str): - Output only. System generated globally unique - ID for the zone. This ID will be different if - the zone is deleted and re-created with the same - name. - create_time (google.protobuf.timestamp_pb2.Timestamp): - Output only. The time when the zone was - created. - update_time (google.protobuf.timestamp_pb2.Timestamp): - Output only. The time when the zone was last - updated. - labels (MutableMapping[str, str]): - Optional. User defined labels for the zone. - description (str): - Optional. Description of the zone. - state (google.cloud.dataplex_v1.types.State): - Output only. Current state of the zone. - type_ (google.cloud.dataplex_v1.types.Zone.Type): - Required. Immutable. The type of the zone. - discovery_spec (google.cloud.dataplex_v1.types.Zone.DiscoverySpec): - Optional. 
Specification of the discovery
- feature applied to data in this zone.
- resource_spec (google.cloud.dataplex_v1.types.Zone.ResourceSpec):
- Required. Specification of the resources that
- are referenced by the assets within this zone.
- asset_status (google.cloud.dataplex_v1.types.AssetStatus):
- Output only. Aggregated status of the
- underlying assets of the zone.
- """
- class Type(proto.Enum):
- r"""Type of zone.
-
- Values:
- TYPE_UNSPECIFIED (0):
- Zone type not specified.
- RAW (1):
- A zone that contains data that needs further
- processing before it is considered generally
- ready for consumption and analytics workloads.
- CURATED (2):
- A zone that contains data that is considered
- to be ready for broader consumption and
- analytics workloads. Curated structured data
- stored in Cloud Storage must conform to certain
- file formats (parquet, avro and orc) and be
- organized in a hive-compatible directory layout.
- """
- TYPE_UNSPECIFIED = 0
- RAW = 1
- CURATED = 2
-
- class ResourceSpec(proto.Message):
- r"""Settings for resources attached as assets within a zone.
-
- Attributes:
- location_type (google.cloud.dataplex_v1.types.Zone.ResourceSpec.LocationType):
- Required. Immutable. The location type of the
- resources that are allowed to be attached to the
- assets within this zone.
- """
- class LocationType(proto.Enum):
- r"""Location type of the resources attached to a zone.
-
- Values:
- LOCATION_TYPE_UNSPECIFIED (0):
- Unspecified location type.
- SINGLE_REGION (1):
- Resources that are associated with a single
- region.
- MULTI_REGION (2):
- Resources that are associated with a
- multi-region location.
- """
- LOCATION_TYPE_UNSPECIFIED = 0
- SINGLE_REGION = 1
- MULTI_REGION = 2
-
- location_type: 'Zone.ResourceSpec.LocationType' = proto.Field(
- proto.ENUM,
- number=1,
- enum='Zone.ResourceSpec.LocationType',
- )
-
- class DiscoverySpec(proto.Message):
- r"""Settings to manage the metadata discovery and publishing in a
- zone.
-
-
- .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields
-
- Attributes:
- enabled (bool):
- Required. Whether discovery is enabled.
- include_patterns (MutableSequence[str]):
- Optional. The list of patterns to apply for
- selecting data to include during discovery if
- only a subset of the data should be considered.
- For Cloud Storage bucket assets, these are
- interpreted as glob patterns used to match
- object names. For BigQuery dataset assets, these
- are interpreted as patterns to match table
- names.
- exclude_patterns (MutableSequence[str]):
- Optional. The list of patterns to apply for
- selecting data to exclude during discovery. For
- Cloud Storage bucket assets, these are
- interpreted as glob patterns used to match
- object names. For BigQuery dataset assets, these
- are interpreted as patterns to match table
- names.
- csv_options (google.cloud.dataplex_v1.types.Zone.DiscoverySpec.CsvOptions):
- Optional. Configuration for CSV data.
- json_options (google.cloud.dataplex_v1.types.Zone.DiscoverySpec.JsonOptions):
- Optional. Configuration for Json data.
- schedule (str):
- Optional. Cron schedule (https://en.wikipedia.org/wiki/Cron)
- for running discovery periodically. Successive discovery
- runs must be scheduled at least 60 minutes apart. The
- default value is to run discovery every 60 minutes. To
- explicitly set a timezone to the cron tab, apply a prefix in
- the cron tab: "CRON_TZ=${IANA_TIME_ZONE}" or
- "TZ=${IANA_TIME_ZONE}". The ${IANA_TIME_ZONE} may only be a
- valid string from the IANA time zone database. For example,
- ``CRON_TZ=America/New_York 1 * * * *``, or
- ``TZ=America/New_York 1 * * * *``.
-
- This field is a member of `oneof`_ ``trigger``.
- """
-
- class CsvOptions(proto.Message):
- r"""Describe CSV and similar semi-structured data formats.
-
- Attributes:
- header_rows (int):
- Optional. The number of rows to interpret as
- header rows that should be skipped when reading
- data rows.
- delimiter (str):
- Optional. The delimiter being used to
- separate values. This defaults to ','.
- encoding (str):
- Optional. The character encoding of the data.
- The default is UTF-8.
- disable_type_inference (bool):
- Optional. Whether to disable the inference of
- data type for CSV data. If true, all columns
- will be registered as strings.
- """
-
- header_rows: int = proto.Field(
- proto.INT32,
- number=1,
- )
- delimiter: str = proto.Field(
- proto.STRING,
- number=2,
- )
- encoding: str = proto.Field(
- proto.STRING,
- number=3,
- )
- disable_type_inference: bool = proto.Field(
- proto.BOOL,
- number=4,
- )
-
- class JsonOptions(proto.Message):
- r"""Describe JSON data format.
-
- Attributes:
- encoding (str):
- Optional. The character encoding of the data.
- The default is UTF-8.
- disable_type_inference (bool):
- Optional. Whether to disable the inference of
- data type for Json data. If true, all columns
- will be registered as their primitive types
- (string, number, or boolean).
- """
-
- encoding: str = proto.Field(
- proto.STRING,
- number=1,
- )
- disable_type_inference: bool = proto.Field(
- proto.BOOL,
- number=2,
- )
-
- enabled: bool = proto.Field(
- proto.BOOL,
- number=1,
- )
- include_patterns: MutableSequence[str] = proto.RepeatedField(
- proto.STRING,
- number=2,
- )
- exclude_patterns: MutableSequence[str] = proto.RepeatedField(
- proto.STRING,
- number=3,
- )
- csv_options: 'Zone.DiscoverySpec.CsvOptions' = proto.Field(
- proto.MESSAGE,
- number=4,
- message='Zone.DiscoverySpec.CsvOptions',
- )
- json_options: 'Zone.DiscoverySpec.JsonOptions' = proto.Field(
- proto.MESSAGE,
- number=5,
- message='Zone.DiscoverySpec.JsonOptions',
- )
- schedule: str = proto.Field(
- proto.STRING,
- number=10,
- oneof='trigger',
- )
-
- name: str = proto.Field(
- proto.STRING,
- number=1,
- )
- display_name: str = proto.Field(
- proto.STRING,
- number=2,
- )
- uid: str = proto.Field(
- proto.STRING,
- number=3,
- )
- create_time: timestamp_pb2.Timestamp = proto.Field(
- proto.MESSAGE,
- number=4,
- message=timestamp_pb2.Timestamp,
- )
- update_time: timestamp_pb2.Timestamp = proto.Field(
- proto.MESSAGE,
- number=5,
- message=timestamp_pb2.Timestamp,
- )
- labels: MutableMapping[str, str] = proto.MapField(
- proto.STRING,
- proto.STRING,
- number=6,
- )
- description: str = proto.Field(
- proto.STRING,
- number=7,
- )
- state: 'State' = proto.Field(
- proto.ENUM,
- number=8,
- enum='State',
- )
- type_: Type = proto.Field(
- proto.ENUM,
- number=9,
- enum=Type,
- )
- discovery_spec: DiscoverySpec = proto.Field(
- proto.MESSAGE,
- number=103,
- message=DiscoverySpec,
- )
- resource_spec: ResourceSpec = proto.Field(
- proto.MESSAGE,
- number=104,
- message=ResourceSpec,
- )
- asset_status: 'AssetStatus' = proto.Field(
- proto.MESSAGE,
- number=105,
- message='AssetStatus',
- )
-
-
-class Action(proto.Message):
- r"""Action represents an issue requiring administrator action for
- resolution.
-
- This message has `oneof`_ fields (mutually exclusive fields).
- For each oneof, at most one member field can be set at the same time. - Setting any member of the oneof automatically clears all other - members. - - .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields - - Attributes: - category (google.cloud.dataplex_v1.types.Action.Category): - The category of issue associated with the - action. - issue (str): - Detailed description of the issue requiring - action. - detect_time (google.protobuf.timestamp_pb2.Timestamp): - The time that the issue was detected. - name (str): - Output only. The relative resource name of the action, of - the form: - ``projects/{project}/locations/{location}/lakes/{lake}/actions/{action}`` - ``projects/{project}/locations/{location}/lakes/{lake}/zones/{zone}/actions/{action}`` - ``projects/{project}/locations/{location}/lakes/{lake}/zones/{zone}/assets/{asset}/actions/{action}``. - lake (str): - Output only. The relative resource name of the lake, of the - form: - ``projects/{project_number}/locations/{location_id}/lakes/{lake_id}``. - zone (str): - Output only. The relative resource name of the zone, of the - form: - ``projects/{project_number}/locations/{location_id}/lakes/{lake_id}/zones/{zone_id}``. - asset (str): - Output only. The relative resource name of the asset, of the - form: - ``projects/{project_number}/locations/{location_id}/lakes/{lake_id}/zones/{zone_id}/assets/{asset_id}``. - data_locations (MutableSequence[str]): - The list of data locations associated with this action. - Cloud Storage locations are represented as URI paths(E.g. - ``gs://bucket/table1/year=2020/month=Jan/``). BigQuery - locations refer to resource names(E.g. - ``bigquery.googleapis.com/projects/project-id/datasets/dataset-id``). - invalid_data_format (google.cloud.dataplex_v1.types.Action.InvalidDataFormat): - Details for issues related to invalid or - unsupported data formats. - - This field is a member of `oneof`_ ``details``. - incompatible_data_schema (google.cloud.dataplex_v1.types.Action.IncompatibleDataSchema): - Details for issues related to incompatible - schemas detected within data. - - This field is a member of `oneof`_ ``details``. - invalid_data_partition (google.cloud.dataplex_v1.types.Action.InvalidDataPartition): - Details for issues related to invalid or - unsupported data partition structure. - - This field is a member of `oneof`_ ``details``. - missing_data (google.cloud.dataplex_v1.types.Action.MissingData): - Details for issues related to absence of data - within managed resources. - - This field is a member of `oneof`_ ``details``. - missing_resource (google.cloud.dataplex_v1.types.Action.MissingResource): - Details for issues related to absence of a - managed resource. - - This field is a member of `oneof`_ ``details``. - unauthorized_resource (google.cloud.dataplex_v1.types.Action.UnauthorizedResource): - Details for issues related to lack of - permissions to access data resources. - - This field is a member of `oneof`_ ``details``. - failed_security_policy_apply (google.cloud.dataplex_v1.types.Action.FailedSecurityPolicyApply): - Details for issues related to applying - security policy. - - This field is a member of `oneof`_ ``details``. - invalid_data_organization (google.cloud.dataplex_v1.types.Action.InvalidDataOrganization): - Details for issues related to invalid data - arrangement. - - This field is a member of `oneof`_ ``details``. - """ - class Category(proto.Enum): - r"""The category of issues. 
- - Values: - CATEGORY_UNSPECIFIED (0): - Unspecified category. - RESOURCE_MANAGEMENT (1): - Resource management related issues. - SECURITY_POLICY (2): - Security policy related issues. - DATA_DISCOVERY (3): - Data and discovery related issues. - """ - CATEGORY_UNSPECIFIED = 0 - RESOURCE_MANAGEMENT = 1 - SECURITY_POLICY = 2 - DATA_DISCOVERY = 3 - - class MissingResource(proto.Message): - r"""Action details for resource references in assets that cannot - be located. - - """ - - class UnauthorizedResource(proto.Message): - r"""Action details for unauthorized resource issues raised to - indicate that the service account associated with the lake - instance is not authorized to access or manage the resource - associated with an asset. - - """ - - class FailedSecurityPolicyApply(proto.Message): - r"""Failed to apply security policy to the managed resource(s) - under a lake, zone or an asset. For a lake or zone resource, one - or more underlying assets has a failure applying security policy - to the associated managed resource. - - Attributes: - asset (str): - Resource name of one of the assets with - failing security policy application. Populated - for a lake or zone resource only. - """ - - asset: str = proto.Field( - proto.STRING, - number=1, - ) - - class InvalidDataFormat(proto.Message): - r"""Action details for invalid or unsupported data files detected - by discovery. - - Attributes: - sampled_data_locations (MutableSequence[str]): - The list of data locations sampled and used - for format/schema inference. - expected_format (str): - The expected data format of the entity. - new_format (str): - The new unexpected data format within the - entity. - """ - - sampled_data_locations: MutableSequence[str] = proto.RepeatedField( - proto.STRING, - number=1, - ) - expected_format: str = proto.Field( - proto.STRING, - number=2, - ) - new_format: str = proto.Field( - proto.STRING, - number=3, - ) - - class IncompatibleDataSchema(proto.Message): - r"""Action details for incompatible schemas detected by - discovery. - - Attributes: - table (str): - The name of the table containing invalid - data. - existing_schema (str): - The existing and expected schema of the - table. The schema is provided as a JSON - formatted structure listing columns and data - types. - new_schema (str): - The new and incompatible schema within the - table. The schema is provided as a JSON - formatted structured listing columns and data - types. - sampled_data_locations (MutableSequence[str]): - The list of data locations sampled and used - for format/schema inference. - schema_change (google.cloud.dataplex_v1.types.Action.IncompatibleDataSchema.SchemaChange): - Whether the action relates to a schema that - is incompatible or modified. - """ - class SchemaChange(proto.Enum): - r"""Whether the action relates to a schema that is incompatible - or modified. - - Values: - SCHEMA_CHANGE_UNSPECIFIED (0): - Schema change unspecified. - INCOMPATIBLE (1): - Newly discovered schema is incompatible with - existing schema. - MODIFIED (2): - Newly discovered schema has changed from - existing schema for data in a curated zone. 
- """ - SCHEMA_CHANGE_UNSPECIFIED = 0 - INCOMPATIBLE = 1 - MODIFIED = 2 - - table: str = proto.Field( - proto.STRING, - number=1, - ) - existing_schema: str = proto.Field( - proto.STRING, - number=2, - ) - new_schema: str = proto.Field( - proto.STRING, - number=3, - ) - sampled_data_locations: MutableSequence[str] = proto.RepeatedField( - proto.STRING, - number=4, - ) - schema_change: 'Action.IncompatibleDataSchema.SchemaChange' = proto.Field( - proto.ENUM, - number=5, - enum='Action.IncompatibleDataSchema.SchemaChange', - ) - - class InvalidDataPartition(proto.Message): - r"""Action details for invalid or unsupported partitions detected - by discovery. - - Attributes: - expected_structure (google.cloud.dataplex_v1.types.Action.InvalidDataPartition.PartitionStructure): - The issue type of InvalidDataPartition. - """ - class PartitionStructure(proto.Enum): - r"""The expected partition structure. - - Values: - PARTITION_STRUCTURE_UNSPECIFIED (0): - PartitionStructure unspecified. - CONSISTENT_KEYS (1): - Consistent hive-style partition definition - (both raw and curated zone). - HIVE_STYLE_KEYS (2): - Hive style partition definition (curated zone - only). - """ - PARTITION_STRUCTURE_UNSPECIFIED = 0 - CONSISTENT_KEYS = 1 - HIVE_STYLE_KEYS = 2 - - expected_structure: 'Action.InvalidDataPartition.PartitionStructure' = proto.Field( - proto.ENUM, - number=1, - enum='Action.InvalidDataPartition.PartitionStructure', - ) - - class MissingData(proto.Message): - r"""Action details for absence of data detected by discovery. - """ - - class InvalidDataOrganization(proto.Message): - r"""Action details for invalid data arrangement. - """ - - category: Category = proto.Field( - proto.ENUM, - number=1, - enum=Category, - ) - issue: str = proto.Field( - proto.STRING, - number=2, - ) - detect_time: timestamp_pb2.Timestamp = proto.Field( - proto.MESSAGE, - number=4, - message=timestamp_pb2.Timestamp, - ) - name: str = proto.Field( - proto.STRING, - number=5, - ) - lake: str = proto.Field( - proto.STRING, - number=6, - ) - zone: str = proto.Field( - proto.STRING, - number=7, - ) - asset: str = proto.Field( - proto.STRING, - number=8, - ) - data_locations: MutableSequence[str] = proto.RepeatedField( - proto.STRING, - number=9, - ) - invalid_data_format: InvalidDataFormat = proto.Field( - proto.MESSAGE, - number=10, - oneof='details', - message=InvalidDataFormat, - ) - incompatible_data_schema: IncompatibleDataSchema = proto.Field( - proto.MESSAGE, - number=11, - oneof='details', - message=IncompatibleDataSchema, - ) - invalid_data_partition: InvalidDataPartition = proto.Field( - proto.MESSAGE, - number=12, - oneof='details', - message=InvalidDataPartition, - ) - missing_data: MissingData = proto.Field( - proto.MESSAGE, - number=13, - oneof='details', - message=MissingData, - ) - missing_resource: MissingResource = proto.Field( - proto.MESSAGE, - number=14, - oneof='details', - message=MissingResource, - ) - unauthorized_resource: UnauthorizedResource = proto.Field( - proto.MESSAGE, - number=15, - oneof='details', - message=UnauthorizedResource, - ) - failed_security_policy_apply: FailedSecurityPolicyApply = proto.Field( - proto.MESSAGE, - number=21, - oneof='details', - message=FailedSecurityPolicyApply, - ) - invalid_data_organization: InvalidDataOrganization = proto.Field( - proto.MESSAGE, - number=22, - oneof='details', - message=InvalidDataOrganization, - ) - - -class Asset(proto.Message): - r"""An asset represents a cloud resource that is being managed - within a lake as a member of a zone. 
- - Attributes: - name (str): - Output only. The relative resource name of the asset, of the - form: - ``projects/{project_number}/locations/{location_id}/lakes/{lake_id}/zones/{zone_id}/assets/{asset_id}``. - display_name (str): - Optional. User friendly display name. - uid (str): - Output only. System generated globally unique - ID for the asset. This ID will be different if - the asset is deleted and re-created with the - same name. - create_time (google.protobuf.timestamp_pb2.Timestamp): - Output only. The time when the asset was - created. - update_time (google.protobuf.timestamp_pb2.Timestamp): - Output only. The time when the asset was last - updated. - labels (MutableMapping[str, str]): - Optional. User defined labels for the asset. - description (str): - Optional. Description of the asset. - state (google.cloud.dataplex_v1.types.State): - Output only. Current state of the asset. - resource_spec (google.cloud.dataplex_v1.types.Asset.ResourceSpec): - Required. Specification of the resource that - is referenced by this asset. - resource_status (google.cloud.dataplex_v1.types.Asset.ResourceStatus): - Output only. Status of the resource - referenced by this asset. - security_status (google.cloud.dataplex_v1.types.Asset.SecurityStatus): - Output only. Status of the security policy - applied to resource referenced by this asset. - discovery_spec (google.cloud.dataplex_v1.types.Asset.DiscoverySpec): - Optional. Specification of the discovery - feature applied to data referenced by this - asset. When this spec is left unset, the asset - will use the spec set on the parent zone. - discovery_status (google.cloud.dataplex_v1.types.Asset.DiscoveryStatus): - Output only. Status of the discovery feature - applied to data referenced by this asset. - """ - - class SecurityStatus(proto.Message): - r"""Security policy status of the asset. Data security policy, - i.e., readers, writers & owners, should be specified in the - lake/zone/asset IAM policy. - - Attributes: - state (google.cloud.dataplex_v1.types.Asset.SecurityStatus.State): - The current state of the security policy - applied to the attached resource. - message (str): - Additional information about the current - state. - update_time (google.protobuf.timestamp_pb2.Timestamp): - Last update time of the status. - """ - class State(proto.Enum): - r"""The state of the security policy. - - Values: - STATE_UNSPECIFIED (0): - State unspecified. - READY (1): - Security policy has been successfully applied - to the attached resource. - APPLYING (2): - Security policy is in the process of being - applied to the attached resource. - ERROR (3): - Security policy could not be applied to the - attached resource due to errors. - """ - STATE_UNSPECIFIED = 0 - READY = 1 - APPLYING = 2 - ERROR = 3 - - state: 'Asset.SecurityStatus.State' = proto.Field( - proto.ENUM, - number=1, - enum='Asset.SecurityStatus.State', - ) - message: str = proto.Field( - proto.STRING, - number=2, - ) - update_time: timestamp_pb2.Timestamp = proto.Field( - proto.MESSAGE, - number=3, - message=timestamp_pb2.Timestamp, - ) - - class DiscoverySpec(proto.Message): - r"""Settings to manage the metadata discovery and publishing for - an asset. - - - .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields - - Attributes: - enabled (bool): - Optional. Whether discovery is enabled. - include_patterns (MutableSequence[str]): - Optional. 
The list of patterns to apply for
- selecting data to include during discovery if
- only a subset of the data should be considered.
- For Cloud Storage bucket assets, these are
- interpreted as glob patterns used to match
- object names. For BigQuery dataset assets, these
- are interpreted as patterns to match table
- names.
- exclude_patterns (MutableSequence[str]):
- Optional. The list of patterns to apply for
- selecting data to exclude during discovery. For
- Cloud Storage bucket assets, these are
- interpreted as glob patterns used to match
- object names. For BigQuery dataset assets, these
- are interpreted as patterns to match table
- names.
- csv_options (google.cloud.dataplex_v1.types.Asset.DiscoverySpec.CsvOptions):
- Optional. Configuration for CSV data.
- json_options (google.cloud.dataplex_v1.types.Asset.DiscoverySpec.JsonOptions):
- Optional. Configuration for Json data.
- schedule (str):
- Optional. Cron schedule (https://en.wikipedia.org/wiki/Cron)
- for running discovery periodically. Successive discovery
- runs must be scheduled at least 60 minutes apart. The
- default value is to run discovery every 60 minutes. To
- explicitly set a timezone to the cron tab, apply a prefix in
- the cron tab: "CRON_TZ=${IANA_TIME_ZONE}" or
- "TZ=${IANA_TIME_ZONE}". The ${IANA_TIME_ZONE} may only be a
- valid string from the IANA time zone database. For example,
- ``CRON_TZ=America/New_York 1 * * * *``, or
- ``TZ=America/New_York 1 * * * *``.
-
- This field is a member of `oneof`_ ``trigger``.
- """
-
- class CsvOptions(proto.Message):
- r"""Describe CSV and similar semi-structured data formats.
-
- Attributes:
- header_rows (int):
- Optional. The number of rows to interpret as
- header rows that should be skipped when reading
- data rows.
- delimiter (str):
- Optional. The delimiter being used to
- separate values. This defaults to ','.
- encoding (str):
- Optional. The character encoding of the data.
- The default is UTF-8.
- disable_type_inference (bool):
- Optional. Whether to disable the inference of
- data type for CSV data. If true, all columns
- will be registered as strings.
- """
-
- header_rows: int = proto.Field(
- proto.INT32,
- number=1,
- )
- delimiter: str = proto.Field(
- proto.STRING,
- number=2,
- )
- encoding: str = proto.Field(
- proto.STRING,
- number=3,
- )
- disable_type_inference: bool = proto.Field(
- proto.BOOL,
- number=4,
- )
-
- class JsonOptions(proto.Message):
- r"""Describe JSON data format.
-
- Attributes:
- encoding (str):
- Optional. The character encoding of the data.
- The default is UTF-8.
- disable_type_inference (bool):
- Optional. Whether to disable the inference of
- data type for Json data. If true, all columns
- will be registered as their primitive types
- (string, number, or boolean).
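# Editor's note: an illustrative sketch (not part of the diff) of an Asset
# carrying the DiscoverySpec described above; the bucket and schedule
# values are hypothetical. `schedule` is one arm of the `trigger` oneof.
from google.cloud import dataplex_v1

asset = dataplex_v1.Asset(
    display_name="Raw events bucket",
    resource_spec=dataplex_v1.Asset.ResourceSpec(
        name="projects/123456/buckets/raw-events",
        type_=dataplex_v1.Asset.ResourceSpec.Type.STORAGE_BUCKET,
    ),
    discovery_spec=dataplex_v1.Asset.DiscoverySpec(
        enabled=True,
        # Runs every two hours, satisfying the 60-minute minimum spacing.
        schedule="CRON_TZ=America/New_York 0 */2 * * *",
        csv_options=dataplex_v1.Asset.DiscoverySpec.CsvOptions(
            header_rows=1,
            delimiter=",",
        ),
    ),
)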
- """ - - encoding: str = proto.Field( - proto.STRING, - number=1, - ) - disable_type_inference: bool = proto.Field( - proto.BOOL, - number=2, - ) - - enabled: bool = proto.Field( - proto.BOOL, - number=1, - ) - include_patterns: MutableSequence[str] = proto.RepeatedField( - proto.STRING, - number=2, - ) - exclude_patterns: MutableSequence[str] = proto.RepeatedField( - proto.STRING, - number=3, - ) - csv_options: 'Asset.DiscoverySpec.CsvOptions' = proto.Field( - proto.MESSAGE, - number=4, - message='Asset.DiscoverySpec.CsvOptions', - ) - json_options: 'Asset.DiscoverySpec.JsonOptions' = proto.Field( - proto.MESSAGE, - number=5, - message='Asset.DiscoverySpec.JsonOptions', - ) - schedule: str = proto.Field( - proto.STRING, - number=10, - oneof='trigger', - ) - - class ResourceSpec(proto.Message): - r"""Identifies the cloud resource that is referenced by this - asset. - - Attributes: - name (str): - Immutable. Relative name of the cloud resource that contains - the data that is being managed within a lake. For example: - ``projects/{project_number}/buckets/{bucket_id}`` - ``projects/{project_number}/datasets/{dataset_id}`` - type_ (google.cloud.dataplex_v1.types.Asset.ResourceSpec.Type): - Required. Immutable. Type of resource. - read_access_mode (google.cloud.dataplex_v1.types.Asset.ResourceSpec.AccessMode): - Optional. Determines how read permissions are - handled for each asset and their associated - tables. Only available to storage buckets - assets. - """ - class Type(proto.Enum): - r"""Type of resource. - - Values: - TYPE_UNSPECIFIED (0): - Type not specified. - STORAGE_BUCKET (1): - Cloud Storage bucket. - BIGQUERY_DATASET (2): - BigQuery dataset. - """ - TYPE_UNSPECIFIED = 0 - STORAGE_BUCKET = 1 - BIGQUERY_DATASET = 2 - - class AccessMode(proto.Enum): - r"""Access Mode determines how data stored within the resource is - read. This is only applicable to storage bucket assets. - - Values: - ACCESS_MODE_UNSPECIFIED (0): - Access mode unspecified. - DIRECT (1): - Default. Data is accessed directly using - storage APIs. - MANAGED (2): - Data is accessed through a managed interface - using BigQuery APIs. - """ - ACCESS_MODE_UNSPECIFIED = 0 - DIRECT = 1 - MANAGED = 2 - - name: str = proto.Field( - proto.STRING, - number=1, - ) - type_: 'Asset.ResourceSpec.Type' = proto.Field( - proto.ENUM, - number=2, - enum='Asset.ResourceSpec.Type', - ) - read_access_mode: 'Asset.ResourceSpec.AccessMode' = proto.Field( - proto.ENUM, - number=5, - enum='Asset.ResourceSpec.AccessMode', - ) - - class ResourceStatus(proto.Message): - r"""Status of the resource referenced by an asset. - - Attributes: - state (google.cloud.dataplex_v1.types.Asset.ResourceStatus.State): - The current state of the managed resource. - message (str): - Additional information about the current - state. - update_time (google.protobuf.timestamp_pb2.Timestamp): - Last update time of the status. - managed_access_identity (str): - Output only. Service account associated with - the BigQuery Connection. - """ - class State(proto.Enum): - r"""The state of a resource. - - Values: - STATE_UNSPECIFIED (0): - State unspecified. - READY (1): - Resource does not have any errors. - ERROR (2): - Resource has errors. 
- """ - STATE_UNSPECIFIED = 0 - READY = 1 - ERROR = 2 - - state: 'Asset.ResourceStatus.State' = proto.Field( - proto.ENUM, - number=1, - enum='Asset.ResourceStatus.State', - ) - message: str = proto.Field( - proto.STRING, - number=2, - ) - update_time: timestamp_pb2.Timestamp = proto.Field( - proto.MESSAGE, - number=3, - message=timestamp_pb2.Timestamp, - ) - managed_access_identity: str = proto.Field( - proto.STRING, - number=4, - ) - - class DiscoveryStatus(proto.Message): - r"""Status of discovery for an asset. - - Attributes: - state (google.cloud.dataplex_v1.types.Asset.DiscoveryStatus.State): - The current status of the discovery feature. - message (str): - Additional information about the current - state. - update_time (google.protobuf.timestamp_pb2.Timestamp): - Last update time of the status. - last_run_time (google.protobuf.timestamp_pb2.Timestamp): - The start time of the last discovery run. - stats (google.cloud.dataplex_v1.types.Asset.DiscoveryStatus.Stats): - Data Stats of the asset reported by - discovery. - last_run_duration (google.protobuf.duration_pb2.Duration): - The duration of the last discovery run. - """ - class State(proto.Enum): - r"""Current state of discovery. - - Values: - STATE_UNSPECIFIED (0): - State is unspecified. - SCHEDULED (1): - Discovery for the asset is scheduled. - IN_PROGRESS (2): - Discovery for the asset is running. - PAUSED (3): - Discovery for the asset is currently paused - (e.g. due to a lack of available resources). It - will be automatically resumed. - DISABLED (5): - Discovery for the asset is disabled. - """ - STATE_UNSPECIFIED = 0 - SCHEDULED = 1 - IN_PROGRESS = 2 - PAUSED = 3 - DISABLED = 5 - - class Stats(proto.Message): - r"""The aggregated data statistics for the asset reported by - discovery. - - Attributes: - data_items (int): - The count of data items within the referenced - resource. - data_size (int): - The number of stored data bytes within the - referenced resource. - tables (int): - The count of table entities within the - referenced resource. - filesets (int): - The count of fileset entities within the - referenced resource. 
- """ - - data_items: int = proto.Field( - proto.INT64, - number=1, - ) - data_size: int = proto.Field( - proto.INT64, - number=2, - ) - tables: int = proto.Field( - proto.INT64, - number=3, - ) - filesets: int = proto.Field( - proto.INT64, - number=4, - ) - - state: 'Asset.DiscoveryStatus.State' = proto.Field( - proto.ENUM, - number=1, - enum='Asset.DiscoveryStatus.State', - ) - message: str = proto.Field( - proto.STRING, - number=2, - ) - update_time: timestamp_pb2.Timestamp = proto.Field( - proto.MESSAGE, - number=3, - message=timestamp_pb2.Timestamp, - ) - last_run_time: timestamp_pb2.Timestamp = proto.Field( - proto.MESSAGE, - number=4, - message=timestamp_pb2.Timestamp, - ) - stats: 'Asset.DiscoveryStatus.Stats' = proto.Field( - proto.MESSAGE, - number=6, - message='Asset.DiscoveryStatus.Stats', - ) - last_run_duration: duration_pb2.Duration = proto.Field( - proto.MESSAGE, - number=7, - message=duration_pb2.Duration, - ) - - name: str = proto.Field( - proto.STRING, - number=1, - ) - display_name: str = proto.Field( - proto.STRING, - number=2, - ) - uid: str = proto.Field( - proto.STRING, - number=3, - ) - create_time: timestamp_pb2.Timestamp = proto.Field( - proto.MESSAGE, - number=4, - message=timestamp_pb2.Timestamp, - ) - update_time: timestamp_pb2.Timestamp = proto.Field( - proto.MESSAGE, - number=5, - message=timestamp_pb2.Timestamp, - ) - labels: MutableMapping[str, str] = proto.MapField( - proto.STRING, - proto.STRING, - number=6, - ) - description: str = proto.Field( - proto.STRING, - number=7, - ) - state: 'State' = proto.Field( - proto.ENUM, - number=8, - enum='State', - ) - resource_spec: ResourceSpec = proto.Field( - proto.MESSAGE, - number=100, - message=ResourceSpec, - ) - resource_status: ResourceStatus = proto.Field( - proto.MESSAGE, - number=101, - message=ResourceStatus, - ) - security_status: SecurityStatus = proto.Field( - proto.MESSAGE, - number=103, - message=SecurityStatus, - ) - discovery_spec: DiscoverySpec = proto.Field( - proto.MESSAGE, - number=106, - message=DiscoverySpec, - ) - discovery_status: DiscoveryStatus = proto.Field( - proto.MESSAGE, - number=107, - message=DiscoveryStatus, - ) - - -__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/types/security.py b/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/types/security.py deleted file mode 100644 index 3fa729be6197..000000000000 --- a/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/types/security.py +++ /dev/null @@ -1,90 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# -from __future__ import annotations - -from typing import MutableMapping, MutableSequence - -import proto # type: ignore - - -__protobuf__ = proto.module( - package='google.cloud.dataplex.v1', - manifest={ - 'ResourceAccessSpec', - 'DataAccessSpec', - }, -) - - -class ResourceAccessSpec(proto.Message): - r"""ResourceAccessSpec holds the access control configuration to - be enforced on the resources, for example, Cloud Storage bucket, - BigQuery dataset, BigQuery table. - - Attributes: - readers (MutableSequence[str]): - Optional. The format of strings follows the - pattern followed by IAM in the bindings. - user:{email}, serviceAccount:{email} - group:{email}. The set of principals to be - granted reader role on the resource. - writers (MutableSequence[str]): - Optional. The set of principals to be granted - writer role on the resource. - owners (MutableSequence[str]): - Optional. The set of principals to be granted - owner role on the resource. - """ - - readers: MutableSequence[str] = proto.RepeatedField( - proto.STRING, - number=1, - ) - writers: MutableSequence[str] = proto.RepeatedField( - proto.STRING, - number=2, - ) - owners: MutableSequence[str] = proto.RepeatedField( - proto.STRING, - number=3, - ) - - -class DataAccessSpec(proto.Message): - r"""DataAccessSpec holds the access control configuration to be - enforced on data stored within resources (eg: rows, columns in - BigQuery Tables). When associated with data, the data is only - accessible to principals explicitly granted access through the - DataAccessSpec. Principals with access to the containing - resource are not implicitly granted access. - - Attributes: - readers (MutableSequence[str]): - Optional. The format of strings follows the - pattern followed by IAM in the bindings. - user:{email}, serviceAccount:{email} - group:{email}. The set of principals to be - granted reader role on data stored within - resources. - """ - - readers: MutableSequence[str] = proto.RepeatedField( - proto.STRING, - number=1, - ) - - -__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/types/service.py b/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/types/service.py deleted file mode 100644 index 5493ed665a56..000000000000 --- a/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/types/service.py +++ /dev/null @@ -1,1395 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# -from __future__ import annotations - -from typing import MutableMapping, MutableSequence - -import proto # type: ignore - -from google.cloud.dataplex_v1.types import analyze -from google.cloud.dataplex_v1.types import resources -from google.cloud.dataplex_v1.types import tasks as gcd_tasks -from google.protobuf import field_mask_pb2 # type: ignore -from google.protobuf import timestamp_pb2 # type: ignore - - -__protobuf__ = proto.module( - package='google.cloud.dataplex.v1', - manifest={ - 'CreateLakeRequest', - 'UpdateLakeRequest', - 'DeleteLakeRequest', - 'ListLakesRequest', - 'ListLakesResponse', - 'ListLakeActionsRequest', - 'ListActionsResponse', - 'GetLakeRequest', - 'CreateZoneRequest', - 'UpdateZoneRequest', - 'DeleteZoneRequest', - 'ListZonesRequest', - 'ListZonesResponse', - 'ListZoneActionsRequest', - 'GetZoneRequest', - 'CreateAssetRequest', - 'UpdateAssetRequest', - 'DeleteAssetRequest', - 'ListAssetsRequest', - 'ListAssetsResponse', - 'ListAssetActionsRequest', - 'GetAssetRequest', - 'OperationMetadata', - 'CreateTaskRequest', - 'UpdateTaskRequest', - 'DeleteTaskRequest', - 'ListTasksRequest', - 'ListTasksResponse', - 'GetTaskRequest', - 'GetJobRequest', - 'RunTaskRequest', - 'RunTaskResponse', - 'ListJobsRequest', - 'ListJobsResponse', - 'CancelJobRequest', - 'CreateEnvironmentRequest', - 'UpdateEnvironmentRequest', - 'DeleteEnvironmentRequest', - 'ListEnvironmentsRequest', - 'ListEnvironmentsResponse', - 'GetEnvironmentRequest', - 'ListSessionsRequest', - 'ListSessionsResponse', - }, -) - - -class CreateLakeRequest(proto.Message): - r"""Create lake request. - - Attributes: - parent (str): - Required. The resource name of the lake location, of the - form: projects/{project_number}/locations/{location_id} - where ``location_id`` refers to a GCP region. - lake_id (str): - Required. Lake identifier. This ID will be used to generate - names such as database and dataset names when publishing - metadata to Hive Metastore and BigQuery. - - - Must contain only lowercase letters, numbers and hyphens. - - Must start with a letter. - - Must end with a number or a letter. - - Must be between 1-63 characters. - - Must be unique within the customer project / location. - lake (google.cloud.dataplex_v1.types.Lake): - Required. Lake resource - validate_only (bool): - Optional. Only validate the request, but do - not perform mutations. The default is false. - """ - - parent: str = proto.Field( - proto.STRING, - number=1, - ) - lake_id: str = proto.Field( - proto.STRING, - number=2, - ) - lake: resources.Lake = proto.Field( - proto.MESSAGE, - number=3, - message=resources.Lake, - ) - validate_only: bool = proto.Field( - proto.BOOL, - number=4, - ) - - -class UpdateLakeRequest(proto.Message): - r"""Update lake request. - - Attributes: - update_mask (google.protobuf.field_mask_pb2.FieldMask): - Required. Mask of fields to update. - lake (google.cloud.dataplex_v1.types.Lake): - Required. Update description. Only fields specified in - ``update_mask`` are updated. - validate_only (bool): - Optional. Only validate the request, but do - not perform mutations. The default is false. - """ - - update_mask: field_mask_pb2.FieldMask = proto.Field( - proto.MESSAGE, - number=1, - message=field_mask_pb2.FieldMask, - ) - lake: resources.Lake = proto.Field( - proto.MESSAGE, - number=2, - message=resources.Lake, - ) - validate_only: bool = proto.Field( - proto.BOOL, - number=3, - ) - - -class DeleteLakeRequest(proto.Message): - r"""Delete lake request. - - Attributes: - name (str): - Required. 
The resource name of the lake: - ``projects/{project_number}/locations/{location_id}/lakes/{lake_id}``. - """ - - name: str = proto.Field( - proto.STRING, - number=1, - ) - - -class ListLakesRequest(proto.Message): - r"""List lakes request. - - Attributes: - parent (str): - Required. The resource name of the lake location, of the - form: ``projects/{project_number}/locations/{location_id}`` - where ``location_id`` refers to a GCP region. - page_size (int): - Optional. Maximum number of Lakes to return. - The service may return fewer than this value. If - unspecified, at most 10 lakes will be returned. - The maximum value is 1000; values above 1000 - will be coerced to 1000. - page_token (str): - Optional. Page token received from a previous ``ListLakes`` - call. Provide this to retrieve the subsequent page. When - paginating, all other parameters provided to ``ListLakes`` - must match the call that provided the page token. - filter (str): - Optional. Filter request. - order_by (str): - Optional. Order by fields for the result. - """ - - parent: str = proto.Field( - proto.STRING, - number=1, - ) - page_size: int = proto.Field( - proto.INT32, - number=2, - ) - page_token: str = proto.Field( - proto.STRING, - number=3, - ) - filter: str = proto.Field( - proto.STRING, - number=4, - ) - order_by: str = proto.Field( - proto.STRING, - number=5, - ) - - -class ListLakesResponse(proto.Message): - r"""List lakes response. - - Attributes: - lakes (MutableSequence[google.cloud.dataplex_v1.types.Lake]): - Lakes under the given parent location. - next_page_token (str): - Token to retrieve the next page of results, - or empty if there are no more results in the - list. - unreachable_locations (MutableSequence[str]): - Locations that could not be reached. - """ - - @property - def raw_page(self): - return self - - lakes: MutableSequence[resources.Lake] = proto.RepeatedField( - proto.MESSAGE, - number=1, - message=resources.Lake, - ) - next_page_token: str = proto.Field( - proto.STRING, - number=2, - ) - unreachable_locations: MutableSequence[str] = proto.RepeatedField( - proto.STRING, - number=3, - ) - - -class ListLakeActionsRequest(proto.Message): - r"""List lake actions request. - - Attributes: - parent (str): - Required. The resource name of the parent lake: - ``projects/{project_number}/locations/{location_id}/lakes/{lake_id}``. - page_size (int): - Optional. Maximum number of actions to - return. The service may return fewer than this - value. If unspecified, at most 10 actions will - be returned. The maximum value is 1000; values - above 1000 will be coerced to 1000. - page_token (str): - Optional. Page token received from a previous - ``ListLakeActions`` call. Provide this to retrieve the - subsequent page. When paginating, all other parameters - provided to ``ListLakeActions`` must match the call that - provided the page token. - """ - - parent: str = proto.Field( - proto.STRING, - number=1, - ) - page_size: int = proto.Field( - proto.INT32, - number=2, - ) - page_token: str = proto.Field( - proto.STRING, - number=3, - ) - - -class ListActionsResponse(proto.Message): - r"""List actions response. - - Attributes: - actions (MutableSequence[google.cloud.dataplex_v1.types.Action]): - Actions under the given parent - lake/zone/asset. - next_page_token (str): - Token to retrieve the next page of results, - or empty if there are no more results in the - list. 
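# Editor's note: a sketch (not part of the diff) of the paging behavior
# described by ListLakesRequest/ListLakesResponse above; the GAPIC pager
# hides next_page_token handling. The project and location are
# hypothetical.
from google.cloud import dataplex_v1

client = dataplex_v1.DataplexServiceClient()
request = dataplex_v1.ListLakesRequest(
    parent="projects/my-project/locations/us-central1",
    page_size=100,  # values above 1000 are coerced to 1000
)
# Iterating the pager fetches subsequent pages transparently.
for lake in client.list_lakes(request=request):
    print(lake.name, lake.state.name)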
- """ - - @property - def raw_page(self): - return self - - actions: MutableSequence[resources.Action] = proto.RepeatedField( - proto.MESSAGE, - number=1, - message=resources.Action, - ) - next_page_token: str = proto.Field( - proto.STRING, - number=2, - ) - - -class GetLakeRequest(proto.Message): - r"""Get lake request. - - Attributes: - name (str): - Required. The resource name of the lake: - ``projects/{project_number}/locations/{location_id}/lakes/{lake_id}``. - """ - - name: str = proto.Field( - proto.STRING, - number=1, - ) - - -class CreateZoneRequest(proto.Message): - r"""Create zone request. - - Attributes: - parent (str): - Required. The resource name of the parent lake: - ``projects/{project_number}/locations/{location_id}/lakes/{lake_id}``. - zone_id (str): - Required. Zone identifier. This ID will be used to generate - names such as database and dataset names when publishing - metadata to Hive Metastore and BigQuery. - - - Must contain only lowercase letters, numbers and hyphens. - - Must start with a letter. - - Must end with a number or a letter. - - Must be between 1-63 characters. - - Must be unique across all lakes from all locations in a - project. - - Must not be one of the reserved IDs (i.e. "default", - "global-temp") - zone (google.cloud.dataplex_v1.types.Zone): - Required. Zone resource. - validate_only (bool): - Optional. Only validate the request, but do - not perform mutations. The default is false. - """ - - parent: str = proto.Field( - proto.STRING, - number=1, - ) - zone_id: str = proto.Field( - proto.STRING, - number=2, - ) - zone: resources.Zone = proto.Field( - proto.MESSAGE, - number=3, - message=resources.Zone, - ) - validate_only: bool = proto.Field( - proto.BOOL, - number=4, - ) - - -class UpdateZoneRequest(proto.Message): - r"""Update zone request. - - Attributes: - update_mask (google.protobuf.field_mask_pb2.FieldMask): - Required. Mask of fields to update. - zone (google.cloud.dataplex_v1.types.Zone): - Required. Update description. Only fields specified in - ``update_mask`` are updated. - validate_only (bool): - Optional. Only validate the request, but do - not perform mutations. The default is false. - """ - - update_mask: field_mask_pb2.FieldMask = proto.Field( - proto.MESSAGE, - number=1, - message=field_mask_pb2.FieldMask, - ) - zone: resources.Zone = proto.Field( - proto.MESSAGE, - number=2, - message=resources.Zone, - ) - validate_only: bool = proto.Field( - proto.BOOL, - number=3, - ) - - -class DeleteZoneRequest(proto.Message): - r"""Delete zone request. - - Attributes: - name (str): - Required. The resource name of the zone: - ``projects/{project_number}/locations/{location_id}/lakes/{lake_id}/zones/{zone_id}``. - """ - - name: str = proto.Field( - proto.STRING, - number=1, - ) - - -class ListZonesRequest(proto.Message): - r"""List zones request. - - Attributes: - parent (str): - Required. The resource name of the parent lake: - ``projects/{project_number}/locations/{location_id}/lakes/{lake_id}``. - page_size (int): - Optional. Maximum number of zones to return. - The service may return fewer than this value. If - unspecified, at most 10 zones will be returned. - The maximum value is 1000; values above 1000 - will be coerced to 1000. - page_token (str): - Optional. Page token received from a previous ``ListZones`` - call. Provide this to retrieve the subsequent page. When - paginating, all other parameters provided to ``ListZones`` - must match the call that provided the page token. - filter (str): - Optional. Filter request. 
- order_by (str): - Optional. Order by fields for the result. - """ - - parent: str = proto.Field( - proto.STRING, - number=1, - ) - page_size: int = proto.Field( - proto.INT32, - number=2, - ) - page_token: str = proto.Field( - proto.STRING, - number=3, - ) - filter: str = proto.Field( - proto.STRING, - number=4, - ) - order_by: str = proto.Field( - proto.STRING, - number=5, - ) - - -class ListZonesResponse(proto.Message): - r"""List zones response. - - Attributes: - zones (MutableSequence[google.cloud.dataplex_v1.types.Zone]): - Zones under the given parent lake. - next_page_token (str): - Token to retrieve the next page of results, - or empty if there are no more results in the - list. - """ - - @property - def raw_page(self): - return self - - zones: MutableSequence[resources.Zone] = proto.RepeatedField( - proto.MESSAGE, - number=1, - message=resources.Zone, - ) - next_page_token: str = proto.Field( - proto.STRING, - number=2, - ) - - -class ListZoneActionsRequest(proto.Message): - r"""List zone actions request. - - Attributes: - parent (str): - Required. The resource name of the parent zone: - ``projects/{project_number}/locations/{location_id}/lakes/{lake_id}/zones/{zone_id}``. - page_size (int): - Optional. Maximum number of actions to - return. The service may return fewer than this - value. If unspecified, at most 10 actions will - be returned. The maximum value is 1000; values - above 1000 will be coerced to 1000. - page_token (str): - Optional. Page token received from a previous - ``ListZoneActions`` call. Provide this to retrieve the - subsequent page. When paginating, all other parameters - provided to ``ListZoneActions`` must match the call that - provided the page token. - """ - - parent: str = proto.Field( - proto.STRING, - number=1, - ) - page_size: int = proto.Field( - proto.INT32, - number=2, - ) - page_token: str = proto.Field( - proto.STRING, - number=3, - ) - - -class GetZoneRequest(proto.Message): - r"""Get zone request. - - Attributes: - name (str): - Required. The resource name of the zone: - ``projects/{project_number}/locations/{location_id}/lakes/{lake_id}/zones/{zone_id}``. - """ - - name: str = proto.Field( - proto.STRING, - number=1, - ) - - -class CreateAssetRequest(proto.Message): - r"""Create asset request. - - Attributes: - parent (str): - Required. The resource name of the parent zone: - ``projects/{project_number}/locations/{location_id}/lakes/{lake_id}/zones/{zone_id}``. - asset_id (str): - Required. Asset identifier. This ID will be used to generate - names such as table names when publishing metadata to Hive - Metastore and BigQuery. - - - Must contain only lowercase letters, numbers and hyphens. - - Must start with a letter. - - Must end with a number or a letter. - - Must be between 1-63 characters. - - Must be unique within the zone. - asset (google.cloud.dataplex_v1.types.Asset): - Required. Asset resource. - validate_only (bool): - Optional. Only validate the request, but do - not perform mutations. The default is false. - """ - - parent: str = proto.Field( - proto.STRING, - number=1, - ) - asset_id: str = proto.Field( - proto.STRING, - number=2, - ) - asset: resources.Asset = proto.Field( - proto.MESSAGE, - number=3, - message=resources.Asset, - ) - validate_only: bool = proto.Field( - proto.BOOL, - number=4, - ) - - -class UpdateAssetRequest(proto.Message): - r"""Update asset request. - - Attributes: - update_mask (google.protobuf.field_mask_pb2.FieldMask): - Required. Mask of fields to update. 
- asset (google.cloud.dataplex_v1.types.Asset): - Required. Update description. Only fields specified in - ``update_mask`` are updated. - validate_only (bool): - Optional. Only validate the request, but do - not perform mutations. The default is false. - """ - - update_mask: field_mask_pb2.FieldMask = proto.Field( - proto.MESSAGE, - number=1, - message=field_mask_pb2.FieldMask, - ) - asset: resources.Asset = proto.Field( - proto.MESSAGE, - number=2, - message=resources.Asset, - ) - validate_only: bool = proto.Field( - proto.BOOL, - number=3, - ) - - -class DeleteAssetRequest(proto.Message): - r"""Delete asset request. - - Attributes: - name (str): - Required. The resource name of the asset: - ``projects/{project_number}/locations/{location_id}/lakes/{lake_id}/zones/{zone_id}/assets/{asset_id}``. - """ - - name: str = proto.Field( - proto.STRING, - number=1, - ) - - -class ListAssetsRequest(proto.Message): - r"""List assets request. - - Attributes: - parent (str): - Required. The resource name of the parent zone: - ``projects/{project_number}/locations/{location_id}/lakes/{lake_id}/zones/{zone_id}``. - page_size (int): - Optional. Maximum number of asset to return. - The service may return fewer than this value. If - unspecified, at most 10 assets will be returned. - The maximum value is 1000; values above 1000 - will be coerced to 1000. - page_token (str): - Optional. Page token received from a previous ``ListAssets`` - call. Provide this to retrieve the subsequent page. When - paginating, all other parameters provided to ``ListAssets`` - must match the call that provided the page token. - filter (str): - Optional. Filter request. - order_by (str): - Optional. Order by fields for the result. - """ - - parent: str = proto.Field( - proto.STRING, - number=1, - ) - page_size: int = proto.Field( - proto.INT32, - number=2, - ) - page_token: str = proto.Field( - proto.STRING, - number=3, - ) - filter: str = proto.Field( - proto.STRING, - number=4, - ) - order_by: str = proto.Field( - proto.STRING, - number=5, - ) - - -class ListAssetsResponse(proto.Message): - r"""List assets response. - - Attributes: - assets (MutableSequence[google.cloud.dataplex_v1.types.Asset]): - Asset under the given parent zone. - next_page_token (str): - Token to retrieve the next page of results, - or empty if there are no more results in the - list. - """ - - @property - def raw_page(self): - return self - - assets: MutableSequence[resources.Asset] = proto.RepeatedField( - proto.MESSAGE, - number=1, - message=resources.Asset, - ) - next_page_token: str = proto.Field( - proto.STRING, - number=2, - ) - - -class ListAssetActionsRequest(proto.Message): - r"""List asset actions request. - - Attributes: - parent (str): - Required. The resource name of the parent asset: - ``projects/{project_number}/locations/{location_id}/lakes/{lake_id}/zones/{zone_id}/assets/{asset_id}``. - page_size (int): - Optional. Maximum number of actions to - return. The service may return fewer than this - value. If unspecified, at most 10 actions will - be returned. The maximum value is 1000; values - above 1000 will be coerced to 1000. - page_token (str): - Optional. Page token received from a previous - ``ListAssetActions`` call. Provide this to retrieve the - subsequent page. When paginating, all other parameters - provided to ``ListAssetActions`` must match the call that - provided the page token. 
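A minimal sketch of the ``update_mask`` semantics described above (only the masked fields are written), with hypothetical resource names:

    from google.cloud import dataplex_v1
    from google.protobuf import field_mask_pb2

    request = dataplex_v1.UpdateAssetRequest(
        # Only `description` is updated; other fields on `asset` are ignored.
        update_mask=field_mask_pb2.FieldMask(paths=["description"]),
        asset=dataplex_v1.Asset(
            name="projects/123/locations/us-central1/lakes/l1/zones/z1/assets/a1",
            description="Refreshed description",
        ),
        validate_only=True,  # dry run: validate the request, do not mutate
    )
    operation = dataplex_v1.DataplexServiceClient().update_asset(request=request)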
- """ - - parent: str = proto.Field( - proto.STRING, - number=1, - ) - page_size: int = proto.Field( - proto.INT32, - number=2, - ) - page_token: str = proto.Field( - proto.STRING, - number=3, - ) - - -class GetAssetRequest(proto.Message): - r"""Get asset request. - - Attributes: - name (str): - Required. The resource name of the asset: - ``projects/{project_number}/locations/{location_id}/lakes/{lake_id}/zones/{zone_id}/assets/{asset_id}``. - """ - - name: str = proto.Field( - proto.STRING, - number=1, - ) - - -class OperationMetadata(proto.Message): - r"""Represents the metadata of a long-running operation. - - Attributes: - create_time (google.protobuf.timestamp_pb2.Timestamp): - Output only. The time the operation was - created. - end_time (google.protobuf.timestamp_pb2.Timestamp): - Output only. The time the operation finished - running. - target (str): - Output only. Server-defined resource path for - the target of the operation. - verb (str): - Output only. Name of the verb executed by the - operation. - status_message (str): - Output only. Human-readable status of the - operation, if any. - requested_cancellation (bool): - Output only. Identifies whether the user has requested - cancellation of the operation. Operations that have - successfully been cancelled have [Operation.error][] value - with a [google.rpc.Status.code][google.rpc.Status.code] of - 1, corresponding to ``Code.CANCELLED``. - api_version (str): - Output only. API version used to start the - operation. - """ - - create_time: timestamp_pb2.Timestamp = proto.Field( - proto.MESSAGE, - number=1, - message=timestamp_pb2.Timestamp, - ) - end_time: timestamp_pb2.Timestamp = proto.Field( - proto.MESSAGE, - number=2, - message=timestamp_pb2.Timestamp, - ) - target: str = proto.Field( - proto.STRING, - number=3, - ) - verb: str = proto.Field( - proto.STRING, - number=4, - ) - status_message: str = proto.Field( - proto.STRING, - number=5, - ) - requested_cancellation: bool = proto.Field( - proto.BOOL, - number=6, - ) - api_version: str = proto.Field( - proto.STRING, - number=7, - ) - - -class CreateTaskRequest(proto.Message): - r"""Create task request. - - Attributes: - parent (str): - Required. The resource name of the parent lake: - ``projects/{project_number}/locations/{location_id}/lakes/{lake_id}``. - task_id (str): - Required. Task identifier. - task (google.cloud.dataplex_v1.types.Task): - Required. Task resource. - validate_only (bool): - Optional. Only validate the request, but do - not perform mutations. The default is false. - """ - - parent: str = proto.Field( - proto.STRING, - number=1, - ) - task_id: str = proto.Field( - proto.STRING, - number=2, - ) - task: gcd_tasks.Task = proto.Field( - proto.MESSAGE, - number=3, - message=gcd_tasks.Task, - ) - validate_only: bool = proto.Field( - proto.BOOL, - number=4, - ) - - -class UpdateTaskRequest(proto.Message): - r"""Update task request. - - Attributes: - update_mask (google.protobuf.field_mask_pb2.FieldMask): - Required. Mask of fields to update. - task (google.cloud.dataplex_v1.types.Task): - Required. Update description. Only fields specified in - ``update_mask`` are updated. - validate_only (bool): - Optional. Only validate the request, but do - not perform mutations. The default is false. 
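Every Create/Update request above carries ``validate_only``, which turns the call into a dry run; a hedged sketch for task creation (the Task spec types are defined in tasks.py further down, and all names here are hypothetical):

    from google.cloud import dataplex_v1

    task = dataplex_v1.Task(
        trigger_spec=dataplex_v1.Task.TriggerSpec(
            type_=dataplex_v1.Task.TriggerSpec.Type.ON_DEMAND,
        ),
        execution_spec=dataplex_v1.Task.ExecutionSpec(
            service_account="sa@my-project.iam.gserviceaccount.com",
        ),
        spark=dataplex_v1.Task.SparkTaskConfig(main_class="com.example.Driver"),
    )
    request = dataplex_v1.CreateTaskRequest(
        parent="projects/123/locations/us-central1/lakes/my-lake",
        task_id="my-task",
        task=task,
        validate_only=True,  # validate the request; nothing is created
    )
    dataplex_v1.DataplexServiceClient().create_task(request=request)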
-    """
-
-    update_mask: field_mask_pb2.FieldMask = proto.Field(
-        proto.MESSAGE,
-        number=1,
-        message=field_mask_pb2.FieldMask,
-    )
-    task: gcd_tasks.Task = proto.Field(
-        proto.MESSAGE,
-        number=2,
-        message=gcd_tasks.Task,
-    )
-    validate_only: bool = proto.Field(
-        proto.BOOL,
-        number=3,
-    )
-
-
-class DeleteTaskRequest(proto.Message):
-    r"""Delete task request.
-
-    Attributes:
-        name (str):
-            Required. The resource name of the task:
-            ``projects/{project_number}/locations/{location_id}/lakes/{lake_id}/tasks/{task_id}``.
-    """
-
-    name: str = proto.Field(
-        proto.STRING,
-        number=1,
-    )
-
-
-class ListTasksRequest(proto.Message):
-    r"""List tasks request.
-
-    Attributes:
-        parent (str):
-            Required. The resource name of the parent lake:
-            ``projects/{project_number}/locations/{location_id}/lakes/{lake_id}``.
-        page_size (int):
-            Optional. Maximum number of tasks to return.
-            The service may return fewer than this value. If
-            unspecified, at most 10 tasks will be returned.
-            The maximum value is 1000; values above 1000
-            will be coerced to 1000.
-        page_token (str):
-            Optional. Page token received from a previous ``ListTasks``
-            call. Provide this to retrieve the subsequent page. When
-            paginating, all other parameters provided to ``ListTasks``
-            must match the call that provided the page token.
-        filter (str):
-            Optional. Filter request.
-        order_by (str):
-            Optional. Order by fields for the result.
-    """
-
-    parent: str = proto.Field(
-        proto.STRING,
-        number=1,
-    )
-    page_size: int = proto.Field(
-        proto.INT32,
-        number=2,
-    )
-    page_token: str = proto.Field(
-        proto.STRING,
-        number=3,
-    )
-    filter: str = proto.Field(
-        proto.STRING,
-        number=4,
-    )
-    order_by: str = proto.Field(
-        proto.STRING,
-        number=5,
-    )
-
-
-class ListTasksResponse(proto.Message):
-    r"""List tasks response.
-
-    Attributes:
-        tasks (MutableSequence[google.cloud.dataplex_v1.types.Task]):
-            Tasks under the given parent lake.
-        next_page_token (str):
-            Token to retrieve the next page of results,
-            or empty if there are no more results in the
-            list.
-        unreachable_locations (MutableSequence[str]):
-            Locations that could not be reached.
-    """
-
-    @property
-    def raw_page(self):
-        return self
-
-    tasks: MutableSequence[gcd_tasks.Task] = proto.RepeatedField(
-        proto.MESSAGE,
-        number=1,
-        message=gcd_tasks.Task,
-    )
-    next_page_token: str = proto.Field(
-        proto.STRING,
-        number=2,
-    )
-    unreachable_locations: MutableSequence[str] = proto.RepeatedField(
-        proto.STRING,
-        number=3,
-    )
-
-
-class GetTaskRequest(proto.Message):
-    r"""Get task request.
-
-    Attributes:
-        name (str):
-            Required. The resource name of the task:
-            ``projects/{project_number}/locations/{location_id}/lakes/{lake_id}/tasks/{task_id}``.
-    """
-
-    name: str = proto.Field(
-        proto.STRING,
-        number=1,
-    )
-
-
-class GetJobRequest(proto.Message):
-    r"""Get job request.
-
-    Attributes:
-        name (str):
-            Required. The resource name of the job:
-            ``projects/{project_number}/locations/{location_id}/lakes/{lake_id}/tasks/{task_id}/jobs/{job_id}``.
-    """
-
-    name: str = proto.Field(
-        proto.STRING,
-        number=1,
-    )
-
-
-class RunTaskRequest(proto.Message):
-    r"""Run task request.
-
-    Attributes:
-        name (str):
-            Required. The resource name of the task:
-            ``projects/{project_number}/locations/{location_id}/lakes/{lake_id}/tasks/{task_id}``.
-        labels (MutableMapping[str, str]):
-            Optional. User-defined labels for the task.
-            If the map is left empty, the task will run with
-            existing labels from the task definition. If the
-            map contains an entry with a new key, the same
-            will be added to the existing set of labels. If
-            the map contains an entry with an existing label
-            key in the task definition, the task will run
-            with the new label value for that entry. Clearing
-            an existing label requires the label value to be
-            explicitly set to a hyphen "-". The label value
-            cannot be empty.
-        args (MutableMapping[str, str]):
-            Optional. Execution spec arguments. If the
-            map is left empty, the task will run with
-            existing execution spec args from the task
-            definition. If the map contains an entry with a
-            new key, the same will be added to the existing
-            set of args. If the map contains an entry with
-            an existing arg key in the task definition, the
-            task will run with the new arg value for that
-            entry. Clearing an existing arg requires the arg
-            value to be explicitly set to a hyphen "-". The
-            arg value cannot be empty.
-    """
-
-    name: str = proto.Field(
-        proto.STRING,
-        number=1,
-    )
-    labels: MutableMapping[str, str] = proto.MapField(
-        proto.STRING,
-        proto.STRING,
-        number=3,
-    )
-    args: MutableMapping[str, str] = proto.MapField(
-        proto.STRING,
-        proto.STRING,
-        number=4,
-    )
-
-
-class RunTaskResponse(proto.Message):
-    r"""Run task response.
-
-    Attributes:
-        job (google.cloud.dataplex_v1.types.Job):
-            Job created by the RunTask API.
-    """
-
-    job: gcd_tasks.Job = proto.Field(
-        proto.MESSAGE,
-        number=1,
-        message=gcd_tasks.Job,
-    )
-
-
-class ListJobsRequest(proto.Message):
-    r"""List jobs request.
-
-    Attributes:
-        parent (str):
-            Required. The resource name of the parent task:
-            ``projects/{project_number}/locations/{location_id}/lakes/{lake_id}/tasks/{task_id}``.
-        page_size (int):
-            Optional. Maximum number of jobs to return.
-            The service may return fewer than this value. If
-            unspecified, at most 10 jobs will be returned.
-            The maximum value is 1000; values above 1000
-            will be coerced to 1000.
-        page_token (str):
-            Optional. Page token received from a previous ``ListJobs``
-            call. Provide this to retrieve the subsequent page. When
-            paginating, all other parameters provided to ``ListJobs``
-            must match the call that provided the page token.
-    """
-
-    parent: str = proto.Field(
-        proto.STRING,
-        number=1,
-    )
-    page_size: int = proto.Field(
-        proto.INT32,
-        number=2,
-    )
-    page_token: str = proto.Field(
-        proto.STRING,
-        number=3,
-    )
-
-
-class ListJobsResponse(proto.Message):
-    r"""List jobs response.
-
-    Attributes:
-        jobs (MutableSequence[google.cloud.dataplex_v1.types.Job]):
-            Jobs under a given task.
-        next_page_token (str):
-            Token to retrieve the next page of results,
-            or empty if there are no more results in the
-            list.
-    """
-
-    @property
-    def raw_page(self):
-        return self
-
-    jobs: MutableSequence[gcd_tasks.Job] = proto.RepeatedField(
-        proto.MESSAGE,
-        number=1,
-        message=gcd_tasks.Job,
-    )
-    next_page_token: str = proto.Field(
-        proto.STRING,
-        number=2,
-    )
-
-
-class CancelJobRequest(proto.Message):
-    r"""Cancel job request.
-
-    Attributes:
-        name (str):
-            Required. The resource name of the job:
-            ``projects/{project_number}/locations/{location_id}/lakes/{lake_id}/tasks/{task_id}/jobs/{job_id}``.
-    """
-
-    name: str = proto.Field(
-        proto.STRING,
-        number=1,
-    )
-
-
-class CreateEnvironmentRequest(proto.Message):
-    r"""Create environment request.
-
-    Attributes:
-        parent (str):
-            Required. The resource name of the parent lake:
-            ``projects/{project_id}/locations/{location_id}/lakes/{lake_id}``.
-        environment_id (str):
-            Required. Environment identifier.
- - - Must contain only lowercase letters, numbers and hyphens. - - Must start with a letter. - - Must be between 1-63 characters. - - Must end with a number or a letter. - - Must be unique within the lake. - environment (google.cloud.dataplex_v1.types.Environment): - Required. Environment resource. - validate_only (bool): - Optional. Only validate the request, but do - not perform mutations. The default is false. - """ - - parent: str = proto.Field( - proto.STRING, - number=1, - ) - environment_id: str = proto.Field( - proto.STRING, - number=2, - ) - environment: analyze.Environment = proto.Field( - proto.MESSAGE, - number=3, - message=analyze.Environment, - ) - validate_only: bool = proto.Field( - proto.BOOL, - number=4, - ) - - -class UpdateEnvironmentRequest(proto.Message): - r"""Update environment request. - - Attributes: - update_mask (google.protobuf.field_mask_pb2.FieldMask): - Required. Mask of fields to update. - environment (google.cloud.dataplex_v1.types.Environment): - Required. Update description. Only fields specified in - ``update_mask`` are updated. - validate_only (bool): - Optional. Only validate the request, but do - not perform mutations. The default is false. - """ - - update_mask: field_mask_pb2.FieldMask = proto.Field( - proto.MESSAGE, - number=1, - message=field_mask_pb2.FieldMask, - ) - environment: analyze.Environment = proto.Field( - proto.MESSAGE, - number=2, - message=analyze.Environment, - ) - validate_only: bool = proto.Field( - proto.BOOL, - number=3, - ) - - -class DeleteEnvironmentRequest(proto.Message): - r"""Delete environment request. - - Attributes: - name (str): - Required. The resource name of the environment: - ``projects/{project_id}/locations/{location_id}/lakes/{lake_id}/environments/{environment_id}``. - """ - - name: str = proto.Field( - proto.STRING, - number=1, - ) - - -class ListEnvironmentsRequest(proto.Message): - r"""List environments request. - - Attributes: - parent (str): - Required. The resource name of the parent lake: - ``projects/{project_id}/locations/{location_id}/lakes/{lake_id}``. - page_size (int): - Optional. Maximum number of environments to - return. The service may return fewer than this - value. If unspecified, at most 10 environments - will be returned. The maximum value is 1000; - values above 1000 will be coerced to 1000. - page_token (str): - Optional. Page token received from a previous - ``ListEnvironments`` call. Provide this to retrieve the - subsequent page. When paginating, all other parameters - provided to ``ListEnvironments`` must match the call that - provided the page token. - filter (str): - Optional. Filter request. - order_by (str): - Optional. Order by fields for the result. - """ - - parent: str = proto.Field( - proto.STRING, - number=1, - ) - page_size: int = proto.Field( - proto.INT32, - number=2, - ) - page_token: str = proto.Field( - proto.STRING, - number=3, - ) - filter: str = proto.Field( - proto.STRING, - number=4, - ) - order_by: str = proto.Field( - proto.STRING, - number=5, - ) - - -class ListEnvironmentsResponse(proto.Message): - r"""List environments response. - - Attributes: - environments (MutableSequence[google.cloud.dataplex_v1.types.Environment]): - Environments under the given parent lake. - next_page_token (str): - Token to retrieve the next page of results, - or empty if there are no more results in the - list. 
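When response-level fields such as ``next_page_token`` are needed, the pager can also be walked page by page; a minimal sketch with a hypothetical parent:

    from google.cloud import dataplex_v1

    client = dataplex_v1.DataplexServiceClient()
    pager = client.list_environments(
        parent="projects/123/locations/us-central1/lakes/my-lake",
    )
    for page in pager.pages:  # each page is one ListEnvironmentsResponse
        for environment in page.environments:
            print(environment.name)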
- """ - - @property - def raw_page(self): - return self - - environments: MutableSequence[analyze.Environment] = proto.RepeatedField( - proto.MESSAGE, - number=1, - message=analyze.Environment, - ) - next_page_token: str = proto.Field( - proto.STRING, - number=2, - ) - - -class GetEnvironmentRequest(proto.Message): - r"""Get environment request. - - Attributes: - name (str): - Required. The resource name of the environment: - ``projects/{project_id}/locations/{location_id}/lakes/{lake_id}/environments/{environment_id}``. - """ - - name: str = proto.Field( - proto.STRING, - number=1, - ) - - -class ListSessionsRequest(proto.Message): - r"""List sessions request. - - Attributes: - parent (str): - Required. The resource name of the parent environment: - ``projects/{project_number}/locations/{location_id}/lakes/{lake_id}/environment/{environment_id}``. - page_size (int): - Optional. Maximum number of sessions to - return. The service may return fewer than this - value. If unspecified, at most 10 sessions will - be returned. The maximum value is 1000; values - above 1000 will be coerced to 1000. - page_token (str): - Optional. Page token received from a previous - ``ListSessions`` call. Provide this to retrieve the - subsequent page. When paginating, all other parameters - provided to ``ListSessions`` must match the call that - provided the page token. - filter (str): - Optional. Filter request. The following ``mode`` filter is - supported to return only the sessions belonging to the - requester when the mode is USER and return sessions of all - the users when the mode is ADMIN. When no filter is sent - default to USER mode. NOTE: When the mode is ADMIN, the - requester should have - ``dataplex.environments.listAllSessions`` permission to list - all sessions, in absence of the permission, the request - fails. - - mode = ADMIN \| USER - """ - - parent: str = proto.Field( - proto.STRING, - number=1, - ) - page_size: int = proto.Field( - proto.INT32, - number=2, - ) - page_token: str = proto.Field( - proto.STRING, - number=3, - ) - filter: str = proto.Field( - proto.STRING, - number=4, - ) - - -class ListSessionsResponse(proto.Message): - r"""List sessions response. - - Attributes: - sessions (MutableSequence[google.cloud.dataplex_v1.types.Session]): - Sessions under a given environment. - next_page_token (str): - Token to retrieve the next page of results, - or empty if there are no more results in the - list. - """ - - @property - def raw_page(self): - return self - - sessions: MutableSequence[analyze.Session] = proto.RepeatedField( - proto.MESSAGE, - number=1, - message=analyze.Session, - ) - next_page_token: str = proto.Field( - proto.STRING, - number=2, - ) - - -__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/types/tasks.py b/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/types/tasks.py deleted file mode 100644 index 3f0b83c776fc..000000000000 --- a/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/types/tasks.py +++ /dev/null @@ -1,751 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -from __future__ import annotations - -from typing import MutableMapping, MutableSequence - -import proto # type: ignore - -from google.cloud.dataplex_v1.types import resources -from google.protobuf import duration_pb2 # type: ignore -from google.protobuf import timestamp_pb2 # type: ignore - - -__protobuf__ = proto.module( - package='google.cloud.dataplex.v1', - manifest={ - 'Task', - 'Job', - }, -) - - -class Task(proto.Message): - r"""A task represents a user-visible job. - - This message has `oneof`_ fields (mutually exclusive fields). - For each oneof, at most one member field can be set at the same time. - Setting any member of the oneof automatically clears all other - members. - - .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields - - Attributes: - name (str): - Output only. The relative resource name of the task, of the - form: - projects/{project_number}/locations/{location_id}/lakes/{lake_id}/ - tasks/{task_id}. - uid (str): - Output only. System generated globally unique - ID for the task. This ID will be different if - the task is deleted and re-created with the same - name. - create_time (google.protobuf.timestamp_pb2.Timestamp): - Output only. The time when the task was - created. - update_time (google.protobuf.timestamp_pb2.Timestamp): - Output only. The time when the task was last - updated. - description (str): - Optional. Description of the task. - display_name (str): - Optional. User friendly display name. - state (google.cloud.dataplex_v1.types.State): - Output only. Current state of the task. - labels (MutableMapping[str, str]): - Optional. User-defined labels for the task. - trigger_spec (google.cloud.dataplex_v1.types.Task.TriggerSpec): - Required. Spec related to how often and when - a task should be triggered. - execution_spec (google.cloud.dataplex_v1.types.Task.ExecutionSpec): - Required. Spec related to how a task is - executed. - execution_status (google.cloud.dataplex_v1.types.Task.ExecutionStatus): - Output only. Status of the latest task - executions. - spark (google.cloud.dataplex_v1.types.Task.SparkTaskConfig): - Config related to running custom Spark tasks. - - This field is a member of `oneof`_ ``config``. - notebook (google.cloud.dataplex_v1.types.Task.NotebookTaskConfig): - Config related to running scheduled - Notebooks. - - This field is a member of `oneof`_ ``config``. - """ - - class InfrastructureSpec(proto.Message): - r"""Configuration for the underlying infrastructure used to run - workloads. - - - .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields - - Attributes: - batch (google.cloud.dataplex_v1.types.Task.InfrastructureSpec.BatchComputeResources): - Compute resources needed for a Task when - using Dataproc Serverless. - - This field is a member of `oneof`_ ``resources``. - container_image (google.cloud.dataplex_v1.types.Task.InfrastructureSpec.ContainerImageRuntime): - Container Image Runtime Configuration. - - This field is a member of `oneof`_ ``runtime``. 
- vpc_network (google.cloud.dataplex_v1.types.Task.InfrastructureSpec.VpcNetwork): - Vpc network. - - This field is a member of `oneof`_ ``network``. - """ - - class BatchComputeResources(proto.Message): - r"""Batch compute resources associated with the task. - - Attributes: - executors_count (int): - Optional. Total number of job executors. Executor Count - should be between 2 and 100. [Default=2] - max_executors_count (int): - Optional. Max configurable executors. If max_executors_count - > executors_count, then auto-scaling is enabled. Max - Executor Count should be between 2 and 1000. [Default=1000] - """ - - executors_count: int = proto.Field( - proto.INT32, - number=1, - ) - max_executors_count: int = proto.Field( - proto.INT32, - number=2, - ) - - class ContainerImageRuntime(proto.Message): - r"""Container Image Runtime Configuration used with Batch - execution. - - Attributes: - image (str): - Optional. Container image to use. - java_jars (MutableSequence[str]): - Optional. A list of Java JARS to add to the - classpath. Valid input includes Cloud Storage - URIs to Jar binaries. For example, - gs://bucket-name/my/path/to/file.jar - python_packages (MutableSequence[str]): - Optional. A list of python packages to be - installed. Valid formats include Cloud Storage - URI to a PIP installable library. For example, - gs://bucket-name/my/path/to/lib.tar.gz - properties (MutableMapping[str, str]): - Optional. Override to common configuration of open source - components installed on the Dataproc cluster. The properties - to set on daemon config files. Property keys are specified - in ``prefix:property`` format, for example - ``core:hadoop.tmp.dir``. For more information, see `Cluster - properties `__. - """ - - image: str = proto.Field( - proto.STRING, - number=1, - ) - java_jars: MutableSequence[str] = proto.RepeatedField( - proto.STRING, - number=2, - ) - python_packages: MutableSequence[str] = proto.RepeatedField( - proto.STRING, - number=3, - ) - properties: MutableMapping[str, str] = proto.MapField( - proto.STRING, - proto.STRING, - number=4, - ) - - class VpcNetwork(proto.Message): - r"""Cloud VPC Network used to run the infrastructure. - - This message has `oneof`_ fields (mutually exclusive fields). - For each oneof, at most one member field can be set at the same time. - Setting any member of the oneof automatically clears all other - members. - - .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields - - Attributes: - network (str): - Optional. The Cloud VPC network in which the - job is run. By default, the Cloud VPC network - named Default within the project is used. - - This field is a member of `oneof`_ ``network_name``. - sub_network (str): - Optional. The Cloud VPC sub-network in which - the job is run. - - This field is a member of `oneof`_ ``network_name``. - network_tags (MutableSequence[str]): - Optional. List of network tags to apply to - the job. 
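A sketch tying together the two rules described above, the auto-scaling condition (max_executors_count > executors_count) and the ``network_name`` oneof; the subnetwork name is hypothetical:

    from google.cloud import dataplex_v1

    infra = dataplex_v1.Task.InfrastructureSpec(
        batch=dataplex_v1.Task.InfrastructureSpec.BatchComputeResources(
            executors_count=2,
            max_executors_count=10,  # exceeds executors_count, so auto-scaling is enabled
        ),
        vpc_network=dataplex_v1.Task.InfrastructureSpec.VpcNetwork(
            # network and sub_network share the `network_name` oneof;
            # setting one clears the other.
            sub_network="my-subnet",
        ),
    )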
- """ - - network: str = proto.Field( - proto.STRING, - number=1, - oneof='network_name', - ) - sub_network: str = proto.Field( - proto.STRING, - number=2, - oneof='network_name', - ) - network_tags: MutableSequence[str] = proto.RepeatedField( - proto.STRING, - number=3, - ) - - batch: 'Task.InfrastructureSpec.BatchComputeResources' = proto.Field( - proto.MESSAGE, - number=52, - oneof='resources', - message='Task.InfrastructureSpec.BatchComputeResources', - ) - container_image: 'Task.InfrastructureSpec.ContainerImageRuntime' = proto.Field( - proto.MESSAGE, - number=101, - oneof='runtime', - message='Task.InfrastructureSpec.ContainerImageRuntime', - ) - vpc_network: 'Task.InfrastructureSpec.VpcNetwork' = proto.Field( - proto.MESSAGE, - number=150, - oneof='network', - message='Task.InfrastructureSpec.VpcNetwork', - ) - - class TriggerSpec(proto.Message): - r"""Task scheduling and trigger settings. - - .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields - - Attributes: - type_ (google.cloud.dataplex_v1.types.Task.TriggerSpec.Type): - Required. Immutable. Trigger type of the - user-specified Task. - start_time (google.protobuf.timestamp_pb2.Timestamp): - Optional. The first run of the task will be after this time. - If not specified, the task will run shortly after being - submitted if ON_DEMAND and based on the schedule if - RECURRING. - disabled (bool): - Optional. Prevent the task from executing. - This does not cancel already running tasks. It - is intended to temporarily disable RECURRING - tasks. - max_retries (int): - Optional. Number of retry attempts before - aborting. Set to zero to never attempt to retry - a failed task. - schedule (str): - Optional. Cron schedule (https://en.wikipedia.org/wiki/Cron) - for running tasks periodically. To explicitly set a timezone - to the cron tab, apply a prefix in the cron tab: - "CRON_TZ=${IANA_TIME_ZONE}" or "TZ=${IANA_TIME_ZONE}". The - ${IANA_TIME_ZONE} may only be a valid string from IANA time - zone database. For example, - ``CRON_TZ=America/New_York 1 * * * *``, or - ``TZ=America/New_York 1 * * * *``. This field is required - for RECURRING tasks. - - This field is a member of `oneof`_ ``trigger``. - """ - class Type(proto.Enum): - r"""Determines how often and when the job will run. - - Values: - TYPE_UNSPECIFIED (0): - Unspecified trigger type. - ON_DEMAND (1): - The task runs one-time shortly after Task - Creation. - RECURRING (2): - The task is scheduled to run periodically. - """ - TYPE_UNSPECIFIED = 0 - ON_DEMAND = 1 - RECURRING = 2 - - type_: 'Task.TriggerSpec.Type' = proto.Field( - proto.ENUM, - number=5, - enum='Task.TriggerSpec.Type', - ) - start_time: timestamp_pb2.Timestamp = proto.Field( - proto.MESSAGE, - number=6, - message=timestamp_pb2.Timestamp, - ) - disabled: bool = proto.Field( - proto.BOOL, - number=4, - ) - max_retries: int = proto.Field( - proto.INT32, - number=7, - ) - schedule: str = proto.Field( - proto.STRING, - number=100, - oneof='trigger', - ) - - class ExecutionSpec(proto.Message): - r"""Execution related settings, like retry and service_account. - - Attributes: - args (MutableMapping[str, str]): - Optional. The arguments to pass to the task. The args can - use placeholders of the format ${placeholder} as part of - key/value string. These will be interpolated before passing - the args to the driver. Currently supported placeholders: - - - ${task_id} - - ${job_time} To pass positional args, set the key as - TASK_ARGS. 
The value should be a comma-separated string - of all the positional arguments. To use a delimiter other - than comma, refer to - https://cloud.google.com/sdk/gcloud/reference/topic/escaping. - In case of other keys being present in the args, then - TASK_ARGS will be passed as the last argument. - service_account (str): - Required. Service account to use to execute a - task. If not provided, the default Compute - service account for the project is used. - project (str): - Optional. The project in which jobs are run. By default, the - project containing the Lake is used. If a project is - provided, the - [ExecutionSpec.service_account][google.cloud.dataplex.v1.Task.ExecutionSpec.service_account] - must belong to this project. - max_job_execution_lifetime (google.protobuf.duration_pb2.Duration): - Optional. The maximum duration after which - the job execution is expired. - kms_key (str): - Optional. The Cloud KMS key to use for encryption, of the - form: - ``projects/{project_number}/locations/{location_id}/keyRings/{key-ring-name}/cryptoKeys/{key-name}``. - """ - - args: MutableMapping[str, str] = proto.MapField( - proto.STRING, - proto.STRING, - number=4, - ) - service_account: str = proto.Field( - proto.STRING, - number=5, - ) - project: str = proto.Field( - proto.STRING, - number=7, - ) - max_job_execution_lifetime: duration_pb2.Duration = proto.Field( - proto.MESSAGE, - number=8, - message=duration_pb2.Duration, - ) - kms_key: str = proto.Field( - proto.STRING, - number=9, - ) - - class SparkTaskConfig(proto.Message): - r"""User-specified config for running a Spark task. - - This message has `oneof`_ fields (mutually exclusive fields). - For each oneof, at most one member field can be set at the same time. - Setting any member of the oneof automatically clears all other - members. - - .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields - - Attributes: - main_jar_file_uri (str): - The Cloud Storage URI of the jar file that contains the main - class. The execution args are passed in as a sequence of - named process arguments (``--key=value``). - - This field is a member of `oneof`_ ``driver``. - main_class (str): - The name of the driver's main class. The jar file that - contains the class must be in the default CLASSPATH or - specified in ``jar_file_uris``. The execution args are - passed in as a sequence of named process arguments - (``--key=value``). - - This field is a member of `oneof`_ ``driver``. - python_script_file (str): - The Gcloud Storage URI of the main Python file to use as the - driver. Must be a .py file. The execution args are passed in - as a sequence of named process arguments (``--key=value``). - - This field is a member of `oneof`_ ``driver``. - sql_script_file (str): - A reference to a query file. This should be the Cloud - Storage URI of the query file. The execution args are used - to declare a set of script variables (``set key="value";``). - - This field is a member of `oneof`_ ``driver``. - sql_script (str): - The query text. The execution args are used to declare a set - of script variables (``set key="value";``). - - This field is a member of `oneof`_ ``driver``. - file_uris (MutableSequence[str]): - Optional. Cloud Storage URIs of files to be - placed in the working directory of each - executor. - archive_uris (MutableSequence[str]): - Optional. Cloud Storage URIs of archives to - be extracted into the working directory of each - executor. Supported file types: .jar, .tar, - .tar.gz, .tgz, and .zip. 
- infrastructure_spec (google.cloud.dataplex_v1.types.Task.InfrastructureSpec): - Optional. Infrastructure specification for - the execution. - """ - - main_jar_file_uri: str = proto.Field( - proto.STRING, - number=100, - oneof='driver', - ) - main_class: str = proto.Field( - proto.STRING, - number=101, - oneof='driver', - ) - python_script_file: str = proto.Field( - proto.STRING, - number=102, - oneof='driver', - ) - sql_script_file: str = proto.Field( - proto.STRING, - number=104, - oneof='driver', - ) - sql_script: str = proto.Field( - proto.STRING, - number=105, - oneof='driver', - ) - file_uris: MutableSequence[str] = proto.RepeatedField( - proto.STRING, - number=3, - ) - archive_uris: MutableSequence[str] = proto.RepeatedField( - proto.STRING, - number=4, - ) - infrastructure_spec: 'Task.InfrastructureSpec' = proto.Field( - proto.MESSAGE, - number=6, - message='Task.InfrastructureSpec', - ) - - class NotebookTaskConfig(proto.Message): - r"""Config for running scheduled notebooks. - - Attributes: - notebook (str): - Required. Path to input notebook. This can be the Cloud - Storage URI of the notebook file or the path to a Notebook - Content. The execution args are accessible as environment - variables (``TASK_key=value``). - infrastructure_spec (google.cloud.dataplex_v1.types.Task.InfrastructureSpec): - Optional. Infrastructure specification for - the execution. - file_uris (MutableSequence[str]): - Optional. Cloud Storage URIs of files to be - placed in the working directory of each - executor. - archive_uris (MutableSequence[str]): - Optional. Cloud Storage URIs of archives to - be extracted into the working directory of each - executor. Supported file types: .jar, .tar, - .tar.gz, .tgz, and .zip. - """ - - notebook: str = proto.Field( - proto.STRING, - number=4, - ) - infrastructure_spec: 'Task.InfrastructureSpec' = proto.Field( - proto.MESSAGE, - number=3, - message='Task.InfrastructureSpec', - ) - file_uris: MutableSequence[str] = proto.RepeatedField( - proto.STRING, - number=5, - ) - archive_uris: MutableSequence[str] = proto.RepeatedField( - proto.STRING, - number=6, - ) - - class ExecutionStatus(proto.Message): - r"""Status of the task execution (e.g. Jobs). - - Attributes: - update_time (google.protobuf.timestamp_pb2.Timestamp): - Output only. Last update time of the status. - latest_job (google.cloud.dataplex_v1.types.Job): - Output only. 
latest job execution - """ - - update_time: timestamp_pb2.Timestamp = proto.Field( - proto.MESSAGE, - number=3, - message=timestamp_pb2.Timestamp, - ) - latest_job: 'Job' = proto.Field( - proto.MESSAGE, - number=9, - message='Job', - ) - - name: str = proto.Field( - proto.STRING, - number=1, - ) - uid: str = proto.Field( - proto.STRING, - number=2, - ) - create_time: timestamp_pb2.Timestamp = proto.Field( - proto.MESSAGE, - number=3, - message=timestamp_pb2.Timestamp, - ) - update_time: timestamp_pb2.Timestamp = proto.Field( - proto.MESSAGE, - number=4, - message=timestamp_pb2.Timestamp, - ) - description: str = proto.Field( - proto.STRING, - number=5, - ) - display_name: str = proto.Field( - proto.STRING, - number=6, - ) - state: resources.State = proto.Field( - proto.ENUM, - number=7, - enum=resources.State, - ) - labels: MutableMapping[str, str] = proto.MapField( - proto.STRING, - proto.STRING, - number=8, - ) - trigger_spec: TriggerSpec = proto.Field( - proto.MESSAGE, - number=100, - message=TriggerSpec, - ) - execution_spec: ExecutionSpec = proto.Field( - proto.MESSAGE, - number=101, - message=ExecutionSpec, - ) - execution_status: ExecutionStatus = proto.Field( - proto.MESSAGE, - number=201, - message=ExecutionStatus, - ) - spark: SparkTaskConfig = proto.Field( - proto.MESSAGE, - number=300, - oneof='config', - message=SparkTaskConfig, - ) - notebook: NotebookTaskConfig = proto.Field( - proto.MESSAGE, - number=302, - oneof='config', - message=NotebookTaskConfig, - ) - - -class Job(proto.Message): - r"""A job represents an instance of a task. - - Attributes: - name (str): - Output only. The relative resource name of the job, of the - form: - ``projects/{project_number}/locations/{location_id}/lakes/{lake_id}/tasks/{task_id}/jobs/{job_id}``. - uid (str): - Output only. System generated globally unique - ID for the job. - start_time (google.protobuf.timestamp_pb2.Timestamp): - Output only. The time when the job was - started. - end_time (google.protobuf.timestamp_pb2.Timestamp): - Output only. The time when the job ended. - state (google.cloud.dataplex_v1.types.Job.State): - Output only. Execution state for the job. - retry_count (int): - Output only. The number of times the job has - been retried (excluding the initial attempt). - service (google.cloud.dataplex_v1.types.Job.Service): - Output only. The underlying service running a - job. - service_job (str): - Output only. The full resource name for the - job run under a particular service. - message (str): - Output only. Additional information about the - current state. - labels (MutableMapping[str, str]): - Output only. User-defined labels for the - task. - trigger (google.cloud.dataplex_v1.types.Job.Trigger): - Output only. Job execution trigger. - execution_spec (google.cloud.dataplex_v1.types.Task.ExecutionSpec): - Output only. Spec related to how a task is - executed. - """ - class Service(proto.Enum): - r""" - - Values: - SERVICE_UNSPECIFIED (0): - Service used to run the job is unspecified. - DATAPROC (1): - Dataproc service is used to run this job. - """ - SERVICE_UNSPECIFIED = 0 - DATAPROC = 1 - - class State(proto.Enum): - r""" - - Values: - STATE_UNSPECIFIED (0): - The job state is unknown. - RUNNING (1): - The job is running. - CANCELLING (2): - The job is cancelling. - CANCELLED (3): - The job cancellation was successful. - SUCCEEDED (4): - The job completed successfully. - FAILED (5): - The job is no longer running due to an error. - ABORTED (6): - The job was cancelled outside of Dataplex. 
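Connecting the RunTask request from service.py to the job states above, a hedged sketch with hypothetical names:

    from google.cloud import dataplex_v1

    client = dataplex_v1.DataplexServiceClient()
    response = client.run_task(
        request=dataplex_v1.RunTaskRequest(
            name="projects/123/locations/us-central1/lakes/my-lake/tasks/my-task",
            # Overrides the TASK_ARGS entry from the task definition, per the
            # merge rules documented on RunTaskRequest.
            args={"TASK_ARGS": "--input=gs://my-bucket/data"},
        )
    )
    job = response.job  # the Job created for this run
    if job.state == dataplex_v1.Job.State.FAILED:
        print(job.message)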
- """ - STATE_UNSPECIFIED = 0 - RUNNING = 1 - CANCELLING = 2 - CANCELLED = 3 - SUCCEEDED = 4 - FAILED = 5 - ABORTED = 6 - - class Trigger(proto.Enum): - r"""Job execution trigger. - - Values: - TRIGGER_UNSPECIFIED (0): - The trigger is unspecified. - TASK_CONFIG (1): - The job was triggered by Dataplex based on - trigger spec from task definition. - RUN_REQUEST (2): - The job was triggered by the explicit call of - Task API. - """ - TRIGGER_UNSPECIFIED = 0 - TASK_CONFIG = 1 - RUN_REQUEST = 2 - - name: str = proto.Field( - proto.STRING, - number=1, - ) - uid: str = proto.Field( - proto.STRING, - number=2, - ) - start_time: timestamp_pb2.Timestamp = proto.Field( - proto.MESSAGE, - number=3, - message=timestamp_pb2.Timestamp, - ) - end_time: timestamp_pb2.Timestamp = proto.Field( - proto.MESSAGE, - number=4, - message=timestamp_pb2.Timestamp, - ) - state: State = proto.Field( - proto.ENUM, - number=5, - enum=State, - ) - retry_count: int = proto.Field( - proto.UINT32, - number=6, - ) - service: Service = proto.Field( - proto.ENUM, - number=7, - enum=Service, - ) - service_job: str = proto.Field( - proto.STRING, - number=8, - ) - message: str = proto.Field( - proto.STRING, - number=9, - ) - labels: MutableMapping[str, str] = proto.MapField( - proto.STRING, - proto.STRING, - number=10, - ) - trigger: Trigger = proto.Field( - proto.ENUM, - number=11, - enum=Trigger, - ) - execution_spec: 'Task.ExecutionSpec' = proto.Field( - proto.MESSAGE, - number=100, - message='Task.ExecutionSpec', - ) - - -__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/owl-bot-staging/google-cloud-dataplex/v1/mypy.ini b/owl-bot-staging/google-cloud-dataplex/v1/mypy.ini deleted file mode 100644 index 574c5aed394b..000000000000 --- a/owl-bot-staging/google-cloud-dataplex/v1/mypy.ini +++ /dev/null @@ -1,3 +0,0 @@ -[mypy] -python_version = 3.7 -namespace_packages = True diff --git a/owl-bot-staging/google-cloud-dataplex/v1/noxfile.py b/owl-bot-staging/google-cloud-dataplex/v1/noxfile.py deleted file mode 100644 index a1ad2c6fe6c7..000000000000 --- a/owl-bot-staging/google-cloud-dataplex/v1/noxfile.py +++ /dev/null @@ -1,280 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-#
-import os
-import pathlib
-import re
-import shutil
-import subprocess
-import sys
-
-
-import nox  # type: ignore
-
-ALL_PYTHON = [
-    "3.7",
-    "3.8",
-    "3.9",
-    "3.10",
-    "3.11",
-    "3.12",
-    "3.13",
-]
-
-CURRENT_DIRECTORY = pathlib.Path(__file__).parent.absolute()
-
-LOWER_BOUND_CONSTRAINTS_FILE = CURRENT_DIRECTORY / "constraints.txt"
-PACKAGE_NAME = 'google-cloud-dataplex'
-
-BLACK_VERSION = "black==22.3.0"
-BLACK_PATHS = ["docs", "google", "tests", "samples", "noxfile.py", "setup.py"]
-DEFAULT_PYTHON_VERSION = "3.13"
-
-nox.sessions = [
-    "unit",
-    "cover",
-    "mypy",
-    "check_lower_bounds",
-    # exclude update_lower_bounds from default
-    "docs",
-    "blacken",
-    "lint",
-    "prerelease_deps",
-]
-
-@nox.session(python=ALL_PYTHON)
-@nox.parametrize(
-    "protobuf_implementation",
-    [ "python", "upb", "cpp" ],
-)
-def unit(session, protobuf_implementation):
-    """Run the unit test suite."""
-
-    if protobuf_implementation == "cpp" and session.python in ("3.11", "3.12", "3.13"):
-        session.skip("cpp implementation is not supported in python 3.11+")
-
-    session.install('coverage', 'pytest', 'pytest-cov', 'pytest-asyncio', 'asyncmock; python_version < "3.8"')
-    session.install('-e', '.', "-c", f"testing/constraints-{session.python}.txt")
-
-    # Remove the 'cpp' implementation once support for Protobuf 3.x is dropped.
-    # The 'cpp' implementation requires Protobuf<4.
-    if protobuf_implementation == "cpp":
-        session.install("protobuf<4")
-
-    session.run(
-        'py.test',
-        '--quiet',
-        '--cov=google/cloud/dataplex_v1/',
-        '--cov=tests/',
-        '--cov-config=.coveragerc',
-        '--cov-report=term',
-        '--cov-report=html',
-        os.path.join('tests', 'unit', ''.join(session.posargs)),
-        env={
-            "PROTOCOL_BUFFERS_PYTHON_IMPLEMENTATION": protobuf_implementation,
-        },
-    )
-
-@nox.session(python=ALL_PYTHON[-1])
-@nox.parametrize(
-    "protobuf_implementation",
-    [ "python", "upb", "cpp" ],
-)
-def prerelease_deps(session, protobuf_implementation):
-    """Run the unit test suite against pre-release versions of dependencies."""
-
-    if protobuf_implementation == "cpp" and session.python in ("3.11", "3.12", "3.13"):
-        session.skip("cpp implementation is not supported in python 3.11+")
-
-    # Install test environment dependencies
-    session.install('coverage', 'pytest', 'pytest-cov', 'pytest-asyncio', 'asyncmock; python_version < "3.8"')
-
-    # Install the package without dependencies
-    session.install('-e', '.', '--no-deps')
-
-    # We test the minimum dependency versions using the minimum Python
-    # version so the lowest python runtime that we test has a corresponding constraints
-    # file, located at `testing/constraints-<minimum python version>.txt`, which contains all of the
-    # dependencies and extras.
-    with open(
-        CURRENT_DIRECTORY
-        / "testing"
-        / f"constraints-{ALL_PYTHON[0]}.txt",
-        encoding="utf-8",
-    ) as constraints_file:
-        constraints_text = constraints_file.read()
-
-    # Ignore leading whitespace and comment lines.
- constraints_deps = [ - match.group(1) - for match in re.finditer( - r"^\s*(\S+)(?===\S+)", constraints_text, flags=re.MULTILINE - ) - ] - - session.install(*constraints_deps) - - prerel_deps = [ - "googleapis-common-protos", - "google-api-core", - "google-auth", - # Exclude grpcio!=1.67.0rc1 which does not support python 3.13 - "grpcio!=1.67.0rc1", - "grpcio-status", - "protobuf", - "proto-plus", - ] - - for dep in prerel_deps: - session.install("--pre", "--no-deps", "--upgrade", dep) - - # Remaining dependencies - other_deps = [ - "requests", - ] - session.install(*other_deps) - - # Print out prerelease package versions - - session.run("python", "-c", "import google.api_core; print(google.api_core.__version__)") - session.run("python", "-c", "import google.auth; print(google.auth.__version__)") - session.run("python", "-c", "import grpc; print(grpc.__version__)") - session.run( - "python", "-c", "import google.protobuf; print(google.protobuf.__version__)" - ) - session.run( - "python", "-c", "import proto; print(proto.__version__)" - ) - - session.run( - 'py.test', - '--quiet', - '--cov=google/cloud/dataplex_v1/', - '--cov=tests/', - '--cov-config=.coveragerc', - '--cov-report=term', - '--cov-report=html', - os.path.join('tests', 'unit', ''.join(session.posargs)), - env={ - "PROTOCOL_BUFFERS_PYTHON_IMPLEMENTATION": protobuf_implementation, - }, - ) - - -@nox.session(python=DEFAULT_PYTHON_VERSION) -def cover(session): - """Run the final coverage report. - This outputs the coverage report aggregating coverage from the unit - test runs (not system test runs), and then erases coverage data. - """ - session.install("coverage", "pytest-cov") - session.run("coverage", "report", "--show-missing", "--fail-under=100") - - session.run("coverage", "erase") - - -@nox.session(python=ALL_PYTHON) -def mypy(session): - """Run the type checker.""" - session.install( - 'mypy', - 'types-requests', - 'types-protobuf' - ) - session.install('.') - session.run( - 'mypy', - '-p', - 'google', - ) - - -@nox.session -def update_lower_bounds(session): - """Update lower bounds in constraints.txt to match setup.py""" - session.install('google-cloud-testutils') - session.install('.') - - session.run( - 'lower-bound-checker', - 'update', - '--package-name', - PACKAGE_NAME, - '--constraints-file', - str(LOWER_BOUND_CONSTRAINTS_FILE), - ) - - -@nox.session -def check_lower_bounds(session): - """Check lower bounds in setup.py are reflected in constraints file""" - session.install('google-cloud-testutils') - session.install('.') - - session.run( - 'lower-bound-checker', - 'check', - '--package-name', - PACKAGE_NAME, - '--constraints-file', - str(LOWER_BOUND_CONSTRAINTS_FILE), - ) - -@nox.session(python=DEFAULT_PYTHON_VERSION) -def docs(session): - """Build the docs for this library.""" - - session.install("-e", ".") - session.install("sphinx==7.0.1", "alabaster", "recommonmark") - - shutil.rmtree(os.path.join("docs", "_build"), ignore_errors=True) - session.run( - "sphinx-build", - "-W", # warnings as errors - "-T", # show full traceback on exception - "-N", # no colors - "-b", - "html", - "-d", - os.path.join("docs", "_build", "doctrees", ""), - os.path.join("docs", ""), - os.path.join("docs", "_build", "html", ""), - ) - - -@nox.session(python=DEFAULT_PYTHON_VERSION) -def lint(session): - """Run linters. - - Returns a failure if the linters find linting errors or sufficiently - serious code quality issues. 
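For reference, sessions defined in this file are invoked by name, e.g. ``nox -s lint`` from a shell; an equivalent scripted call, assuming nox is installed:

    import subprocess

    # Runs the `lint` session defined above; raises CalledProcessError on failure.
    subprocess.run(["python", "-m", "nox", "-s", "lint"], check=True)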
- """ - session.install("flake8", BLACK_VERSION) - session.run( - "black", - "--check", - *BLACK_PATHS, - ) - session.run("flake8", "google", "tests", "samples") - - -@nox.session(python=DEFAULT_PYTHON_VERSION) -def blacken(session): - """Run black. Format code to uniform standard.""" - session.install(BLACK_VERSION) - session.run( - "black", - *BLACK_PATHS, - ) diff --git a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_catalog_service_create_aspect_type_async.py b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_catalog_service_create_aspect_type_async.py deleted file mode 100644 index 755b258dfcdc..000000000000 --- a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_catalog_service_create_aspect_type_async.py +++ /dev/null @@ -1,62 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for CreateAspectType -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-dataplex - - -# [START dataplex_v1_generated_CatalogService_CreateAspectType_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import dataplex_v1 - - -async def sample_create_aspect_type(): - # Create a client - client = dataplex_v1.CatalogServiceAsyncClient() - - # Initialize request argument(s) - aspect_type = dataplex_v1.AspectType() - aspect_type.metadata_template.name = "name_value" - aspect_type.metadata_template.type_ = "type__value" - - request = dataplex_v1.CreateAspectTypeRequest( - parent="parent_value", - aspect_type_id="aspect_type_id_value", - aspect_type=aspect_type, - ) - - # Make the request - operation = client.create_aspect_type(request=request) - - print("Waiting for operation to complete...") - - response = (await operation).result() - - # Handle the response - print(response) - -# [END dataplex_v1_generated_CatalogService_CreateAspectType_async] diff --git a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_catalog_service_create_aspect_type_sync.py b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_catalog_service_create_aspect_type_sync.py deleted file mode 100644 index 86dc1d13724d..000000000000 --- a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_catalog_service_create_aspect_type_sync.py +++ /dev/null @@ -1,62 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for CreateAspectType -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-dataplex - - -# [START dataplex_v1_generated_CatalogService_CreateAspectType_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import dataplex_v1 - - -def sample_create_aspect_type(): - # Create a client - client = dataplex_v1.CatalogServiceClient() - - # Initialize request argument(s) - aspect_type = dataplex_v1.AspectType() - aspect_type.metadata_template.name = "name_value" - aspect_type.metadata_template.type_ = "type__value" - - request = dataplex_v1.CreateAspectTypeRequest( - parent="parent_value", - aspect_type_id="aspect_type_id_value", - aspect_type=aspect_type, - ) - - # Make the request - operation = client.create_aspect_type(request=request) - - print("Waiting for operation to complete...") - - response = operation.result() - - # Handle the response - print(response) - -# [END dataplex_v1_generated_CatalogService_CreateAspectType_sync] diff --git a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_catalog_service_create_entry_async.py b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_catalog_service_create_entry_async.py deleted file mode 100644 index 05e1e13996e8..000000000000 --- a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_catalog_service_create_entry_async.py +++ /dev/null @@ -1,57 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for CreateEntry -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-dataplex - - -# [START dataplex_v1_generated_CatalogService_CreateEntry_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import dataplex_v1 - - -async def sample_create_entry(): - # Create a client - client = dataplex_v1.CatalogServiceAsyncClient() - - # Initialize request argument(s) - entry = dataplex_v1.Entry() - entry.entry_type = "entry_type_value" - - request = dataplex_v1.CreateEntryRequest( - parent="parent_value", - entry_id="entry_id_value", - entry=entry, - ) - - # Make the request - response = await client.create_entry(request=request) - - # Handle the response - print(response) - -# [END dataplex_v1_generated_CatalogService_CreateEntry_async] diff --git a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_catalog_service_create_entry_group_async.py b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_catalog_service_create_entry_group_async.py deleted file mode 100644 index 4ffcdc99175a..000000000000 --- a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_catalog_service_create_entry_group_async.py +++ /dev/null @@ -1,57 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for CreateEntryGroup -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-dataplex - - -# [START dataplex_v1_generated_CatalogService_CreateEntryGroup_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import dataplex_v1 - - -async def sample_create_entry_group(): - # Create a client - client = dataplex_v1.CatalogServiceAsyncClient() - - # Initialize request argument(s) - request = dataplex_v1.CreateEntryGroupRequest( - parent="parent_value", - entry_group_id="entry_group_id_value", - ) - - # Make the request - operation = client.create_entry_group(request=request) - - print("Waiting for operation to complete...") - - response = (await operation).result() - - # Handle the response - print(response) - -# [END dataplex_v1_generated_CatalogService_CreateEntryGroup_async] diff --git a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_catalog_service_create_entry_group_sync.py b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_catalog_service_create_entry_group_sync.py deleted file mode 100644 index 8ce69f86a352..000000000000 --- a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_catalog_service_create_entry_group_sync.py +++ /dev/null @@ -1,57 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for CreateEntryGroup -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-dataplex - - -# [START dataplex_v1_generated_CatalogService_CreateEntryGroup_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import dataplex_v1 - - -def sample_create_entry_group(): - # Create a client - client = dataplex_v1.CatalogServiceClient() - - # Initialize request argument(s) - request = dataplex_v1.CreateEntryGroupRequest( - parent="parent_value", - entry_group_id="entry_group_id_value", - ) - - # Make the request - operation = client.create_entry_group(request=request) - - print("Waiting for operation to complete...") - - response = operation.result() - - # Handle the response - print(response) - -# [END dataplex_v1_generated_CatalogService_CreateEntryGroup_sync] diff --git a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_catalog_service_create_entry_sync.py b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_catalog_service_create_entry_sync.py deleted file mode 100644 index a0b18212967b..000000000000 --- a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_catalog_service_create_entry_sync.py +++ /dev/null @@ -1,57 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for CreateEntry -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-dataplex - - -# [START dataplex_v1_generated_CatalogService_CreateEntry_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import dataplex_v1 - - -def sample_create_entry(): - # Create a client - client = dataplex_v1.CatalogServiceClient() - - # Initialize request argument(s) - entry = dataplex_v1.Entry() - entry.entry_type = "entry_type_value" - - request = dataplex_v1.CreateEntryRequest( - parent="parent_value", - entry_id="entry_id_value", - entry=entry, - ) - - # Make the request - response = client.create_entry(request=request) - - # Handle the response - print(response) - -# [END dataplex_v1_generated_CatalogService_CreateEntry_sync] diff --git a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_catalog_service_create_entry_type_async.py b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_catalog_service_create_entry_type_async.py deleted file mode 100644 index 545b7267de18..000000000000 --- a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_catalog_service_create_entry_type_async.py +++ /dev/null @@ -1,57 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for CreateEntryType -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-dataplex - - -# [START dataplex_v1_generated_CatalogService_CreateEntryType_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import dataplex_v1 - - -async def sample_create_entry_type(): - # Create a client - client = dataplex_v1.CatalogServiceAsyncClient() - - # Initialize request argument(s) - request = dataplex_v1.CreateEntryTypeRequest( - parent="parent_value", - entry_type_id="entry_type_id_value", - ) - - # Make the request - operation = client.create_entry_type(request=request) - - print("Waiting for operation to complete...") - - response = (await operation).result() - - # Handle the response - print(response) - -# [END dataplex_v1_generated_CatalogService_CreateEntryType_async] diff --git a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_catalog_service_create_entry_type_sync.py b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_catalog_service_create_entry_type_sync.py deleted file mode 100644 index 6bed1ae44985..000000000000 --- a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_catalog_service_create_entry_type_sync.py +++ /dev/null @@ -1,57 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for CreateEntryType -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-dataplex - - -# [START dataplex_v1_generated_CatalogService_CreateEntryType_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import dataplex_v1 - - -def sample_create_entry_type(): - # Create a client - client = dataplex_v1.CatalogServiceClient() - - # Initialize request argument(s) - request = dataplex_v1.CreateEntryTypeRequest( - parent="parent_value", - entry_type_id="entry_type_id_value", - ) - - # Make the request - operation = client.create_entry_type(request=request) - - print("Waiting for operation to complete...") - - response = operation.result() - - # Handle the response - print(response) - -# [END dataplex_v1_generated_CatalogService_CreateEntryType_sync] diff --git a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_catalog_service_delete_aspect_type_async.py b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_catalog_service_delete_aspect_type_async.py deleted file mode 100644 index 787280513289..000000000000 --- a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_catalog_service_delete_aspect_type_async.py +++ /dev/null @@ -1,56 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for DeleteAspectType -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-dataplex - - -# [START dataplex_v1_generated_CatalogService_DeleteAspectType_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import dataplex_v1 - - -async def sample_delete_aspect_type(): - # Create a client - client = dataplex_v1.CatalogServiceAsyncClient() - - # Initialize request argument(s) - request = dataplex_v1.DeleteAspectTypeRequest( - name="name_value", - ) - - # Make the request - operation = client.delete_aspect_type(request=request) - - print("Waiting for operation to complete...") - - response = (await operation).result() - - # Handle the response - print(response) - -# [END dataplex_v1_generated_CatalogService_DeleteAspectType_async] diff --git a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_catalog_service_delete_aspect_type_sync.py b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_catalog_service_delete_aspect_type_sync.py deleted file mode 100644 index ce89096f240b..000000000000 --- a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_catalog_service_delete_aspect_type_sync.py +++ /dev/null @@ -1,56 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for DeleteAspectType -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-dataplex - - -# [START dataplex_v1_generated_CatalogService_DeleteAspectType_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import dataplex_v1 - - -def sample_delete_aspect_type(): - # Create a client - client = dataplex_v1.CatalogServiceClient() - - # Initialize request argument(s) - request = dataplex_v1.DeleteAspectTypeRequest( - name="name_value", - ) - - # Make the request - operation = client.delete_aspect_type(request=request) - - print("Waiting for operation to complete...") - - response = operation.result() - - # Handle the response - print(response) - -# [END dataplex_v1_generated_CatalogService_DeleteAspectType_sync] diff --git a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_catalog_service_delete_entry_async.py b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_catalog_service_delete_entry_async.py deleted file mode 100644 index 3351c7f53839..000000000000 --- a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_catalog_service_delete_entry_async.py +++ /dev/null @@ -1,52 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for DeleteEntry -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-dataplex - - -# [START dataplex_v1_generated_CatalogService_DeleteEntry_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import dataplex_v1 - - -async def sample_delete_entry(): - # Create a client - client = dataplex_v1.CatalogServiceAsyncClient() - - # Initialize request argument(s) - request = dataplex_v1.DeleteEntryRequest( - name="name_value", - ) - - # Make the request - response = await client.delete_entry(request=request) - - # Handle the response - print(response) - -# [END dataplex_v1_generated_CatalogService_DeleteEntry_async] diff --git a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_catalog_service_delete_entry_group_async.py b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_catalog_service_delete_entry_group_async.py deleted file mode 100644 index 019b3acb22c2..000000000000 --- a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_catalog_service_delete_entry_group_async.py +++ /dev/null @@ -1,56 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for DeleteEntryGroup -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-dataplex - - -# [START dataplex_v1_generated_CatalogService_DeleteEntryGroup_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import dataplex_v1 - - -async def sample_delete_entry_group(): - # Create a client - client = dataplex_v1.CatalogServiceAsyncClient() - - # Initialize request argument(s) - request = dataplex_v1.DeleteEntryGroupRequest( - name="name_value", - ) - - # Make the request - operation = client.delete_entry_group(request=request) - - print("Waiting for operation to complete...") - - response = (await operation).result() - - # Handle the response - print(response) - -# [END dataplex_v1_generated_CatalogService_DeleteEntryGroup_async] diff --git a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_catalog_service_delete_entry_group_sync.py b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_catalog_service_delete_entry_group_sync.py deleted file mode 100644 index b7cb7aa70933..000000000000 --- a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_catalog_service_delete_entry_group_sync.py +++ /dev/null @@ -1,56 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for DeleteEntryGroup -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-dataplex - - -# [START dataplex_v1_generated_CatalogService_DeleteEntryGroup_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import dataplex_v1 - - -def sample_delete_entry_group(): - # Create a client - client = dataplex_v1.CatalogServiceClient() - - # Initialize request argument(s) - request = dataplex_v1.DeleteEntryGroupRequest( - name="name_value", - ) - - # Make the request - operation = client.delete_entry_group(request=request) - - print("Waiting for operation to complete...") - - response = operation.result() - - # Handle the response - print(response) - -# [END dataplex_v1_generated_CatalogService_DeleteEntryGroup_sync] diff --git a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_catalog_service_delete_entry_sync.py b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_catalog_service_delete_entry_sync.py deleted file mode 100644 index 6c772dc5c765..000000000000 --- a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_catalog_service_delete_entry_sync.py +++ /dev/null @@ -1,52 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for DeleteEntry -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-dataplex - - -# [START dataplex_v1_generated_CatalogService_DeleteEntry_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import dataplex_v1 - - -def sample_delete_entry(): - # Create a client - client = dataplex_v1.CatalogServiceClient() - - # Initialize request argument(s) - request = dataplex_v1.DeleteEntryRequest( - name="name_value", - ) - - # Make the request - response = client.delete_entry(request=request) - - # Handle the response - print(response) - -# [END dataplex_v1_generated_CatalogService_DeleteEntry_sync] diff --git a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_catalog_service_delete_entry_type_async.py b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_catalog_service_delete_entry_type_async.py deleted file mode 100644 index 666df269d16d..000000000000 --- a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_catalog_service_delete_entry_type_async.py +++ /dev/null @@ -1,56 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for DeleteEntryType -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-dataplex - - -# [START dataplex_v1_generated_CatalogService_DeleteEntryType_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import dataplex_v1 - - -async def sample_delete_entry_type(): - # Create a client - client = dataplex_v1.CatalogServiceAsyncClient() - - # Initialize request argument(s) - request = dataplex_v1.DeleteEntryTypeRequest( - name="name_value", - ) - - # Make the request - operation = client.delete_entry_type(request=request) - - print("Waiting for operation to complete...") - - response = (await operation).result() - - # Handle the response - print(response) - -# [END dataplex_v1_generated_CatalogService_DeleteEntryType_async] diff --git a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_catalog_service_delete_entry_type_sync.py b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_catalog_service_delete_entry_type_sync.py deleted file mode 100644 index 91f695f2841e..000000000000 --- a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_catalog_service_delete_entry_type_sync.py +++ /dev/null @@ -1,56 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for DeleteEntryType -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-dataplex - - -# [START dataplex_v1_generated_CatalogService_DeleteEntryType_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import dataplex_v1 - - -def sample_delete_entry_type(): - # Create a client - client = dataplex_v1.CatalogServiceClient() - - # Initialize request argument(s) - request = dataplex_v1.DeleteEntryTypeRequest( - name="name_value", - ) - - # Make the request - operation = client.delete_entry_type(request=request) - - print("Waiting for operation to complete...") - - response = operation.result() - - # Handle the response - print(response) - -# [END dataplex_v1_generated_CatalogService_DeleteEntryType_sync] diff --git a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_catalog_service_get_aspect_type_async.py b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_catalog_service_get_aspect_type_async.py deleted file mode 100644 index 81d3d81b8f26..000000000000 --- a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_catalog_service_get_aspect_type_async.py +++ /dev/null @@ -1,52 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for GetAspectType -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-dataplex - - -# [START dataplex_v1_generated_CatalogService_GetAspectType_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import dataplex_v1 - - -async def sample_get_aspect_type(): - # Create a client - client = dataplex_v1.CatalogServiceAsyncClient() - - # Initialize request argument(s) - request = dataplex_v1.GetAspectTypeRequest( - name="name_value", - ) - - # Make the request - response = await client.get_aspect_type(request=request) - - # Handle the response - print(response) - -# [END dataplex_v1_generated_CatalogService_GetAspectType_async] diff --git a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_catalog_service_get_aspect_type_sync.py b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_catalog_service_get_aspect_type_sync.py deleted file mode 100644 index 2c3b728838b7..000000000000 --- a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_catalog_service_get_aspect_type_sync.py +++ /dev/null @@ -1,52 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for GetAspectType -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-dataplex - - -# [START dataplex_v1_generated_CatalogService_GetAspectType_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import dataplex_v1 - - -def sample_get_aspect_type(): - # Create a client - client = dataplex_v1.CatalogServiceClient() - - # Initialize request argument(s) - request = dataplex_v1.GetAspectTypeRequest( - name="name_value", - ) - - # Make the request - response = client.get_aspect_type(request=request) - - # Handle the response - print(response) - -# [END dataplex_v1_generated_CatalogService_GetAspectType_sync] diff --git a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_catalog_service_get_entry_async.py b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_catalog_service_get_entry_async.py deleted file mode 100644 index b996bab77591..000000000000 --- a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_catalog_service_get_entry_async.py +++ /dev/null @@ -1,52 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for GetEntry -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-dataplex - - -# [START dataplex_v1_generated_CatalogService_GetEntry_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import dataplex_v1 - - -async def sample_get_entry(): - # Create a client - client = dataplex_v1.CatalogServiceAsyncClient() - - # Initialize request argument(s) - request = dataplex_v1.GetEntryRequest( - name="name_value", - ) - - # Make the request - response = await client.get_entry(request=request) - - # Handle the response - print(response) - -# [END dataplex_v1_generated_CatalogService_GetEntry_async] diff --git a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_catalog_service_get_entry_group_async.py b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_catalog_service_get_entry_group_async.py deleted file mode 100644 index e0669aadb927..000000000000 --- a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_catalog_service_get_entry_group_async.py +++ /dev/null @@ -1,52 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for GetEntryGroup -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-dataplex - - -# [START dataplex_v1_generated_CatalogService_GetEntryGroup_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import dataplex_v1 - - -async def sample_get_entry_group(): - # Create a client - client = dataplex_v1.CatalogServiceAsyncClient() - - # Initialize request argument(s) - request = dataplex_v1.GetEntryGroupRequest( - name="name_value", - ) - - # Make the request - response = await client.get_entry_group(request=request) - - # Handle the response - print(response) - -# [END dataplex_v1_generated_CatalogService_GetEntryGroup_async] diff --git a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_catalog_service_get_entry_group_sync.py b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_catalog_service_get_entry_group_sync.py deleted file mode 100644 index 95c037a589b6..000000000000 --- a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_catalog_service_get_entry_group_sync.py +++ /dev/null @@ -1,52 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for GetEntryGroup -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-dataplex - - -# [START dataplex_v1_generated_CatalogService_GetEntryGroup_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import dataplex_v1 - - -def sample_get_entry_group(): - # Create a client - client = dataplex_v1.CatalogServiceClient() - - # Initialize request argument(s) - request = dataplex_v1.GetEntryGroupRequest( - name="name_value", - ) - - # Make the request - response = client.get_entry_group(request=request) - - # Handle the response - print(response) - -# [END dataplex_v1_generated_CatalogService_GetEntryGroup_sync] diff --git a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_catalog_service_get_entry_sync.py b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_catalog_service_get_entry_sync.py deleted file mode 100644 index b53082957bfa..000000000000 --- a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_catalog_service_get_entry_sync.py +++ /dev/null @@ -1,52 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for GetEntry -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-dataplex - - -# [START dataplex_v1_generated_CatalogService_GetEntry_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import dataplex_v1 - - -def sample_get_entry(): - # Create a client - client = dataplex_v1.CatalogServiceClient() - - # Initialize request argument(s) - request = dataplex_v1.GetEntryRequest( - name="name_value", - ) - - # Make the request - response = client.get_entry(request=request) - - # Handle the response - print(response) - -# [END dataplex_v1_generated_CatalogService_GetEntry_sync] diff --git a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_catalog_service_get_entry_type_async.py b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_catalog_service_get_entry_type_async.py deleted file mode 100644 index 58083a001b8a..000000000000 --- a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_catalog_service_get_entry_type_async.py +++ /dev/null @@ -1,52 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for GetEntryType -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-dataplex - - -# [START dataplex_v1_generated_CatalogService_GetEntryType_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import dataplex_v1 - - -async def sample_get_entry_type(): - # Create a client - client = dataplex_v1.CatalogServiceAsyncClient() - - # Initialize request argument(s) - request = dataplex_v1.GetEntryTypeRequest( - name="name_value", - ) - - # Make the request - response = await client.get_entry_type(request=request) - - # Handle the response - print(response) - -# [END dataplex_v1_generated_CatalogService_GetEntryType_async] diff --git a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_catalog_service_get_entry_type_sync.py b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_catalog_service_get_entry_type_sync.py deleted file mode 100644 index 4098fc417a9e..000000000000 --- a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_catalog_service_get_entry_type_sync.py +++ /dev/null @@ -1,52 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for GetEntryType -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-dataplex - - -# [START dataplex_v1_generated_CatalogService_GetEntryType_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import dataplex_v1 - - -def sample_get_entry_type(): - # Create a client - client = dataplex_v1.CatalogServiceClient() - - # Initialize request argument(s) - request = dataplex_v1.GetEntryTypeRequest( - name="name_value", - ) - - # Make the request - response = client.get_entry_type(request=request) - - # Handle the response - print(response) - -# [END dataplex_v1_generated_CatalogService_GetEntryType_sync] diff --git a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_catalog_service_list_aspect_types_async.py b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_catalog_service_list_aspect_types_async.py deleted file mode 100644 index 64778a1cfb7a..000000000000 --- a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_catalog_service_list_aspect_types_async.py +++ /dev/null @@ -1,53 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for ListAspectTypes -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-dataplex - - -# [START dataplex_v1_generated_CatalogService_ListAspectTypes_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import dataplex_v1 - - -async def sample_list_aspect_types(): - # Create a client - client = dataplex_v1.CatalogServiceAsyncClient() - - # Initialize request argument(s) - request = dataplex_v1.ListAspectTypesRequest( - parent="parent_value", - ) - - # Make the request - page_result = client.list_aspect_types(request=request) - - # Handle the response - async for response in page_result: - print(response) - -# [END dataplex_v1_generated_CatalogService_ListAspectTypes_async] diff --git a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_catalog_service_list_aspect_types_sync.py b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_catalog_service_list_aspect_types_sync.py deleted file mode 100644 index 9a8cd1eec4cb..000000000000 --- a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_catalog_service_list_aspect_types_sync.py +++ /dev/null @@ -1,53 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for ListAspectTypes -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-dataplex - - -# [START dataplex_v1_generated_CatalogService_ListAspectTypes_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import dataplex_v1 - - -def sample_list_aspect_types(): - # Create a client - client = dataplex_v1.CatalogServiceClient() - - # Initialize request argument(s) - request = dataplex_v1.ListAspectTypesRequest( - parent="parent_value", - ) - - # Make the request - page_result = client.list_aspect_types(request=request) - - # Handle the response - for response in page_result: - print(response) - -# [END dataplex_v1_generated_CatalogService_ListAspectTypes_sync] diff --git a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_catalog_service_list_entries_async.py b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_catalog_service_list_entries_async.py deleted file mode 100644 index fc611110b8d5..000000000000 --- a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_catalog_service_list_entries_async.py +++ /dev/null @@ -1,53 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for ListEntries -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-dataplex - - -# [START dataplex_v1_generated_CatalogService_ListEntries_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import dataplex_v1 - - -async def sample_list_entries(): - # Create a client - client = dataplex_v1.CatalogServiceAsyncClient() - - # Initialize request argument(s) - request = dataplex_v1.ListEntriesRequest( - parent="parent_value", - ) - - # Make the request - page_result = client.list_entries(request=request) - - # Handle the response - async for response in page_result: - print(response) - -# [END dataplex_v1_generated_CatalogService_ListEntries_async] diff --git a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_catalog_service_list_entries_sync.py b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_catalog_service_list_entries_sync.py deleted file mode 100644 index faabd3c1cea0..000000000000 --- a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_catalog_service_list_entries_sync.py +++ /dev/null @@ -1,53 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for ListEntries -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-dataplex - - -# [START dataplex_v1_generated_CatalogService_ListEntries_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import dataplex_v1 - - -def sample_list_entries(): - # Create a client - client = dataplex_v1.CatalogServiceClient() - - # Initialize request argument(s) - request = dataplex_v1.ListEntriesRequest( - parent="parent_value", - ) - - # Make the request - page_result = client.list_entries(request=request) - - # Handle the response - for response in page_result: - print(response) - -# [END dataplex_v1_generated_CatalogService_ListEntries_sync] diff --git a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_catalog_service_list_entry_groups_async.py b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_catalog_service_list_entry_groups_async.py deleted file mode 100644 index 91a2ad641c95..000000000000 --- a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_catalog_service_list_entry_groups_async.py +++ /dev/null @@ -1,53 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for ListEntryGroups -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-dataplex - - -# [START dataplex_v1_generated_CatalogService_ListEntryGroups_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import dataplex_v1 - - -async def sample_list_entry_groups(): - # Create a client - client = dataplex_v1.CatalogServiceAsyncClient() - - # Initialize request argument(s) - request = dataplex_v1.ListEntryGroupsRequest( - parent="parent_value", - ) - - # Make the request - page_result = client.list_entry_groups(request=request) - - # Handle the response - async for response in page_result: - print(response) - -# [END dataplex_v1_generated_CatalogService_ListEntryGroups_async] diff --git a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_catalog_service_list_entry_groups_sync.py b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_catalog_service_list_entry_groups_sync.py deleted file mode 100644 index 61cade3ceec3..000000000000 --- a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_catalog_service_list_entry_groups_sync.py +++ /dev/null @@ -1,53 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for ListEntryGroups -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-dataplex - - -# [START dataplex_v1_generated_CatalogService_ListEntryGroups_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import dataplex_v1 - - -def sample_list_entry_groups(): - # Create a client - client = dataplex_v1.CatalogServiceClient() - - # Initialize request argument(s) - request = dataplex_v1.ListEntryGroupsRequest( - parent="parent_value", - ) - - # Make the request - page_result = client.list_entry_groups(request=request) - - # Handle the response - for response in page_result: - print(response) - -# [END dataplex_v1_generated_CatalogService_ListEntryGroups_sync] diff --git a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_catalog_service_list_entry_types_async.py b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_catalog_service_list_entry_types_async.py deleted file mode 100644 index a85ebb571be1..000000000000 --- a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_catalog_service_list_entry_types_async.py +++ /dev/null @@ -1,53 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for ListEntryTypes -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-dataplex - - -# [START dataplex_v1_generated_CatalogService_ListEntryTypes_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import dataplex_v1 - - -async def sample_list_entry_types(): - # Create a client - client = dataplex_v1.CatalogServiceAsyncClient() - - # Initialize request argument(s) - request = dataplex_v1.ListEntryTypesRequest( - parent="parent_value", - ) - - # Make the request - page_result = client.list_entry_types(request=request) - - # Handle the response - async for response in page_result: - print(response) - -# [END dataplex_v1_generated_CatalogService_ListEntryTypes_async] diff --git a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_catalog_service_list_entry_types_sync.py b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_catalog_service_list_entry_types_sync.py deleted file mode 100644 index f0fff3cc52c9..000000000000 --- a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_catalog_service_list_entry_types_sync.py +++ /dev/null @@ -1,53 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for ListEntryTypes -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-dataplex - - -# [START dataplex_v1_generated_CatalogService_ListEntryTypes_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import dataplex_v1 - - -def sample_list_entry_types(): - # Create a client - client = dataplex_v1.CatalogServiceClient() - - # Initialize request argument(s) - request = dataplex_v1.ListEntryTypesRequest( - parent="parent_value", - ) - - # Make the request - page_result = client.list_entry_types(request=request) - - # Handle the response - for response in page_result: - print(response) - -# [END dataplex_v1_generated_CatalogService_ListEntryTypes_sync] diff --git a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_catalog_service_lookup_entry_async.py b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_catalog_service_lookup_entry_async.py deleted file mode 100644 index 4b90f27553bb..000000000000 --- a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_catalog_service_lookup_entry_async.py +++ /dev/null @@ -1,53 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for LookupEntry -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-dataplex - - -# [START dataplex_v1_generated_CatalogService_LookupEntry_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import dataplex_v1 - - -async def sample_lookup_entry(): - # Create a client - client = dataplex_v1.CatalogServiceAsyncClient() - - # Initialize request argument(s) - request = dataplex_v1.LookupEntryRequest( - name="name_value", - entry="entry_value", - ) - - # Make the request - response = await client.lookup_entry(request=request) - - # Handle the response - print(response) - -# [END dataplex_v1_generated_CatalogService_LookupEntry_async] diff --git a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_catalog_service_lookup_entry_sync.py b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_catalog_service_lookup_entry_sync.py deleted file mode 100644 index 0a1d7dfbd187..000000000000 --- a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_catalog_service_lookup_entry_sync.py +++ /dev/null @@ -1,53 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for LookupEntry -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-dataplex - - -# [START dataplex_v1_generated_CatalogService_LookupEntry_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import dataplex_v1 - - -def sample_lookup_entry(): - # Create a client - client = dataplex_v1.CatalogServiceClient() - - # Initialize request argument(s) - request = dataplex_v1.LookupEntryRequest( - name="name_value", - entry="entry_value", - ) - - # Make the request - response = client.lookup_entry(request=request) - - # Handle the response - print(response) - -# [END dataplex_v1_generated_CatalogService_LookupEntry_sync] diff --git a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_catalog_service_search_entries_async.py b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_catalog_service_search_entries_async.py deleted file mode 100644 index 9115fb45d14b..000000000000 --- a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_catalog_service_search_entries_async.py +++ /dev/null @@ -1,54 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for SearchEntries -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-dataplex - - -# [START dataplex_v1_generated_CatalogService_SearchEntries_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import dataplex_v1 - - -async def sample_search_entries(): - # Create a client - client = dataplex_v1.CatalogServiceAsyncClient() - - # Initialize request argument(s) - request = dataplex_v1.SearchEntriesRequest( - name="name_value", - query="query_value", - ) - - # Make the request - page_result = client.search_entries(request=request) - - # Handle the response - async for response in page_result: - print(response) - -# [END dataplex_v1_generated_CatalogService_SearchEntries_async] diff --git a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_catalog_service_search_entries_sync.py b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_catalog_service_search_entries_sync.py deleted file mode 100644 index adc2a58f4e3d..000000000000 --- a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_catalog_service_search_entries_sync.py +++ /dev/null @@ -1,54 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for SearchEntries -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-dataplex - - -# [START dataplex_v1_generated_CatalogService_SearchEntries_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import dataplex_v1 - - -def sample_search_entries(): - # Create a client - client = dataplex_v1.CatalogServiceClient() - - # Initialize request argument(s) - request = dataplex_v1.SearchEntriesRequest( - name="name_value", - query="query_value", - ) - - # Make the request - page_result = client.search_entries(request=request) - - # Handle the response - for response in page_result: - print(response) - -# [END dataplex_v1_generated_CatalogService_SearchEntries_sync] diff --git a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_catalog_service_update_aspect_type_async.py b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_catalog_service_update_aspect_type_async.py deleted file mode 100644 index 4d5cbf23aadc..000000000000 --- a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_catalog_service_update_aspect_type_async.py +++ /dev/null @@ -1,60 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for UpdateAspectType -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-dataplex - - -# [START dataplex_v1_generated_CatalogService_UpdateAspectType_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import dataplex_v1 - - -async def sample_update_aspect_type(): - # Create a client - client = dataplex_v1.CatalogServiceAsyncClient() - - # Initialize request argument(s) - aspect_type = dataplex_v1.AspectType() - aspect_type.metadata_template.name = "name_value" - aspect_type.metadata_template.type_ = "type__value" - - request = dataplex_v1.UpdateAspectTypeRequest( - aspect_type=aspect_type, - ) - - # Make the request - operation = client.update_aspect_type(request=request) - - print("Waiting for operation to complete...") - - response = (await operation).result() - - # Handle the response - print(response) - -# [END dataplex_v1_generated_CatalogService_UpdateAspectType_async] diff --git a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_catalog_service_update_aspect_type_sync.py b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_catalog_service_update_aspect_type_sync.py deleted file mode 100644 index 9408a61c68e2..000000000000 --- a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_catalog_service_update_aspect_type_sync.py +++ /dev/null @@ -1,60 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for UpdateAspectType -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-dataplex - - -# [START dataplex_v1_generated_CatalogService_UpdateAspectType_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import dataplex_v1 - - -def sample_update_aspect_type(): - # Create a client - client = dataplex_v1.CatalogServiceClient() - - # Initialize request argument(s) - aspect_type = dataplex_v1.AspectType() - aspect_type.metadata_template.name = "name_value" - aspect_type.metadata_template.type_ = "type__value" - - request = dataplex_v1.UpdateAspectTypeRequest( - aspect_type=aspect_type, - ) - - # Make the request - operation = client.update_aspect_type(request=request) - - print("Waiting for operation to complete...") - - response = operation.result() - - # Handle the response - print(response) - -# [END dataplex_v1_generated_CatalogService_UpdateAspectType_sync] diff --git a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_catalog_service_update_entry_async.py b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_catalog_service_update_entry_async.py deleted file mode 100644 index e88986c399d6..000000000000 --- a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_catalog_service_update_entry_async.py +++ /dev/null @@ -1,55 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for UpdateEntry -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-dataplex - - -# [START dataplex_v1_generated_CatalogService_UpdateEntry_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import dataplex_v1 - - -async def sample_update_entry(): - # Create a client - client = dataplex_v1.CatalogServiceAsyncClient() - - # Initialize request argument(s) - entry = dataplex_v1.Entry() - entry.entry_type = "entry_type_value" - - request = dataplex_v1.UpdateEntryRequest( - entry=entry, - ) - - # Make the request - response = await client.update_entry(request=request) - - # Handle the response - print(response) - -# [END dataplex_v1_generated_CatalogService_UpdateEntry_async] diff --git a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_catalog_service_update_entry_group_async.py b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_catalog_service_update_entry_group_async.py deleted file mode 100644 index f8e0716939f9..000000000000 --- a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_catalog_service_update_entry_group_async.py +++ /dev/null @@ -1,55 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for UpdateEntryGroup -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-dataplex - - -# [START dataplex_v1_generated_CatalogService_UpdateEntryGroup_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import dataplex_v1 - - -async def sample_update_entry_group(): - # Create a client - client = dataplex_v1.CatalogServiceAsyncClient() - - # Initialize request argument(s) - request = dataplex_v1.UpdateEntryGroupRequest( - ) - - # Make the request - operation = client.update_entry_group(request=request) - - print("Waiting for operation to complete...") - - response = (await operation).result() - - # Handle the response - print(response) - -# [END dataplex_v1_generated_CatalogService_UpdateEntryGroup_async] diff --git a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_catalog_service_update_entry_group_sync.py b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_catalog_service_update_entry_group_sync.py deleted file mode 100644 index 482e9db7a27f..000000000000 --- a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_catalog_service_update_entry_group_sync.py +++ /dev/null @@ -1,55 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for UpdateEntryGroup -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-dataplex - - -# [START dataplex_v1_generated_CatalogService_UpdateEntryGroup_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import dataplex_v1 - - -def sample_update_entry_group(): - # Create a client - client = dataplex_v1.CatalogServiceClient() - - # Initialize request argument(s) - request = dataplex_v1.UpdateEntryGroupRequest( - ) - - # Make the request - operation = client.update_entry_group(request=request) - - print("Waiting for operation to complete...") - - response = operation.result() - - # Handle the response - print(response) - -# [END dataplex_v1_generated_CatalogService_UpdateEntryGroup_sync] diff --git a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_catalog_service_update_entry_sync.py b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_catalog_service_update_entry_sync.py deleted file mode 100644 index 69fde35c3515..000000000000 --- a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_catalog_service_update_entry_sync.py +++ /dev/null @@ -1,55 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for UpdateEntry -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-dataplex - - -# [START dataplex_v1_generated_CatalogService_UpdateEntry_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import dataplex_v1 - - -def sample_update_entry(): - # Create a client - client = dataplex_v1.CatalogServiceClient() - - # Initialize request argument(s) - entry = dataplex_v1.Entry() - entry.entry_type = "entry_type_value" - - request = dataplex_v1.UpdateEntryRequest( - entry=entry, - ) - - # Make the request - response = client.update_entry(request=request) - - # Handle the response - print(response) - -# [END dataplex_v1_generated_CatalogService_UpdateEntry_sync] diff --git a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_catalog_service_update_entry_type_async.py b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_catalog_service_update_entry_type_async.py deleted file mode 100644 index 6fc55c02c3ab..000000000000 --- a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_catalog_service_update_entry_type_async.py +++ /dev/null @@ -1,55 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for UpdateEntryType -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-dataplex - - -# [START dataplex_v1_generated_CatalogService_UpdateEntryType_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import dataplex_v1 - - -async def sample_update_entry_type(): - # Create a client - client = dataplex_v1.CatalogServiceAsyncClient() - - # Initialize request argument(s) - request = dataplex_v1.UpdateEntryTypeRequest( - ) - - # Make the request - operation = client.update_entry_type(request=request) - - print("Waiting for operation to complete...") - - response = (await operation).result() - - # Handle the response - print(response) - -# [END dataplex_v1_generated_CatalogService_UpdateEntryType_async] diff --git a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_catalog_service_update_entry_type_sync.py b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_catalog_service_update_entry_type_sync.py deleted file mode 100644 index 2f240368f075..000000000000 --- a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_catalog_service_update_entry_type_sync.py +++ /dev/null @@ -1,55 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for UpdateEntryType -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-dataplex - - -# [START dataplex_v1_generated_CatalogService_UpdateEntryType_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import dataplex_v1 - - -def sample_update_entry_type(): - # Create a client - client = dataplex_v1.CatalogServiceClient() - - # Initialize request argument(s) - request = dataplex_v1.UpdateEntryTypeRequest( - ) - - # Make the request - operation = client.update_entry_type(request=request) - - print("Waiting for operation to complete...") - - response = operation.result() - - # Handle the response - print(response) - -# [END dataplex_v1_generated_CatalogService_UpdateEntryType_sync] diff --git a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_content_service_create_content_async.py b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_content_service_create_content_async.py deleted file mode 100644 index 353f1664db39..000000000000 --- a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_content_service_create_content_async.py +++ /dev/null @@ -1,58 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for CreateContent -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-dataplex - - -# [START dataplex_v1_generated_ContentService_CreateContent_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import dataplex_v1 - - -async def sample_create_content(): - # Create a client - client = dataplex_v1.ContentServiceAsyncClient() - - # Initialize request argument(s) - content = dataplex_v1.Content() - content.data_text = "data_text_value" - content.sql_script.engine = "SPARK" - content.path = "path_value" - - request = dataplex_v1.CreateContentRequest( - parent="parent_value", - content=content, - ) - - # Make the request - response = await client.create_content(request=request) - - # Handle the response - print(response) - -# [END dataplex_v1_generated_ContentService_CreateContent_async] diff --git a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_content_service_create_content_sync.py b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_content_service_create_content_sync.py deleted file mode 100644 index 80aa3386d3ae..000000000000 --- a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_content_service_create_content_sync.py +++ /dev/null @@ -1,58 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for CreateContent -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-dataplex - - -# [START dataplex_v1_generated_ContentService_CreateContent_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import dataplex_v1 - - -def sample_create_content(): - # Create a client - client = dataplex_v1.ContentServiceClient() - - # Initialize request argument(s) - content = dataplex_v1.Content() - content.data_text = "data_text_value" - content.sql_script.engine = "SPARK" - content.path = "path_value" - - request = dataplex_v1.CreateContentRequest( - parent="parent_value", - content=content, - ) - - # Make the request - response = client.create_content(request=request) - - # Handle the response - print(response) - -# [END dataplex_v1_generated_ContentService_CreateContent_sync] diff --git a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_content_service_delete_content_async.py b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_content_service_delete_content_async.py deleted file mode 100644 index 195baf10d664..000000000000 --- a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_content_service_delete_content_async.py +++ /dev/null @@ -1,50 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for DeleteContent -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-dataplex - - -# [START dataplex_v1_generated_ContentService_DeleteContent_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import dataplex_v1 - - -async def sample_delete_content(): - # Create a client - client = dataplex_v1.ContentServiceAsyncClient() - - # Initialize request argument(s) - request = dataplex_v1.DeleteContentRequest( - name="name_value", - ) - - # Make the request - await client.delete_content(request=request) - - -# [END dataplex_v1_generated_ContentService_DeleteContent_async] diff --git a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_content_service_delete_content_sync.py b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_content_service_delete_content_sync.py deleted file mode 100644 index c56872e3a3f1..000000000000 --- a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_content_service_delete_content_sync.py +++ /dev/null @@ -1,50 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for DeleteContent -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-dataplex - - -# [START dataplex_v1_generated_ContentService_DeleteContent_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. -# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import dataplex_v1 - - -def sample_delete_content(): - # Create a client - client = dataplex_v1.ContentServiceClient() - - # Initialize request argument(s) - request = dataplex_v1.DeleteContentRequest( - name="name_value", - ) - - # Make the request - client.delete_content(request=request) - - -# [END dataplex_v1_generated_ContentService_DeleteContent_sync] diff --git a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_content_service_get_content_async.py b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_content_service_get_content_async.py deleted file mode 100644 index 3c82c57a70ad..000000000000 --- a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_content_service_get_content_async.py +++ /dev/null @@ -1,52 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for GetContent -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-dataplex - - -# [START dataplex_v1_generated_ContentService_GetContent_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. -# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import dataplex_v1 - - -async def sample_get_content(): - # Create a client - client = dataplex_v1.ContentServiceAsyncClient() - - # Initialize request argument(s) - request = dataplex_v1.GetContentRequest( - name="name_value", - ) - - # Make the request - response = await client.get_content(request=request) - - # Handle the response - print(response) - -# [END dataplex_v1_generated_ContentService_GetContent_async] diff --git a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_content_service_get_content_sync.py b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_content_service_get_content_sync.py deleted file mode 100644 index dc25faccfde3..000000000000 --- a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_content_service_get_content_sync.py +++ /dev/null @@ -1,52 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for GetContent -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-dataplex - - -# [START dataplex_v1_generated_ContentService_GetContent_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import dataplex_v1 - - -def sample_get_content(): - # Create a client - client = dataplex_v1.ContentServiceClient() - - # Initialize request argument(s) - request = dataplex_v1.GetContentRequest( - name="name_value", - ) - - # Make the request - response = client.get_content(request=request) - - # Handle the response - print(response) - -# [END dataplex_v1_generated_ContentService_GetContent_sync] diff --git a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_content_service_get_iam_policy_async.py b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_content_service_get_iam_policy_async.py deleted file mode 100644 index b9f3c8d527cf..000000000000 --- a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_content_service_get_iam_policy_async.py +++ /dev/null @@ -1,53 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for GetIamPolicy -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-dataplex - - -# [START dataplex_v1_generated_ContentService_GetIamPolicy_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import dataplex_v1 -from google.iam.v1 import iam_policy_pb2 # type: ignore - - -async def sample_get_iam_policy(): - # Create a client - client = dataplex_v1.ContentServiceAsyncClient() - - # Initialize request argument(s) - request = iam_policy_pb2.GetIamPolicyRequest( - resource="resource_value", - ) - - # Make the request - response = await client.get_iam_policy(request=request) - - # Handle the response - print(response) - -# [END dataplex_v1_generated_ContentService_GetIamPolicy_async] diff --git a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_content_service_get_iam_policy_sync.py b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_content_service_get_iam_policy_sync.py deleted file mode 100644 index 1b6ca2879922..000000000000 --- a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_content_service_get_iam_policy_sync.py +++ /dev/null @@ -1,53 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for GetIamPolicy -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-dataplex - - -# [START dataplex_v1_generated_ContentService_GetIamPolicy_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import dataplex_v1 -from google.iam.v1 import iam_policy_pb2 # type: ignore - - -def sample_get_iam_policy(): - # Create a client - client = dataplex_v1.ContentServiceClient() - - # Initialize request argument(s) - request = iam_policy_pb2.GetIamPolicyRequest( - resource="resource_value", - ) - - # Make the request - response = client.get_iam_policy(request=request) - - # Handle the response - print(response) - -# [END dataplex_v1_generated_ContentService_GetIamPolicy_sync] diff --git a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_content_service_list_content_async.py b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_content_service_list_content_async.py deleted file mode 100644 index 1d591e9fc2c6..000000000000 --- a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_content_service_list_content_async.py +++ /dev/null @@ -1,53 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for ListContent -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-dataplex - - -# [START dataplex_v1_generated_ContentService_ListContent_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import dataplex_v1 - - -async def sample_list_content(): - # Create a client - client = dataplex_v1.ContentServiceAsyncClient() - - # Initialize request argument(s) - request = dataplex_v1.ListContentRequest( - parent="parent_value", - ) - - # Make the request - page_result = client.list_content(request=request) - - # Handle the response - async for response in page_result: - print(response) - -# [END dataplex_v1_generated_ContentService_ListContent_async] diff --git a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_content_service_list_content_sync.py b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_content_service_list_content_sync.py deleted file mode 100644 index ad07990ff1ce..000000000000 --- a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_content_service_list_content_sync.py +++ /dev/null @@ -1,53 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for ListContent -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-dataplex - - -# [START dataplex_v1_generated_ContentService_ListContent_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import dataplex_v1 - - -def sample_list_content(): - # Create a client - client = dataplex_v1.ContentServiceClient() - - # Initialize request argument(s) - request = dataplex_v1.ListContentRequest( - parent="parent_value", - ) - - # Make the request - page_result = client.list_content(request=request) - - # Handle the response - for response in page_result: - print(response) - -# [END dataplex_v1_generated_ContentService_ListContent_sync] diff --git a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_content_service_set_iam_policy_async.py b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_content_service_set_iam_policy_async.py deleted file mode 100644 index c85e40228b20..000000000000 --- a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_content_service_set_iam_policy_async.py +++ /dev/null @@ -1,53 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for SetIamPolicy -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-dataplex - - -# [START dataplex_v1_generated_ContentService_SetIamPolicy_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import dataplex_v1 -from google.iam.v1 import iam_policy_pb2 # type: ignore - - -async def sample_set_iam_policy(): - # Create a client - client = dataplex_v1.ContentServiceAsyncClient() - - # Initialize request argument(s) - request = iam_policy_pb2.SetIamPolicyRequest( - resource="resource_value", - ) - - # Make the request - response = await client.set_iam_policy(request=request) - - # Handle the response - print(response) - -# [END dataplex_v1_generated_ContentService_SetIamPolicy_async] diff --git a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_content_service_set_iam_policy_sync.py b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_content_service_set_iam_policy_sync.py deleted file mode 100644 index be43e3cb4138..000000000000 --- a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_content_service_set_iam_policy_sync.py +++ /dev/null @@ -1,53 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for SetIamPolicy -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-dataplex - - -# [START dataplex_v1_generated_ContentService_SetIamPolicy_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import dataplex_v1 -from google.iam.v1 import iam_policy_pb2 # type: ignore - - -def sample_set_iam_policy(): - # Create a client - client = dataplex_v1.ContentServiceClient() - - # Initialize request argument(s) - request = iam_policy_pb2.SetIamPolicyRequest( - resource="resource_value", - ) - - # Make the request - response = client.set_iam_policy(request=request) - - # Handle the response - print(response) - -# [END dataplex_v1_generated_ContentService_SetIamPolicy_sync] diff --git a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_content_service_test_iam_permissions_async.py b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_content_service_test_iam_permissions_async.py deleted file mode 100644 index f7791f437bd3..000000000000 --- a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_content_service_test_iam_permissions_async.py +++ /dev/null @@ -1,54 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for TestIamPermissions -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-dataplex - - -# [START dataplex_v1_generated_ContentService_TestIamPermissions_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import dataplex_v1 -from google.iam.v1 import iam_policy_pb2 # type: ignore - - -async def sample_test_iam_permissions(): - # Create a client - client = dataplex_v1.ContentServiceAsyncClient() - - # Initialize request argument(s) - request = iam_policy_pb2.TestIamPermissionsRequest( - resource="resource_value", - permissions=['permissions_value1', 'permissions_value2'], - ) - - # Make the request - response = await client.test_iam_permissions(request=request) - - # Handle the response - print(response) - -# [END dataplex_v1_generated_ContentService_TestIamPermissions_async] diff --git a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_content_service_test_iam_permissions_sync.py b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_content_service_test_iam_permissions_sync.py deleted file mode 100644 index 6133c0d7cf57..000000000000 --- a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_content_service_test_iam_permissions_sync.py +++ /dev/null @@ -1,54 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for TestIamPermissions -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-dataplex - - -# [START dataplex_v1_generated_ContentService_TestIamPermissions_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import dataplex_v1 -from google.iam.v1 import iam_policy_pb2 # type: ignore - - -def sample_test_iam_permissions(): - # Create a client - client = dataplex_v1.ContentServiceClient() - - # Initialize request argument(s) - request = iam_policy_pb2.TestIamPermissionsRequest( - resource="resource_value", - permissions=['permissions_value1', 'permissions_value2'], - ) - - # Make the request - response = client.test_iam_permissions(request=request) - - # Handle the response - print(response) - -# [END dataplex_v1_generated_ContentService_TestIamPermissions_sync] diff --git a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_content_service_update_content_async.py b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_content_service_update_content_async.py deleted file mode 100644 index bef234c8b411..000000000000 --- a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_content_service_update_content_async.py +++ /dev/null @@ -1,57 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for UpdateContent -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-dataplex - - -# [START dataplex_v1_generated_ContentService_UpdateContent_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import dataplex_v1 - - -async def sample_update_content(): - # Create a client - client = dataplex_v1.ContentServiceAsyncClient() - - # Initialize request argument(s) - content = dataplex_v1.Content() - content.data_text = "data_text_value" - content.sql_script.engine = "SPARK" - content.path = "path_value" - - request = dataplex_v1.UpdateContentRequest( - content=content, - ) - - # Make the request - response = await client.update_content(request=request) - - # Handle the response - print(response) - -# [END dataplex_v1_generated_ContentService_UpdateContent_async] diff --git a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_content_service_update_content_sync.py b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_content_service_update_content_sync.py deleted file mode 100644 index 5883d944b68c..000000000000 --- a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_content_service_update_content_sync.py +++ /dev/null @@ -1,57 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for UpdateContent -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-dataplex - - -# [START dataplex_v1_generated_ContentService_UpdateContent_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import dataplex_v1 - - -def sample_update_content(): - # Create a client - client = dataplex_v1.ContentServiceClient() - - # Initialize request argument(s) - content = dataplex_v1.Content() - content.data_text = "data_text_value" - content.sql_script.engine = "SPARK" - content.path = "path_value" - - request = dataplex_v1.UpdateContentRequest( - content=content, - ) - - # Make the request - response = client.update_content(request=request) - - # Handle the response - print(response) - -# [END dataplex_v1_generated_ContentService_UpdateContent_sync] diff --git a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_data_scan_service_create_data_scan_async.py b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_data_scan_service_create_data_scan_async.py deleted file mode 100644 index 21651df34ae5..000000000000 --- a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_data_scan_service_create_data_scan_async.py +++ /dev/null @@ -1,62 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for CreateDataScan -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-dataplex - - -# [START dataplex_v1_generated_DataScanService_CreateDataScan_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import dataplex_v1 - - -async def sample_create_data_scan(): - # Create a client - client = dataplex_v1.DataScanServiceAsyncClient() - - # Initialize request argument(s) - data_scan = dataplex_v1.DataScan() - data_scan.data_quality_spec.rules.dimension = "dimension_value" - data_scan.data.entity = "entity_value" - - request = dataplex_v1.CreateDataScanRequest( - parent="parent_value", - data_scan=data_scan, - data_scan_id="data_scan_id_value", - ) - - # Make the request - operation = client.create_data_scan(request=request) - - print("Waiting for operation to complete...") - - response = (await operation).result() - - # Handle the response - print(response) - -# [END dataplex_v1_generated_DataScanService_CreateDataScan_async] diff --git a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_data_scan_service_create_data_scan_sync.py b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_data_scan_service_create_data_scan_sync.py deleted file mode 100644 index 77776f6960df..000000000000 --- a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_data_scan_service_create_data_scan_sync.py +++ /dev/null @@ -1,62 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for CreateDataScan -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-dataplex - - -# [START dataplex_v1_generated_DataScanService_CreateDataScan_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import dataplex_v1 - - -def sample_create_data_scan(): - # Create a client - client = dataplex_v1.DataScanServiceClient() - - # Initialize request argument(s) - data_scan = dataplex_v1.DataScan() - data_scan.data_quality_spec.rules.dimension = "dimension_value" - data_scan.data.entity = "entity_value" - - request = dataplex_v1.CreateDataScanRequest( - parent="parent_value", - data_scan=data_scan, - data_scan_id="data_scan_id_value", - ) - - # Make the request - operation = client.create_data_scan(request=request) - - print("Waiting for operation to complete...") - - response = operation.result() - - # Handle the response - print(response) - -# [END dataplex_v1_generated_DataScanService_CreateDataScan_sync] diff --git a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_data_scan_service_delete_data_scan_async.py b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_data_scan_service_delete_data_scan_async.py deleted file mode 100644 index 06728d347dcc..000000000000 --- a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_data_scan_service_delete_data_scan_async.py +++ /dev/null @@ -1,56 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for DeleteDataScan -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-dataplex - - -# [START dataplex_v1_generated_DataScanService_DeleteDataScan_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import dataplex_v1 - - -async def sample_delete_data_scan(): - # Create a client - client = dataplex_v1.DataScanServiceAsyncClient() - - # Initialize request argument(s) - request = dataplex_v1.DeleteDataScanRequest( - name="name_value", - ) - - # Make the request - operation = client.delete_data_scan(request=request) - - print("Waiting for operation to complete...") - - response = (await operation).result() - - # Handle the response - print(response) - -# [END dataplex_v1_generated_DataScanService_DeleteDataScan_async] diff --git a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_data_scan_service_delete_data_scan_sync.py b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_data_scan_service_delete_data_scan_sync.py deleted file mode 100644 index d3b6b57e56cc..000000000000 --- a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_data_scan_service_delete_data_scan_sync.py +++ /dev/null @@ -1,56 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for DeleteDataScan -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-dataplex - - -# [START dataplex_v1_generated_DataScanService_DeleteDataScan_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import dataplex_v1 - - -def sample_delete_data_scan(): - # Create a client - client = dataplex_v1.DataScanServiceClient() - - # Initialize request argument(s) - request = dataplex_v1.DeleteDataScanRequest( - name="name_value", - ) - - # Make the request - operation = client.delete_data_scan(request=request) - - print("Waiting for operation to complete...") - - response = operation.result() - - # Handle the response - print(response) - -# [END dataplex_v1_generated_DataScanService_DeleteDataScan_sync] diff --git a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_data_scan_service_generate_data_quality_rules_async.py b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_data_scan_service_generate_data_quality_rules_async.py deleted file mode 100644 index cb75e6e697af..000000000000 --- a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_data_scan_service_generate_data_quality_rules_async.py +++ /dev/null @@ -1,52 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for GenerateDataQualityRules -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-dataplex - - -# [START dataplex_v1_generated_DataScanService_GenerateDataQualityRules_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import dataplex_v1 - - -async def sample_generate_data_quality_rules(): - # Create a client - client = dataplex_v1.DataScanServiceAsyncClient() - - # Initialize request argument(s) - request = dataplex_v1.GenerateDataQualityRulesRequest( - name="name_value", - ) - - # Make the request - response = await client.generate_data_quality_rules(request=request) - - # Handle the response - print(response) - -# [END dataplex_v1_generated_DataScanService_GenerateDataQualityRules_async] diff --git a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_data_scan_service_generate_data_quality_rules_sync.py b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_data_scan_service_generate_data_quality_rules_sync.py deleted file mode 100644 index ad0deadc75f9..000000000000 --- a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_data_scan_service_generate_data_quality_rules_sync.py +++ /dev/null @@ -1,52 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for GenerateDataQualityRules -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-dataplex - - -# [START dataplex_v1_generated_DataScanService_GenerateDataQualityRules_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import dataplex_v1 - - -def sample_generate_data_quality_rules(): - # Create a client - client = dataplex_v1.DataScanServiceClient() - - # Initialize request argument(s) - request = dataplex_v1.GenerateDataQualityRulesRequest( - name="name_value", - ) - - # Make the request - response = client.generate_data_quality_rules(request=request) - - # Handle the response - print(response) - -# [END dataplex_v1_generated_DataScanService_GenerateDataQualityRules_sync] diff --git a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_data_scan_service_get_data_scan_async.py b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_data_scan_service_get_data_scan_async.py deleted file mode 100644 index 676de83d3706..000000000000 --- a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_data_scan_service_get_data_scan_async.py +++ /dev/null @@ -1,52 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for GetDataScan -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-dataplex - - -# [START dataplex_v1_generated_DataScanService_GetDataScan_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import dataplex_v1 - - -async def sample_get_data_scan(): - # Create a client - client = dataplex_v1.DataScanServiceAsyncClient() - - # Initialize request argument(s) - request = dataplex_v1.GetDataScanRequest( - name="name_value", - ) - - # Make the request - response = await client.get_data_scan(request=request) - - # Handle the response - print(response) - -# [END dataplex_v1_generated_DataScanService_GetDataScan_async] diff --git a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_data_scan_service_get_data_scan_job_async.py b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_data_scan_service_get_data_scan_job_async.py deleted file mode 100644 index e835b0ca591a..000000000000 --- a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_data_scan_service_get_data_scan_job_async.py +++ /dev/null @@ -1,52 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for GetDataScanJob -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-dataplex - - -# [START dataplex_v1_generated_DataScanService_GetDataScanJob_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import dataplex_v1 - - -async def sample_get_data_scan_job(): - # Create a client - client = dataplex_v1.DataScanServiceAsyncClient() - - # Initialize request argument(s) - request = dataplex_v1.GetDataScanJobRequest( - name="name_value", - ) - - # Make the request - response = await client.get_data_scan_job(request=request) - - # Handle the response - print(response) - -# [END dataplex_v1_generated_DataScanService_GetDataScanJob_async] diff --git a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_data_scan_service_get_data_scan_job_sync.py b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_data_scan_service_get_data_scan_job_sync.py deleted file mode 100644 index bddec8d6189d..000000000000 --- a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_data_scan_service_get_data_scan_job_sync.py +++ /dev/null @@ -1,52 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for GetDataScanJob -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-dataplex - - -# [START dataplex_v1_generated_DataScanService_GetDataScanJob_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import dataplex_v1 - - -def sample_get_data_scan_job(): - # Create a client - client = dataplex_v1.DataScanServiceClient() - - # Initialize request argument(s) - request = dataplex_v1.GetDataScanJobRequest( - name="name_value", - ) - - # Make the request - response = client.get_data_scan_job(request=request) - - # Handle the response - print(response) - -# [END dataplex_v1_generated_DataScanService_GetDataScanJob_sync] diff --git a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_data_scan_service_get_data_scan_sync.py b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_data_scan_service_get_data_scan_sync.py deleted file mode 100644 index fe36809dbf5b..000000000000 --- a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_data_scan_service_get_data_scan_sync.py +++ /dev/null @@ -1,52 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for GetDataScan -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-dataplex - - -# [START dataplex_v1_generated_DataScanService_GetDataScan_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import dataplex_v1 - - -def sample_get_data_scan(): - # Create a client - client = dataplex_v1.DataScanServiceClient() - - # Initialize request argument(s) - request = dataplex_v1.GetDataScanRequest( - name="name_value", - ) - - # Make the request - response = client.get_data_scan(request=request) - - # Handle the response - print(response) - -# [END dataplex_v1_generated_DataScanService_GetDataScan_sync] diff --git a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_data_scan_service_list_data_scan_jobs_async.py b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_data_scan_service_list_data_scan_jobs_async.py deleted file mode 100644 index 9232f30a6311..000000000000 --- a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_data_scan_service_list_data_scan_jobs_async.py +++ /dev/null @@ -1,53 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for ListDataScanJobs -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-dataplex - - -# [START dataplex_v1_generated_DataScanService_ListDataScanJobs_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import dataplex_v1 - - -async def sample_list_data_scan_jobs(): - # Create a client - client = dataplex_v1.DataScanServiceAsyncClient() - - # Initialize request argument(s) - request = dataplex_v1.ListDataScanJobsRequest( - parent="parent_value", - ) - - # Make the request - page_result = client.list_data_scan_jobs(request=request) - - # Handle the response - async for response in page_result: - print(response) - -# [END dataplex_v1_generated_DataScanService_ListDataScanJobs_async] diff --git a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_data_scan_service_list_data_scan_jobs_sync.py b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_data_scan_service_list_data_scan_jobs_sync.py deleted file mode 100644 index e7cb0443ec86..000000000000 --- a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_data_scan_service_list_data_scan_jobs_sync.py +++ /dev/null @@ -1,53 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for ListDataScanJobs -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-dataplex - - -# [START dataplex_v1_generated_DataScanService_ListDataScanJobs_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import dataplex_v1 - - -def sample_list_data_scan_jobs(): - # Create a client - client = dataplex_v1.DataScanServiceClient() - - # Initialize request argument(s) - request = dataplex_v1.ListDataScanJobsRequest( - parent="parent_value", - ) - - # Make the request - page_result = client.list_data_scan_jobs(request=request) - - # Handle the response - for response in page_result: - print(response) - -# [END dataplex_v1_generated_DataScanService_ListDataScanJobs_sync] diff --git a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_data_scan_service_list_data_scans_async.py b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_data_scan_service_list_data_scans_async.py deleted file mode 100644 index 51b2f86cfdd9..000000000000 --- a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_data_scan_service_list_data_scans_async.py +++ /dev/null @@ -1,53 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for ListDataScans -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-dataplex - - -# [START dataplex_v1_generated_DataScanService_ListDataScans_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import dataplex_v1 - - -async def sample_list_data_scans(): - # Create a client - client = dataplex_v1.DataScanServiceAsyncClient() - - # Initialize request argument(s) - request = dataplex_v1.ListDataScansRequest( - parent="parent_value", - ) - - # Make the request - page_result = client.list_data_scans(request=request) - - # Handle the response - async for response in page_result: - print(response) - -# [END dataplex_v1_generated_DataScanService_ListDataScans_async] diff --git a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_data_scan_service_list_data_scans_sync.py b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_data_scan_service_list_data_scans_sync.py deleted file mode 100644 index 7325c27fda56..000000000000 --- a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_data_scan_service_list_data_scans_sync.py +++ /dev/null @@ -1,53 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for ListDataScans -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-dataplex - - -# [START dataplex_v1_generated_DataScanService_ListDataScans_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import dataplex_v1 - - -def sample_list_data_scans(): - # Create a client - client = dataplex_v1.DataScanServiceClient() - - # Initialize request argument(s) - request = dataplex_v1.ListDataScansRequest( - parent="parent_value", - ) - - # Make the request - page_result = client.list_data_scans(request=request) - - # Handle the response - for response in page_result: - print(response) - -# [END dataplex_v1_generated_DataScanService_ListDataScans_sync] diff --git a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_data_scan_service_run_data_scan_async.py b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_data_scan_service_run_data_scan_async.py deleted file mode 100644 index 98a6b2322fcf..000000000000 --- a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_data_scan_service_run_data_scan_async.py +++ /dev/null @@ -1,52 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for RunDataScan -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-dataplex - - -# [START dataplex_v1_generated_DataScanService_RunDataScan_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import dataplex_v1 - - -async def sample_run_data_scan(): - # Create a client - client = dataplex_v1.DataScanServiceAsyncClient() - - # Initialize request argument(s) - request = dataplex_v1.RunDataScanRequest( - name="name_value", - ) - - # Make the request - response = await client.run_data_scan(request=request) - - # Handle the response - print(response) - -# [END dataplex_v1_generated_DataScanService_RunDataScan_async] diff --git a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_data_scan_service_run_data_scan_sync.py b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_data_scan_service_run_data_scan_sync.py deleted file mode 100644 index e895451b7053..000000000000 --- a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_data_scan_service_run_data_scan_sync.py +++ /dev/null @@ -1,52 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for RunDataScan -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-dataplex - - -# [START dataplex_v1_generated_DataScanService_RunDataScan_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import dataplex_v1 - - -def sample_run_data_scan(): - # Create a client - client = dataplex_v1.DataScanServiceClient() - - # Initialize request argument(s) - request = dataplex_v1.RunDataScanRequest( - name="name_value", - ) - - # Make the request - response = client.run_data_scan(request=request) - - # Handle the response - print(response) - -# [END dataplex_v1_generated_DataScanService_RunDataScan_sync] diff --git a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_data_scan_service_update_data_scan_async.py b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_data_scan_service_update_data_scan_async.py deleted file mode 100644 index 124a401a492f..000000000000 --- a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_data_scan_service_update_data_scan_async.py +++ /dev/null @@ -1,60 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for UpdateDataScan -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-dataplex - - -# [START dataplex_v1_generated_DataScanService_UpdateDataScan_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import dataplex_v1 - - -async def sample_update_data_scan(): - # Create a client - client = dataplex_v1.DataScanServiceAsyncClient() - - # Initialize request argument(s) - data_scan = dataplex_v1.DataScan() - data_scan.data_quality_spec.rules.dimension = "dimension_value" - data_scan.data.entity = "entity_value" - - request = dataplex_v1.UpdateDataScanRequest( - data_scan=data_scan, - ) - - # Make the request - operation = client.update_data_scan(request=request) - - print("Waiting for operation to complete...") - - response = (await operation).result() - - # Handle the response - print(response) - -# [END dataplex_v1_generated_DataScanService_UpdateDataScan_async] diff --git a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_data_scan_service_update_data_scan_sync.py b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_data_scan_service_update_data_scan_sync.py deleted file mode 100644 index 558f26403249..000000000000 --- a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_data_scan_service_update_data_scan_sync.py +++ /dev/null @@ -1,60 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for UpdateDataScan -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-dataplex - - -# [START dataplex_v1_generated_DataScanService_UpdateDataScan_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import dataplex_v1 - - -def sample_update_data_scan(): - # Create a client - client = dataplex_v1.DataScanServiceClient() - - # Initialize request argument(s) - data_scan = dataplex_v1.DataScan() - data_scan.data_quality_spec.rules.dimension = "dimension_value" - data_scan.data.entity = "entity_value" - - request = dataplex_v1.UpdateDataScanRequest( - data_scan=data_scan, - ) - - # Make the request - operation = client.update_data_scan(request=request) - - print("Waiting for operation to complete...") - - response = operation.result() - - # Handle the response - print(response) - -# [END dataplex_v1_generated_DataScanService_UpdateDataScan_sync] diff --git a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_data_taxonomy_service_create_data_attribute_async.py b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_data_taxonomy_service_create_data_attribute_async.py deleted file mode 100644 index a005a033674c..000000000000 --- a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_data_taxonomy_service_create_data_attribute_async.py +++ /dev/null @@ -1,57 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for CreateDataAttribute -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-dataplex - - -# [START dataplex_v1_generated_DataTaxonomyService_CreateDataAttribute_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import dataplex_v1 - - -async def sample_create_data_attribute(): - # Create a client - client = dataplex_v1.DataTaxonomyServiceAsyncClient() - - # Initialize request argument(s) - request = dataplex_v1.CreateDataAttributeRequest( - parent="parent_value", - data_attribute_id="data_attribute_id_value", - ) - - # Make the request - operation = client.create_data_attribute(request=request) - - print("Waiting for operation to complete...") - - response = (await operation).result() - - # Handle the response - print(response) - -# [END dataplex_v1_generated_DataTaxonomyService_CreateDataAttribute_async] diff --git a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_data_taxonomy_service_create_data_attribute_binding_async.py b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_data_taxonomy_service_create_data_attribute_binding_async.py deleted file mode 100644 index d182498546d6..000000000000 --- a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_data_taxonomy_service_create_data_attribute_binding_async.py +++ /dev/null @@ -1,61 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for CreateDataAttributeBinding -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-dataplex - - -# [START dataplex_v1_generated_DataTaxonomyService_CreateDataAttributeBinding_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import dataplex_v1 - - -async def sample_create_data_attribute_binding(): - # Create a client - client = dataplex_v1.DataTaxonomyServiceAsyncClient() - - # Initialize request argument(s) - data_attribute_binding = dataplex_v1.DataAttributeBinding() - data_attribute_binding.resource = "resource_value" - - request = dataplex_v1.CreateDataAttributeBindingRequest( - parent="parent_value", - data_attribute_binding_id="data_attribute_binding_id_value", - data_attribute_binding=data_attribute_binding, - ) - - # Make the request - operation = client.create_data_attribute_binding(request=request) - - print("Waiting for operation to complete...") - - response = (await operation).result() - - # Handle the response - print(response) - -# [END dataplex_v1_generated_DataTaxonomyService_CreateDataAttributeBinding_async] diff --git a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_data_taxonomy_service_create_data_attribute_binding_sync.py b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_data_taxonomy_service_create_data_attribute_binding_sync.py deleted file mode 100644 index cab78ee20922..000000000000 --- a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_data_taxonomy_service_create_data_attribute_binding_sync.py +++ /dev/null @@ -1,61 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for CreateDataAttributeBinding -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-dataplex - - -# [START dataplex_v1_generated_DataTaxonomyService_CreateDataAttributeBinding_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import dataplex_v1 - - -def sample_create_data_attribute_binding(): - # Create a client - client = dataplex_v1.DataTaxonomyServiceClient() - - # Initialize request argument(s) - data_attribute_binding = dataplex_v1.DataAttributeBinding() - data_attribute_binding.resource = "resource_value" - - request = dataplex_v1.CreateDataAttributeBindingRequest( - parent="parent_value", - data_attribute_binding_id="data_attribute_binding_id_value", - data_attribute_binding=data_attribute_binding, - ) - - # Make the request - operation = client.create_data_attribute_binding(request=request) - - print("Waiting for operation to complete...") - - response = operation.result() - - # Handle the response - print(response) - -# [END dataplex_v1_generated_DataTaxonomyService_CreateDataAttributeBinding_sync] diff --git a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_data_taxonomy_service_create_data_attribute_sync.py b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_data_taxonomy_service_create_data_attribute_sync.py deleted file mode 100644 index d560872c9a07..000000000000 --- a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_data_taxonomy_service_create_data_attribute_sync.py +++ /dev/null @@ -1,57 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for CreateDataAttribute -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-dataplex - - -# [START dataplex_v1_generated_DataTaxonomyService_CreateDataAttribute_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import dataplex_v1 - - -def sample_create_data_attribute(): - # Create a client - client = dataplex_v1.DataTaxonomyServiceClient() - - # Initialize request argument(s) - request = dataplex_v1.CreateDataAttributeRequest( - parent="parent_value", - data_attribute_id="data_attribute_id_value", - ) - - # Make the request - operation = client.create_data_attribute(request=request) - - print("Waiting for operation to complete...") - - response = operation.result() - - # Handle the response - print(response) - -# [END dataplex_v1_generated_DataTaxonomyService_CreateDataAttribute_sync] diff --git a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_data_taxonomy_service_create_data_taxonomy_async.py b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_data_taxonomy_service_create_data_taxonomy_async.py deleted file mode 100644 index 1217b679c08e..000000000000 --- a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_data_taxonomy_service_create_data_taxonomy_async.py +++ /dev/null @@ -1,57 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for CreateDataTaxonomy -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-dataplex - - -# [START dataplex_v1_generated_DataTaxonomyService_CreateDataTaxonomy_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import dataplex_v1 - - -async def sample_create_data_taxonomy(): - # Create a client - client = dataplex_v1.DataTaxonomyServiceAsyncClient() - - # Initialize request argument(s) - request = dataplex_v1.CreateDataTaxonomyRequest( - parent="parent_value", - data_taxonomy_id="data_taxonomy_id_value", - ) - - # Make the request - operation = client.create_data_taxonomy(request=request) - - print("Waiting for operation to complete...") - - response = (await operation).result() - - # Handle the response - print(response) - -# [END dataplex_v1_generated_DataTaxonomyService_CreateDataTaxonomy_async] diff --git a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_data_taxonomy_service_create_data_taxonomy_sync.py b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_data_taxonomy_service_create_data_taxonomy_sync.py deleted file mode 100644 index a6978897a170..000000000000 --- a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_data_taxonomy_service_create_data_taxonomy_sync.py +++ /dev/null @@ -1,57 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for CreateDataTaxonomy -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-dataplex - - -# [START dataplex_v1_generated_DataTaxonomyService_CreateDataTaxonomy_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import dataplex_v1 - - -def sample_create_data_taxonomy(): - # Create a client - client = dataplex_v1.DataTaxonomyServiceClient() - - # Initialize request argument(s) - request = dataplex_v1.CreateDataTaxonomyRequest( - parent="parent_value", - data_taxonomy_id="data_taxonomy_id_value", - ) - - # Make the request - operation = client.create_data_taxonomy(request=request) - - print("Waiting for operation to complete...") - - response = operation.result() - - # Handle the response - print(response) - -# [END dataplex_v1_generated_DataTaxonomyService_CreateDataTaxonomy_sync] diff --git a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_data_taxonomy_service_delete_data_attribute_async.py b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_data_taxonomy_service_delete_data_attribute_async.py deleted file mode 100644 index 22ddcc5604ff..000000000000 --- a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_data_taxonomy_service_delete_data_attribute_async.py +++ /dev/null @@ -1,56 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for DeleteDataAttribute -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-dataplex - - -# [START dataplex_v1_generated_DataTaxonomyService_DeleteDataAttribute_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import dataplex_v1 - - -async def sample_delete_data_attribute(): - # Create a client - client = dataplex_v1.DataTaxonomyServiceAsyncClient() - - # Initialize request argument(s) - request = dataplex_v1.DeleteDataAttributeRequest( - name="name_value", - ) - - # Make the request - operation = client.delete_data_attribute(request=request) - - print("Waiting for operation to complete...") - - response = (await operation).result() - - # Handle the response - print(response) - -# [END dataplex_v1_generated_DataTaxonomyService_DeleteDataAttribute_async] diff --git a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_data_taxonomy_service_delete_data_attribute_binding_async.py b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_data_taxonomy_service_delete_data_attribute_binding_async.py deleted file mode 100644 index 4576cb8067f2..000000000000 --- a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_data_taxonomy_service_delete_data_attribute_binding_async.py +++ /dev/null @@ -1,57 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for DeleteDataAttributeBinding -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-dataplex - - -# [START dataplex_v1_generated_DataTaxonomyService_DeleteDataAttributeBinding_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import dataplex_v1 - - -async def sample_delete_data_attribute_binding(): - # Create a client - client = dataplex_v1.DataTaxonomyServiceAsyncClient() - - # Initialize request argument(s) - request = dataplex_v1.DeleteDataAttributeBindingRequest( - name="name_value", - etag="etag_value", - ) - - # Make the request - operation = client.delete_data_attribute_binding(request=request) - - print("Waiting for operation to complete...") - - response = (await operation).result() - - # Handle the response - print(response) - -# [END dataplex_v1_generated_DataTaxonomyService_DeleteDataAttributeBinding_async] diff --git a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_data_taxonomy_service_delete_data_attribute_binding_sync.py b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_data_taxonomy_service_delete_data_attribute_binding_sync.py deleted file mode 100644 index 84d600eb7954..000000000000 --- a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_data_taxonomy_service_delete_data_attribute_binding_sync.py +++ /dev/null @@ -1,57 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for DeleteDataAttributeBinding -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-dataplex - - -# [START dataplex_v1_generated_DataTaxonomyService_DeleteDataAttributeBinding_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import dataplex_v1 - - -def sample_delete_data_attribute_binding(): - # Create a client - client = dataplex_v1.DataTaxonomyServiceClient() - - # Initialize request argument(s) - request = dataplex_v1.DeleteDataAttributeBindingRequest( - name="name_value", - etag="etag_value", - ) - - # Make the request - operation = client.delete_data_attribute_binding(request=request) - - print("Waiting for operation to complete...") - - response = operation.result() - - # Handle the response - print(response) - -# [END dataplex_v1_generated_DataTaxonomyService_DeleteDataAttributeBinding_sync] diff --git a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_data_taxonomy_service_delete_data_attribute_sync.py b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_data_taxonomy_service_delete_data_attribute_sync.py deleted file mode 100644 index 80ed777ff441..000000000000 --- a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_data_taxonomy_service_delete_data_attribute_sync.py +++ /dev/null @@ -1,56 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for DeleteDataAttribute -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-dataplex - - -# [START dataplex_v1_generated_DataTaxonomyService_DeleteDataAttribute_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import dataplex_v1 - - -def sample_delete_data_attribute(): - # Create a client - client = dataplex_v1.DataTaxonomyServiceClient() - - # Initialize request argument(s) - request = dataplex_v1.DeleteDataAttributeRequest( - name="name_value", - ) - - # Make the request - operation = client.delete_data_attribute(request=request) - - print("Waiting for operation to complete...") - - response = operation.result() - - # Handle the response - print(response) - -# [END dataplex_v1_generated_DataTaxonomyService_DeleteDataAttribute_sync] diff --git a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_data_taxonomy_service_delete_data_taxonomy_async.py b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_data_taxonomy_service_delete_data_taxonomy_async.py deleted file mode 100644 index 4c5ef9e4b313..000000000000 --- a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_data_taxonomy_service_delete_data_taxonomy_async.py +++ /dev/null @@ -1,56 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for DeleteDataTaxonomy -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-dataplex - - -# [START dataplex_v1_generated_DataTaxonomyService_DeleteDataTaxonomy_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import dataplex_v1 - - -async def sample_delete_data_taxonomy(): - # Create a client - client = dataplex_v1.DataTaxonomyServiceAsyncClient() - - # Initialize request argument(s) - request = dataplex_v1.DeleteDataTaxonomyRequest( - name="name_value", - ) - - # Make the request - operation = client.delete_data_taxonomy(request=request) - - print("Waiting for operation to complete...") - - response = (await operation).result() - - # Handle the response - print(response) - -# [END dataplex_v1_generated_DataTaxonomyService_DeleteDataTaxonomy_async] diff --git a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_data_taxonomy_service_delete_data_taxonomy_sync.py b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_data_taxonomy_service_delete_data_taxonomy_sync.py deleted file mode 100644 index 08883b505729..000000000000 --- a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_data_taxonomy_service_delete_data_taxonomy_sync.py +++ /dev/null @@ -1,56 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for DeleteDataTaxonomy -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-dataplex - - -# [START dataplex_v1_generated_DataTaxonomyService_DeleteDataTaxonomy_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import dataplex_v1 - - -def sample_delete_data_taxonomy(): - # Create a client - client = dataplex_v1.DataTaxonomyServiceClient() - - # Initialize request argument(s) - request = dataplex_v1.DeleteDataTaxonomyRequest( - name="name_value", - ) - - # Make the request - operation = client.delete_data_taxonomy(request=request) - - print("Waiting for operation to complete...") - - response = operation.result() - - # Handle the response - print(response) - -# [END dataplex_v1_generated_DataTaxonomyService_DeleteDataTaxonomy_sync] diff --git a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_data_taxonomy_service_get_data_attribute_async.py b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_data_taxonomy_service_get_data_attribute_async.py deleted file mode 100644 index 350639322e18..000000000000 --- a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_data_taxonomy_service_get_data_attribute_async.py +++ /dev/null @@ -1,52 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for GetDataAttribute -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-dataplex - - -# [START dataplex_v1_generated_DataTaxonomyService_GetDataAttribute_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import dataplex_v1 - - -async def sample_get_data_attribute(): - # Create a client - client = dataplex_v1.DataTaxonomyServiceAsyncClient() - - # Initialize request argument(s) - request = dataplex_v1.GetDataAttributeRequest( - name="name_value", - ) - - # Make the request - response = await client.get_data_attribute(request=request) - - # Handle the response - print(response) - -# [END dataplex_v1_generated_DataTaxonomyService_GetDataAttribute_async] diff --git a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_data_taxonomy_service_get_data_attribute_binding_async.py b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_data_taxonomy_service_get_data_attribute_binding_async.py deleted file mode 100644 index 94a95946fcd5..000000000000 --- a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_data_taxonomy_service_get_data_attribute_binding_async.py +++ /dev/null @@ -1,52 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for GetDataAttributeBinding -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-dataplex - - -# [START dataplex_v1_generated_DataTaxonomyService_GetDataAttributeBinding_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import dataplex_v1 - - -async def sample_get_data_attribute_binding(): - # Create a client - client = dataplex_v1.DataTaxonomyServiceAsyncClient() - - # Initialize request argument(s) - request = dataplex_v1.GetDataAttributeBindingRequest( - name="name_value", - ) - - # Make the request - response = await client.get_data_attribute_binding(request=request) - - # Handle the response - print(response) - -# [END dataplex_v1_generated_DataTaxonomyService_GetDataAttributeBinding_async] diff --git a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_data_taxonomy_service_get_data_attribute_binding_sync.py b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_data_taxonomy_service_get_data_attribute_binding_sync.py deleted file mode 100644 index 94f2288be733..000000000000 --- a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_data_taxonomy_service_get_data_attribute_binding_sync.py +++ /dev/null @@ -1,52 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for GetDataAttributeBinding -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-dataplex - - -# [START dataplex_v1_generated_DataTaxonomyService_GetDataAttributeBinding_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service
-#   client as shown in:
-#   https://googleapis.dev/python/google-api-core/latest/client_options.html
-from google.cloud import dataplex_v1
-
-
-def sample_get_data_attribute_binding():
-    # Create a client
-    client = dataplex_v1.DataTaxonomyServiceClient()
-
-    # Initialize request argument(s)
-    request = dataplex_v1.GetDataAttributeBindingRequest(
-        name="name_value",
-    )
-
-    # Make the request
-    response = client.get_data_attribute_binding(request=request)
-
-    # Handle the response
-    print(response)
-
-# [END dataplex_v1_generated_DataTaxonomyService_GetDataAttributeBinding_sync]
diff --git a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_data_taxonomy_service_get_data_attribute_sync.py b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_data_taxonomy_service_get_data_attribute_sync.py
deleted file mode 100644
index 2a1adb57e6df..000000000000
--- a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_data_taxonomy_service_get_data_attribute_sync.py
+++ /dev/null
@@ -1,52 +0,0 @@
-# -*- coding: utf-8 -*-
-# Copyright 2024 Google LLC
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-# Generated code. DO NOT EDIT!
-#
-# Snippet for GetDataAttribute
-# NOTE: This snippet has been automatically generated for illustrative purposes only.
-# It may require modifications to work in your environment.
-
-# To install the latest published package dependency, execute the following:
-#   python3 -m pip install google-cloud-dataplex
-
-
-# [START dataplex_v1_generated_DataTaxonomyService_GetDataAttribute_sync]
-# This snippet has been automatically generated and should be regarded as a
-# code template only.
-# It will require modifications to work:
-# - It may require correct/in-range values for request initialization.
-# - It may require specifying regional endpoints when creating the service
-#   client as shown in:
-#   https://googleapis.dev/python/google-api-core/latest/client_options.html
-from google.cloud import dataplex_v1
-
-
-def sample_get_data_attribute():
-    # Create a client
-    client = dataplex_v1.DataTaxonomyServiceClient()
-
-    # Initialize request argument(s)
-    request = dataplex_v1.GetDataAttributeRequest(
-        name="name_value",
-    )
-
-    # Make the request
-    response = client.get_data_attribute(request=request)
-
-    # Handle the response
-    print(response)
-
-# [END dataplex_v1_generated_DataTaxonomyService_GetDataAttribute_sync]
diff --git a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_data_taxonomy_service_get_data_taxonomy_async.py b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_data_taxonomy_service_get_data_taxonomy_async.py
deleted file mode 100644
index f66f77ea22a3..000000000000
--- a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_data_taxonomy_service_get_data_taxonomy_async.py
+++ /dev/null
@@ -1,52 +0,0 @@
-# -*- coding: utf-8 -*-
-# Copyright 2024 Google LLC
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-# Generated code. DO NOT EDIT!
-#
-# Snippet for GetDataTaxonomy
-# NOTE: This snippet has been automatically generated for illustrative purposes only.
-# It may require modifications to work in your environment.
-
-# To install the latest published package dependency, execute the following:
-#   python3 -m pip install google-cloud-dataplex
-
-
-# [START dataplex_v1_generated_DataTaxonomyService_GetDataTaxonomy_async]
-# This snippet has been automatically generated and should be regarded as a
-# code template only.
-# It will require modifications to work:
-# - It may require correct/in-range values for request initialization.
-# - It may require specifying regional endpoints when creating the service
-#   client as shown in:
-#   https://googleapis.dev/python/google-api-core/latest/client_options.html
-from google.cloud import dataplex_v1
-
-
-async def sample_get_data_taxonomy():
-    # Create a client
-    client = dataplex_v1.DataTaxonomyServiceAsyncClient()
-
-    # Initialize request argument(s)
-    request = dataplex_v1.GetDataTaxonomyRequest(
-        name="name_value",
-    )
-
-    # Make the request
-    response = await client.get_data_taxonomy(request=request)
-
-    # Handle the response
-    print(response)
-
-# [END dataplex_v1_generated_DataTaxonomyService_GetDataTaxonomy_async]
diff --git a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_data_taxonomy_service_get_data_taxonomy_sync.py b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_data_taxonomy_service_get_data_taxonomy_sync.py
deleted file mode 100644
index 15f085db3c20..000000000000
--- a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_data_taxonomy_service_get_data_taxonomy_sync.py
+++ /dev/null
@@ -1,52 +0,0 @@
-# -*- coding: utf-8 -*-
-# Copyright 2024 Google LLC
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-# Generated code. DO NOT EDIT!
-#
-# Snippet for GetDataTaxonomy
-# NOTE: This snippet has been automatically generated for illustrative purposes only.
-# It may require modifications to work in your environment.
-
-# To install the latest published package dependency, execute the following:
-#   python3 -m pip install google-cloud-dataplex
-
-
-# [START dataplex_v1_generated_DataTaxonomyService_GetDataTaxonomy_sync]
-# This snippet has been automatically generated and should be regarded as a
-# code template only.
-# It will require modifications to work:
-# - It may require correct/in-range values for request initialization.
-# - It may require specifying regional endpoints when creating the service
-#   client as shown in:
-#   https://googleapis.dev/python/google-api-core/latest/client_options.html
-from google.cloud import dataplex_v1
-
-
-def sample_get_data_taxonomy():
-    # Create a client
-    client = dataplex_v1.DataTaxonomyServiceClient()
-
-    # Initialize request argument(s)
-    request = dataplex_v1.GetDataTaxonomyRequest(
-        name="name_value",
-    )
-
-    # Make the request
-    response = client.get_data_taxonomy(request=request)
-
-    # Handle the response
-    print(response)
-
-# [END dataplex_v1_generated_DataTaxonomyService_GetDataTaxonomy_sync]
diff --git a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_data_taxonomy_service_list_data_attribute_bindings_async.py b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_data_taxonomy_service_list_data_attribute_bindings_async.py
deleted file mode 100644
index 207e67a7215a..000000000000
--- a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_data_taxonomy_service_list_data_attribute_bindings_async.py
+++ /dev/null
@@ -1,53 +0,0 @@
-# -*- coding: utf-8 -*-
-# Copyright 2024 Google LLC
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-# Generated code. DO NOT EDIT!
-#
-# Snippet for ListDataAttributeBindings
-# NOTE: This snippet has been automatically generated for illustrative purposes only.
-# It may require modifications to work in your environment.
-
-# To install the latest published package dependency, execute the following:
-#   python3 -m pip install google-cloud-dataplex
-
-
-# [START dataplex_v1_generated_DataTaxonomyService_ListDataAttributeBindings_async]
-# This snippet has been automatically generated and should be regarded as a
-# code template only.
-# It will require modifications to work:
-# - It may require correct/in-range values for request initialization.
-# - It may require specifying regional endpoints when creating the service
-#   client as shown in:
-#   https://googleapis.dev/python/google-api-core/latest/client_options.html
-from google.cloud import dataplex_v1
-
-
-async def sample_list_data_attribute_bindings():
-    # Create a client
-    client = dataplex_v1.DataTaxonomyServiceAsyncClient()
-
-    # Initialize request argument(s)
-    request = dataplex_v1.ListDataAttributeBindingsRequest(
-        parent="parent_value",
-    )
-
-    # Make the request
-    page_result = client.list_data_attribute_bindings(request=request)
-
-    # Handle the response
-    async for response in page_result:
-        print(response)
-
-# [END dataplex_v1_generated_DataTaxonomyService_ListDataAttributeBindings_async]
diff --git a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_data_taxonomy_service_list_data_attribute_bindings_sync.py b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_data_taxonomy_service_list_data_attribute_bindings_sync.py
deleted file mode 100644
index 9139b66840ed..000000000000
--- a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_data_taxonomy_service_list_data_attribute_bindings_sync.py
+++ /dev/null
@@ -1,53 +0,0 @@
-# -*- coding: utf-8 -*-
-# Copyright 2024 Google LLC
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-# Generated code. DO NOT EDIT!
-#
-# Snippet for ListDataAttributeBindings
-# NOTE: This snippet has been automatically generated for illustrative purposes only.
-# It may require modifications to work in your environment.
-
-# To install the latest published package dependency, execute the following:
-#   python3 -m pip install google-cloud-dataplex
-
-
-# [START dataplex_v1_generated_DataTaxonomyService_ListDataAttributeBindings_sync]
-# This snippet has been automatically generated and should be regarded as a
-# code template only.
-# It will require modifications to work:
-# - It may require correct/in-range values for request initialization.
-# - It may require specifying regional endpoints when creating the service
-#   client as shown in:
-#   https://googleapis.dev/python/google-api-core/latest/client_options.html
-from google.cloud import dataplex_v1
-
-
-def sample_list_data_attribute_bindings():
-    # Create a client
-    client = dataplex_v1.DataTaxonomyServiceClient()
-
-    # Initialize request argument(s)
-    request = dataplex_v1.ListDataAttributeBindingsRequest(
-        parent="parent_value",
-    )
-
-    # Make the request
-    page_result = client.list_data_attribute_bindings(request=request)
-
-    # Handle the response
-    for response in page_result:
-        print(response)
-
-# [END dataplex_v1_generated_DataTaxonomyService_ListDataAttributeBindings_sync]
diff --git a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_data_taxonomy_service_list_data_attributes_async.py b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_data_taxonomy_service_list_data_attributes_async.py
deleted file mode 100644
index 6e9569b3ea57..000000000000
--- a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_data_taxonomy_service_list_data_attributes_async.py
+++ /dev/null
@@ -1,53 +0,0 @@
-# -*- coding: utf-8 -*-
-# Copyright 2024 Google LLC
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-# Generated code. DO NOT EDIT!
-#
-# Snippet for ListDataAttributes
-# NOTE: This snippet has been automatically generated for illustrative purposes only.
-# It may require modifications to work in your environment.
-
-# To install the latest published package dependency, execute the following:
-#   python3 -m pip install google-cloud-dataplex
-
-
-# [START dataplex_v1_generated_DataTaxonomyService_ListDataAttributes_async]
-# This snippet has been automatically generated and should be regarded as a
-# code template only.
-# It will require modifications to work:
-# - It may require correct/in-range values for request initialization.
-# - It may require specifying regional endpoints when creating the service
-#   client as shown in:
-#   https://googleapis.dev/python/google-api-core/latest/client_options.html
-from google.cloud import dataplex_v1
-
-
-async def sample_list_data_attributes():
-    # Create a client
-    client = dataplex_v1.DataTaxonomyServiceAsyncClient()
-
-    # Initialize request argument(s)
-    request = dataplex_v1.ListDataAttributesRequest(
-        parent="parent_value",
-    )
-
-    # Make the request
-    page_result = client.list_data_attributes(request=request)
-
-    # Handle the response
-    async for response in page_result:
-        print(response)
-
-# [END dataplex_v1_generated_DataTaxonomyService_ListDataAttributes_async]
diff --git a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_data_taxonomy_service_list_data_attributes_sync.py b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_data_taxonomy_service_list_data_attributes_sync.py
deleted file mode 100644
index 16454380d76d..000000000000
--- a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_data_taxonomy_service_list_data_attributes_sync.py
+++ /dev/null
@@ -1,53 +0,0 @@
-# -*- coding: utf-8 -*-
-# Copyright 2024 Google LLC
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-# Generated code. DO NOT EDIT!
-#
-# Snippet for ListDataAttributes
-# NOTE: This snippet has been automatically generated for illustrative purposes only.
-# It may require modifications to work in your environment.
-
-# To install the latest published package dependency, execute the following:
-#   python3 -m pip install google-cloud-dataplex
-
-
-# [START dataplex_v1_generated_DataTaxonomyService_ListDataAttributes_sync]
-# This snippet has been automatically generated and should be regarded as a
-# code template only.
-# It will require modifications to work:
-# - It may require correct/in-range values for request initialization.
-# - It may require specifying regional endpoints when creating the service
-#   client as shown in:
-#   https://googleapis.dev/python/google-api-core/latest/client_options.html
-from google.cloud import dataplex_v1
-
-
-def sample_list_data_attributes():
-    # Create a client
-    client = dataplex_v1.DataTaxonomyServiceClient()
-
-    # Initialize request argument(s)
-    request = dataplex_v1.ListDataAttributesRequest(
-        parent="parent_value",
-    )
-
-    # Make the request
-    page_result = client.list_data_attributes(request=request)
-
-    # Handle the response
-    for response in page_result:
-        print(response)
-
-# [END dataplex_v1_generated_DataTaxonomyService_ListDataAttributes_sync]
diff --git a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_data_taxonomy_service_list_data_taxonomies_async.py b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_data_taxonomy_service_list_data_taxonomies_async.py
deleted file mode 100644
index 6a1b36361816..000000000000
--- a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_data_taxonomy_service_list_data_taxonomies_async.py
+++ /dev/null
@@ -1,53 +0,0 @@
-# -*- coding: utf-8 -*-
-# Copyright 2024 Google LLC
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-# Generated code. DO NOT EDIT!
-#
-# Snippet for ListDataTaxonomies
-# NOTE: This snippet has been automatically generated for illustrative purposes only.
-# It may require modifications to work in your environment.
-
-# To install the latest published package dependency, execute the following:
-#   python3 -m pip install google-cloud-dataplex
-
-
-# [START dataplex_v1_generated_DataTaxonomyService_ListDataTaxonomies_async]
-# This snippet has been automatically generated and should be regarded as a
-# code template only.
-# It will require modifications to work:
-# - It may require correct/in-range values for request initialization.
-# - It may require specifying regional endpoints when creating the service
-#   client as shown in:
-#   https://googleapis.dev/python/google-api-core/latest/client_options.html
-from google.cloud import dataplex_v1
-
-
-async def sample_list_data_taxonomies():
-    # Create a client
-    client = dataplex_v1.DataTaxonomyServiceAsyncClient()
-
-    # Initialize request argument(s)
-    request = dataplex_v1.ListDataTaxonomiesRequest(
-        parent="parent_value",
-    )
-
-    # Make the request
-    page_result = client.list_data_taxonomies(request=request)
-
-    # Handle the response
-    async for response in page_result:
-        print(response)
-
-# [END dataplex_v1_generated_DataTaxonomyService_ListDataTaxonomies_async]
diff --git a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_data_taxonomy_service_list_data_taxonomies_sync.py b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_data_taxonomy_service_list_data_taxonomies_sync.py
deleted file mode 100644
index cba0a7942e3b..000000000000
--- a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_data_taxonomy_service_list_data_taxonomies_sync.py
+++ /dev/null
@@ -1,53 +0,0 @@
-# -*- coding: utf-8 -*-
-# Copyright 2024 Google LLC
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-# Generated code. DO NOT EDIT!
-#
-# Snippet for ListDataTaxonomies
-# NOTE: This snippet has been automatically generated for illustrative purposes only.
-# It may require modifications to work in your environment.
-
-# To install the latest published package dependency, execute the following:
-#   python3 -m pip install google-cloud-dataplex
-
-
-# [START dataplex_v1_generated_DataTaxonomyService_ListDataTaxonomies_sync]
-# This snippet has been automatically generated and should be regarded as a
-# code template only.
-# It will require modifications to work:
-# - It may require correct/in-range values for request initialization.
-# - It may require specifying regional endpoints when creating the service
-#   client as shown in:
-#   https://googleapis.dev/python/google-api-core/latest/client_options.html
-from google.cloud import dataplex_v1
-
-
-def sample_list_data_taxonomies():
-    # Create a client
-    client = dataplex_v1.DataTaxonomyServiceClient()
-
-    # Initialize request argument(s)
-    request = dataplex_v1.ListDataTaxonomiesRequest(
-        parent="parent_value",
-    )
-
-    # Make the request
-    page_result = client.list_data_taxonomies(request=request)
-
-    # Handle the response
-    for response in page_result:
-        print(response)
-
-# [END dataplex_v1_generated_DataTaxonomyService_ListDataTaxonomies_sync]
diff --git a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_data_taxonomy_service_update_data_attribute_async.py b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_data_taxonomy_service_update_data_attribute_async.py
deleted file mode 100644
index ceb29efa2ffa..000000000000
--- a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_data_taxonomy_service_update_data_attribute_async.py
+++ /dev/null
@@ -1,55 +0,0 @@
-# -*- coding: utf-8 -*-
-# Copyright 2024 Google LLC
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-# Generated code. DO NOT EDIT!
-#
-# Snippet for UpdateDataAttribute
-# NOTE: This snippet has been automatically generated for illustrative purposes only.
-# It may require modifications to work in your environment.
-
-# To install the latest published package dependency, execute the following:
-#   python3 -m pip install google-cloud-dataplex
-
-
-# [START dataplex_v1_generated_DataTaxonomyService_UpdateDataAttribute_async]
-# This snippet has been automatically generated and should be regarded as a
-# code template only.
-# It will require modifications to work:
-# - It may require correct/in-range values for request initialization.
-# - It may require specifying regional endpoints when creating the service
-#   client as shown in:
-#   https://googleapis.dev/python/google-api-core/latest/client_options.html
-from google.cloud import dataplex_v1
-
-
-async def sample_update_data_attribute():
-    # Create a client
-    client = dataplex_v1.DataTaxonomyServiceAsyncClient()
-
-    # Initialize request argument(s)
-    request = dataplex_v1.UpdateDataAttributeRequest(
-    )
-
-    # Make the request
-    operation = client.update_data_attribute(request=request)
-
-    print("Waiting for operation to complete...")
-
-    response = (await operation).result()
-
-    # Handle the response
-    print(response)
-
-# [END dataplex_v1_generated_DataTaxonomyService_UpdateDataAttribute_async]
diff --git a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_data_taxonomy_service_update_data_attribute_binding_async.py b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_data_taxonomy_service_update_data_attribute_binding_async.py
deleted file mode 100644
index a0a822815f7f..000000000000
--- a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_data_taxonomy_service_update_data_attribute_binding_async.py
+++ /dev/null
@@ -1,59 +0,0 @@
-# -*- coding: utf-8 -*-
-# Copyright 2024 Google LLC
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-# Generated code. DO NOT EDIT!
-#
-# Snippet for UpdateDataAttributeBinding
-# NOTE: This snippet has been automatically generated for illustrative purposes only.
-# It may require modifications to work in your environment.
-
-# To install the latest published package dependency, execute the following:
-#   python3 -m pip install google-cloud-dataplex
-
-
-# [START dataplex_v1_generated_DataTaxonomyService_UpdateDataAttributeBinding_async]
-# This snippet has been automatically generated and should be regarded as a
-# code template only.
-# It will require modifications to work:
-# - It may require correct/in-range values for request initialization.
-# - It may require specifying regional endpoints when creating the service
-#   client as shown in:
-#   https://googleapis.dev/python/google-api-core/latest/client_options.html
-from google.cloud import dataplex_v1
-
-
-async def sample_update_data_attribute_binding():
-    # Create a client
-    client = dataplex_v1.DataTaxonomyServiceAsyncClient()
-
-    # Initialize request argument(s)
-    data_attribute_binding = dataplex_v1.DataAttributeBinding()
-    data_attribute_binding.resource = "resource_value"
-
-    request = dataplex_v1.UpdateDataAttributeBindingRequest(
-        data_attribute_binding=data_attribute_binding,
-    )
-
-    # Make the request
-    operation = client.update_data_attribute_binding(request=request)
-
-    print("Waiting for operation to complete...")
-
-    response = (await operation).result()
-
-    # Handle the response
-    print(response)
-
-# [END dataplex_v1_generated_DataTaxonomyService_UpdateDataAttributeBinding_async]
diff --git a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_data_taxonomy_service_update_data_attribute_binding_sync.py b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_data_taxonomy_service_update_data_attribute_binding_sync.py
deleted file mode 100644
index 39fbd39776d8..000000000000
--- a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_data_taxonomy_service_update_data_attribute_binding_sync.py
+++ /dev/null
@@ -1,59 +0,0 @@
-# -*- coding: utf-8 -*-
-# Copyright 2024 Google LLC
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-# Generated code. DO NOT EDIT!
-#
-# Snippet for UpdateDataAttributeBinding
-# NOTE: This snippet has been automatically generated for illustrative purposes only.
-# It may require modifications to work in your environment.
-
-# To install the latest published package dependency, execute the following:
-#   python3 -m pip install google-cloud-dataplex
-
-
-# [START dataplex_v1_generated_DataTaxonomyService_UpdateDataAttributeBinding_sync]
-# This snippet has been automatically generated and should be regarded as a
-# code template only.
-# It will require modifications to work:
-# - It may require correct/in-range values for request initialization.
-# - It may require specifying regional endpoints when creating the service
-#   client as shown in:
-#   https://googleapis.dev/python/google-api-core/latest/client_options.html
-from google.cloud import dataplex_v1
-
-
-def sample_update_data_attribute_binding():
-    # Create a client
-    client = dataplex_v1.DataTaxonomyServiceClient()
-
-    # Initialize request argument(s)
-    data_attribute_binding = dataplex_v1.DataAttributeBinding()
-    data_attribute_binding.resource = "resource_value"
-
-    request = dataplex_v1.UpdateDataAttributeBindingRequest(
-        data_attribute_binding=data_attribute_binding,
-    )
-
-    # Make the request
-    operation = client.update_data_attribute_binding(request=request)
-
-    print("Waiting for operation to complete...")
-
-    response = operation.result()
-
-    # Handle the response
-    print(response)
-
-# [END dataplex_v1_generated_DataTaxonomyService_UpdateDataAttributeBinding_sync]
diff --git a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_data_taxonomy_service_update_data_attribute_sync.py b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_data_taxonomy_service_update_data_attribute_sync.py
deleted file mode 100644
index 0174b1fe11cd..000000000000
--- a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_data_taxonomy_service_update_data_attribute_sync.py
+++ /dev/null
@@ -1,55 +0,0 @@
-# -*- coding: utf-8 -*-
-# Copyright 2024 Google LLC
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-# Generated code. DO NOT EDIT!
-#
-# Snippet for UpdateDataAttribute
-# NOTE: This snippet has been automatically generated for illustrative purposes only.
-# It may require modifications to work in your environment.
-
-# To install the latest published package dependency, execute the following:
-#   python3 -m pip install google-cloud-dataplex
-
-
-# [START dataplex_v1_generated_DataTaxonomyService_UpdateDataAttribute_sync]
-# This snippet has been automatically generated and should be regarded as a
-# code template only.
-# It will require modifications to work:
-# - It may require correct/in-range values for request initialization.
-# - It may require specifying regional endpoints when creating the service
-#   client as shown in:
-#   https://googleapis.dev/python/google-api-core/latest/client_options.html
-from google.cloud import dataplex_v1
-
-
-def sample_update_data_attribute():
-    # Create a client
-    client = dataplex_v1.DataTaxonomyServiceClient()
-
-    # Initialize request argument(s)
-    request = dataplex_v1.UpdateDataAttributeRequest(
-    )
-
-    # Make the request
-    operation = client.update_data_attribute(request=request)
-
-    print("Waiting for operation to complete...")
-
-    response = operation.result()
-
-    # Handle the response
-    print(response)
-
-# [END dataplex_v1_generated_DataTaxonomyService_UpdateDataAttribute_sync]
diff --git a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_data_taxonomy_service_update_data_taxonomy_async.py b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_data_taxonomy_service_update_data_taxonomy_async.py
deleted file mode 100644
index cf45b97b2582..000000000000
--- a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_data_taxonomy_service_update_data_taxonomy_async.py
+++ /dev/null
@@ -1,55 +0,0 @@
-# -*- coding: utf-8 -*-
-# Copyright 2024 Google LLC
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-# Generated code. DO NOT EDIT!
-#
-# Snippet for UpdateDataTaxonomy
-# NOTE: This snippet has been automatically generated for illustrative purposes only.
-# It may require modifications to work in your environment.
-
-# To install the latest published package dependency, execute the following:
-#   python3 -m pip install google-cloud-dataplex
-
-
-# [START dataplex_v1_generated_DataTaxonomyService_UpdateDataTaxonomy_async]
-# This snippet has been automatically generated and should be regarded as a
-# code template only.
-# It will require modifications to work:
-# - It may require correct/in-range values for request initialization.
-# - It may require specifying regional endpoints when creating the service
-#   client as shown in:
-#   https://googleapis.dev/python/google-api-core/latest/client_options.html
-from google.cloud import dataplex_v1
-
-
-async def sample_update_data_taxonomy():
-    # Create a client
-    client = dataplex_v1.DataTaxonomyServiceAsyncClient()
-
-    # Initialize request argument(s)
-    request = dataplex_v1.UpdateDataTaxonomyRequest(
-    )
-
-    # Make the request
-    operation = client.update_data_taxonomy(request=request)
-
-    print("Waiting for operation to complete...")
-
-    response = (await operation).result()
-
-    # Handle the response
-    print(response)
-
-# [END dataplex_v1_generated_DataTaxonomyService_UpdateDataTaxonomy_async]
diff --git a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_data_taxonomy_service_update_data_taxonomy_sync.py b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_data_taxonomy_service_update_data_taxonomy_sync.py
deleted file mode 100644
index c1ff22cfad5b..000000000000
--- a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_data_taxonomy_service_update_data_taxonomy_sync.py
+++ /dev/null
@@ -1,55 +0,0 @@
-# -*- coding: utf-8 -*-
-# Copyright 2024 Google LLC
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-# Generated code. DO NOT EDIT!
-#
-# Snippet for UpdateDataTaxonomy
-# NOTE: This snippet has been automatically generated for illustrative purposes only.
-# It may require modifications to work in your environment.
-
-# To install the latest published package dependency, execute the following:
-#   python3 -m pip install google-cloud-dataplex
-
-
-# [START dataplex_v1_generated_DataTaxonomyService_UpdateDataTaxonomy_sync]
-# This snippet has been automatically generated and should be regarded as a
-# code template only.
-# It will require modifications to work:
-# - It may require correct/in-range values for request initialization.
-# - It may require specifying regional endpoints when creating the service
-#   client as shown in:
-#   https://googleapis.dev/python/google-api-core/latest/client_options.html
-from google.cloud import dataplex_v1
-
-
-def sample_update_data_taxonomy():
-    # Create a client
-    client = dataplex_v1.DataTaxonomyServiceClient()
-
-    # Initialize request argument(s)
-    request = dataplex_v1.UpdateDataTaxonomyRequest(
-    )
-
-    # Make the request
-    operation = client.update_data_taxonomy(request=request)
-
-    print("Waiting for operation to complete...")
-
-    response = operation.result()
-
-    # Handle the response
-    print(response)
-
-# [END dataplex_v1_generated_DataTaxonomyService_UpdateDataTaxonomy_sync]
diff --git a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_cancel_job_async.py b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_cancel_job_async.py
deleted file mode 100644
index 3c62de319a83..000000000000
--- a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_cancel_job_async.py
+++ /dev/null
@@ -1,50 +0,0 @@
-# -*- coding: utf-8 -*-
-# Copyright 2024 Google LLC
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-# Generated code. DO NOT EDIT!
-#
-# Snippet for CancelJob
-# NOTE: This snippet has been automatically generated for illustrative purposes only.
-# It may require modifications to work in your environment.
-
-# To install the latest published package dependency, execute the following:
-#   python3 -m pip install google-cloud-dataplex
-
-
-# [START dataplex_v1_generated_DataplexService_CancelJob_async]
-# This snippet has been automatically generated and should be regarded as a
-# code template only.
-# It will require modifications to work:
-# - It may require correct/in-range values for request initialization.
-# - It may require specifying regional endpoints when creating the service
-#   client as shown in:
-#   https://googleapis.dev/python/google-api-core/latest/client_options.html
-from google.cloud import dataplex_v1
-
-
-async def sample_cancel_job():
-    # Create a client
-    client = dataplex_v1.DataplexServiceAsyncClient()
-
-    # Initialize request argument(s)
-    request = dataplex_v1.CancelJobRequest(
-        name="name_value",
-    )
-
-    # Make the request
-    await client.cancel_job(request=request)
-
-
-# [END dataplex_v1_generated_DataplexService_CancelJob_async]
diff --git a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_cancel_job_sync.py b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_cancel_job_sync.py
deleted file mode 100644
index 0f03e83feca8..000000000000
--- a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_cancel_job_sync.py
+++ /dev/null
@@ -1,50 +0,0 @@
-# -*- coding: utf-8 -*-
-# Copyright 2024 Google LLC
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-# Generated code. DO NOT EDIT!
-#
-# Snippet for CancelJob
-# NOTE: This snippet has been automatically generated for illustrative purposes only.
-# It may require modifications to work in your environment.
-
-# To install the latest published package dependency, execute the following:
-#   python3 -m pip install google-cloud-dataplex
-
-
-# [START dataplex_v1_generated_DataplexService_CancelJob_sync]
-# This snippet has been automatically generated and should be regarded as a
-# code template only.
-# It will require modifications to work:
-# - It may require correct/in-range values for request initialization.
-# - It may require specifying regional endpoints when creating the service
-#   client as shown in:
-#   https://googleapis.dev/python/google-api-core/latest/client_options.html
-from google.cloud import dataplex_v1
-
-
-def sample_cancel_job():
-    # Create a client
-    client = dataplex_v1.DataplexServiceClient()
-
-    # Initialize request argument(s)
-    request = dataplex_v1.CancelJobRequest(
-        name="name_value",
-    )
-
-    # Make the request
-    client.cancel_job(request=request)
-
-
-# [END dataplex_v1_generated_DataplexService_CancelJob_sync]
diff --git a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_create_asset_async.py b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_create_asset_async.py
deleted file mode 100644
index 22703812444a..000000000000
--- a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_create_asset_async.py
+++ /dev/null
@@ -1,61 +0,0 @@
-# -*- coding: utf-8 -*-
-# Copyright 2024 Google LLC
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for CreateAsset -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-dataplex - - -# [START dataplex_v1_generated_DataplexService_CreateAsset_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. -# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import dataplex_v1 - - -async def sample_create_asset(): - # Create a client - client = dataplex_v1.DataplexServiceAsyncClient() - - # Initialize request argument(s) - asset = dataplex_v1.Asset() - asset.resource_spec.type_ = "BIGQUERY_DATASET" - - request = dataplex_v1.CreateAssetRequest( - parent="parent_value", - asset_id="asset_id_value", - asset=asset, - ) - - # Make the request - operation = client.create_asset(request=request) - - print("Waiting for operation to complete...") - - response = (await operation).result() - - # Handle the response - print(response) - -# [END dataplex_v1_generated_DataplexService_CreateAsset_async] diff --git a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_create_asset_sync.py b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_create_asset_sync.py deleted file mode 100644 index d11ffbfbfb6a..000000000000 --- a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_create_asset_sync.py +++ /dev/null @@ -1,61 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for CreateAsset -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-dataplex - - -# [START dataplex_v1_generated_DataplexService_CreateAsset_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. 
-# It will require modifications to work: -# - It may require correct/in-range values for request initialization. -# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import dataplex_v1 - - -def sample_create_asset(): - # Create a client - client = dataplex_v1.DataplexServiceClient() - - # Initialize request argument(s) - asset = dataplex_v1.Asset() - asset.resource_spec.type_ = "BIGQUERY_DATASET" - - request = dataplex_v1.CreateAssetRequest( - parent="parent_value", - asset_id="asset_id_value", - asset=asset, - ) - - # Make the request - operation = client.create_asset(request=request) - - print("Waiting for operation to complete...") - - response = operation.result() - - # Handle the response - print(response) - -# [END dataplex_v1_generated_DataplexService_CreateAsset_sync] diff --git a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_create_environment_async.py b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_create_environment_async.py deleted file mode 100644 index 04e8b56fc7f5..000000000000 --- a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_create_environment_async.py +++ /dev/null @@ -1,61 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for CreateEnvironment -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-dataplex - - -# [START dataplex_v1_generated_DataplexService_CreateEnvironment_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import dataplex_v1 - - -async def sample_create_environment(): - # Create a client - client = dataplex_v1.DataplexServiceAsyncClient() - - # Initialize request argument(s) - environment = dataplex_v1.Environment() - environment.infrastructure_spec.os_image.image_version = "image_version_value" - - request = dataplex_v1.CreateEnvironmentRequest( - parent="parent_value", - environment_id="environment_id_value", - environment=environment, - ) - - # Make the request - operation = client.create_environment(request=request) - - print("Waiting for operation to complete...") - - response = (await operation).result() - - # Handle the response - print(response) - -# [END dataplex_v1_generated_DataplexService_CreateEnvironment_async] diff --git a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_create_environment_sync.py b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_create_environment_sync.py deleted file mode 100644 index 572c041e342b..000000000000 --- a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_create_environment_sync.py +++ /dev/null @@ -1,61 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for CreateEnvironment -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-dataplex - - -# [START dataplex_v1_generated_DataplexService_CreateEnvironment_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import dataplex_v1 - - -def sample_create_environment(): - # Create a client - client = dataplex_v1.DataplexServiceClient() - - # Initialize request argument(s) - environment = dataplex_v1.Environment() - environment.infrastructure_spec.os_image.image_version = "image_version_value" - - request = dataplex_v1.CreateEnvironmentRequest( - parent="parent_value", - environment_id="environment_id_value", - environment=environment, - ) - - # Make the request - operation = client.create_environment(request=request) - - print("Waiting for operation to complete...") - - response = operation.result() - - # Handle the response - print(response) - -# [END dataplex_v1_generated_DataplexService_CreateEnvironment_sync] diff --git a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_create_lake_async.py b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_create_lake_async.py deleted file mode 100644 index 510fe8247499..000000000000 --- a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_create_lake_async.py +++ /dev/null @@ -1,57 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for CreateLake -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-dataplex - - -# [START dataplex_v1_generated_DataplexService_CreateLake_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import dataplex_v1 - - -async def sample_create_lake(): - # Create a client - client = dataplex_v1.DataplexServiceAsyncClient() - - # Initialize request argument(s) - request = dataplex_v1.CreateLakeRequest( - parent="parent_value", - lake_id="lake_id_value", - ) - - # Make the request - operation = client.create_lake(request=request) - - print("Waiting for operation to complete...") - - response = (await operation).result() - - # Handle the response - print(response) - -# [END dataplex_v1_generated_DataplexService_CreateLake_async] diff --git a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_create_lake_sync.py b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_create_lake_sync.py deleted file mode 100644 index f18dc0a47c48..000000000000 --- a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_create_lake_sync.py +++ /dev/null @@ -1,57 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for CreateLake -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-dataplex - - -# [START dataplex_v1_generated_DataplexService_CreateLake_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import dataplex_v1 - - -def sample_create_lake(): - # Create a client - client = dataplex_v1.DataplexServiceClient() - - # Initialize request argument(s) - request = dataplex_v1.CreateLakeRequest( - parent="parent_value", - lake_id="lake_id_value", - ) - - # Make the request - operation = client.create_lake(request=request) - - print("Waiting for operation to complete...") - - response = operation.result() - - # Handle the response - print(response) - -# [END dataplex_v1_generated_DataplexService_CreateLake_sync] diff --git a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_create_task_async.py b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_create_task_async.py deleted file mode 100644 index 2709109a6790..000000000000 --- a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_create_task_async.py +++ /dev/null @@ -1,64 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for CreateTask -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-dataplex - - -# [START dataplex_v1_generated_DataplexService_CreateTask_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import dataplex_v1 - - -async def sample_create_task(): - # Create a client - client = dataplex_v1.DataplexServiceAsyncClient() - - # Initialize request argument(s) - task = dataplex_v1.Task() - task.spark.main_jar_file_uri = "main_jar_file_uri_value" - task.trigger_spec.schedule = "schedule_value" - task.trigger_spec.type_ = "RECURRING" - task.execution_spec.service_account = "service_account_value" - - request = dataplex_v1.CreateTaskRequest( - parent="parent_value", - task_id="task_id_value", - task=task, - ) - - # Make the request - operation = client.create_task(request=request) - - print("Waiting for operation to complete...") - - response = (await operation).result() - - # Handle the response - print(response) - -# [END dataplex_v1_generated_DataplexService_CreateTask_async] diff --git a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_create_task_sync.py b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_create_task_sync.py deleted file mode 100644 index 8bb4de8c071c..000000000000 --- a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_create_task_sync.py +++ /dev/null @@ -1,64 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for CreateTask -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-dataplex - - -# [START dataplex_v1_generated_DataplexService_CreateTask_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service
-#   client as shown in:
-#   https://googleapis.dev/python/google-api-core/latest/client_options.html
-from google.cloud import dataplex_v1
-
-
-def sample_create_task():
-    # Create a client
-    client = dataplex_v1.DataplexServiceClient()
-
-    # Initialize request argument(s)
-    task = dataplex_v1.Task()
-    task.spark.main_jar_file_uri = "main_jar_file_uri_value"
-    task.trigger_spec.schedule = "schedule_value"
-    task.trigger_spec.type_ = "RECURRING"
-    task.execution_spec.service_account = "service_account_value"
-
-    request = dataplex_v1.CreateTaskRequest(
-        parent="parent_value",
-        task_id="task_id_value",
-        task=task,
-    )
-
-    # Make the request
-    operation = client.create_task(request=request)
-
-    print("Waiting for operation to complete...")
-
-    response = operation.result()
-
-    # Handle the response
-    print(response)
-
-# [END dataplex_v1_generated_DataplexService_CreateTask_sync]
diff --git a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_create_zone_async.py b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_create_zone_async.py
deleted file mode 100644
index 0148ee8fc210..000000000000
--- a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_create_zone_async.py
+++ /dev/null
@@ -1,62 +0,0 @@
-# -*- coding: utf-8 -*-
-# Copyright 2024 Google LLC
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-# Generated code. DO NOT EDIT!
-#
-# Snippet for CreateZone
-# NOTE: This snippet has been automatically generated for illustrative purposes only.
-# It may require modifications to work in your environment.
-
-# To install the latest published package dependency, execute the following:
-#   python3 -m pip install google-cloud-dataplex
-
-
-# [START dataplex_v1_generated_DataplexService_CreateZone_async]
-# This snippet has been automatically generated and should be regarded as a
-# code template only.
-# It will require modifications to work:
-# - It may require correct/in-range values for request initialization.
-# - It may require specifying regional endpoints when creating the service
-#   client as shown in:
-#   https://googleapis.dev/python/google-api-core/latest/client_options.html
-from google.cloud import dataplex_v1
-
-
-async def sample_create_zone():
-    # Create a client
-    client = dataplex_v1.DataplexServiceAsyncClient()
-
-    # Initialize request argument(s)
-    zone = dataplex_v1.Zone()
-    zone.type_ = "CURATED"
-    zone.resource_spec.location_type = "MULTI_REGION"
-
-    request = dataplex_v1.CreateZoneRequest(
-        parent="parent_value",
-        zone_id="zone_id_value",
-        zone=zone,
-    )
-
-    # Make the request
-    operation = client.create_zone(request=request)
-
-    print("Waiting for operation to complete...")
-
-    response = (await operation).result()
-
-    # Handle the response
-    print(response)
-
-# [END dataplex_v1_generated_DataplexService_CreateZone_async]
diff --git a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_create_zone_sync.py b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_create_zone_sync.py
deleted file mode 100644
index 734e213bee7f..000000000000
--- a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_create_zone_sync.py
+++ /dev/null
@@ -1,62 +0,0 @@
-# -*- coding: utf-8 -*-
-# Copyright 2024 Google LLC
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-# Generated code. DO NOT EDIT!
-#
-# Snippet for CreateZone
-# NOTE: This snippet has been automatically generated for illustrative purposes only.
-# It may require modifications to work in your environment.
-
-# To install the latest published package dependency, execute the following:
-#   python3 -m pip install google-cloud-dataplex
-
-
-# [START dataplex_v1_generated_DataplexService_CreateZone_sync]
-# This snippet has been automatically generated and should be regarded as a
-# code template only.
-# It will require modifications to work:
-# - It may require correct/in-range values for request initialization.
-# - It may require specifying regional endpoints when creating the service
-#   client as shown in:
-#   https://googleapis.dev/python/google-api-core/latest/client_options.html
-from google.cloud import dataplex_v1
-
-
-def sample_create_zone():
-    # Create a client
-    client = dataplex_v1.DataplexServiceClient()
-
-    # Initialize request argument(s)
-    zone = dataplex_v1.Zone()
-    zone.type_ = "CURATED"
-    zone.resource_spec.location_type = "MULTI_REGION"
-
-    request = dataplex_v1.CreateZoneRequest(
-        parent="parent_value",
-        zone_id="zone_id_value",
-        zone=zone,
-    )
-
-    # Make the request
-    operation = client.create_zone(request=request)
-
-    print("Waiting for operation to complete...")
-
-    response = operation.result()
-
-    # Handle the response
-    print(response)
-
-# [END dataplex_v1_generated_DataplexService_CreateZone_sync]
diff --git a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_delete_asset_async.py b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_delete_asset_async.py
deleted file mode 100644
index 7fcc4e675180..000000000000
--- a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_delete_asset_async.py
+++ /dev/null
@@ -1,56 +0,0 @@
-# -*- coding: utf-8 -*-
-# Copyright 2024 Google LLC
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-# Generated code. DO NOT EDIT!
-#
-# Snippet for DeleteAsset
-# NOTE: This snippet has been automatically generated for illustrative purposes only.
-# It may require modifications to work in your environment.
-
-# To install the latest published package dependency, execute the following:
-#   python3 -m pip install google-cloud-dataplex
-
-
-# [START dataplex_v1_generated_DataplexService_DeleteAsset_async]
-# This snippet has been automatically generated and should be regarded as a
-# code template only.
-# It will require modifications to work:
-# - It may require correct/in-range values for request initialization.
-# - It may require specifying regional endpoints when creating the service
-#   client as shown in:
-#   https://googleapis.dev/python/google-api-core/latest/client_options.html
-from google.cloud import dataplex_v1
-
-
-async def sample_delete_asset():
-    # Create a client
-    client = dataplex_v1.DataplexServiceAsyncClient()
-
-    # Initialize request argument(s)
-    request = dataplex_v1.DeleteAssetRequest(
-        name="name_value",
-    )
-
-    # Make the request
-    operation = client.delete_asset(request=request)
-
-    print("Waiting for operation to complete...")
-
-    response = (await operation).result()
-
-    # Handle the response
-    print(response)
-
-# [END dataplex_v1_generated_DataplexService_DeleteAsset_async]
diff --git a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_delete_asset_sync.py b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_delete_asset_sync.py
deleted file mode 100644
index 45f0e36fbcb7..000000000000
--- a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_delete_asset_sync.py
+++ /dev/null
@@ -1,56 +0,0 @@
-# -*- coding: utf-8 -*-
-# Copyright 2024 Google LLC
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-# Generated code. DO NOT EDIT!
-#
-# Snippet for DeleteAsset
-# NOTE: This snippet has been automatically generated for illustrative purposes only.
-# It may require modifications to work in your environment.
-
-# To install the latest published package dependency, execute the following:
-#   python3 -m pip install google-cloud-dataplex
-
-
-# [START dataplex_v1_generated_DataplexService_DeleteAsset_sync]
-# This snippet has been automatically generated and should be regarded as a
-# code template only.
-# It will require modifications to work:
-# - It may require correct/in-range values for request initialization.
-# - It may require specifying regional endpoints when creating the service
-#   client as shown in:
-#   https://googleapis.dev/python/google-api-core/latest/client_options.html
-from google.cloud import dataplex_v1
-
-
-def sample_delete_asset():
-    # Create a client
-    client = dataplex_v1.DataplexServiceClient()
-
-    # Initialize request argument(s)
-    request = dataplex_v1.DeleteAssetRequest(
-        name="name_value",
-    )
-
-    # Make the request
-    operation = client.delete_asset(request=request)
-
-    print("Waiting for operation to complete...")
-
-    response = operation.result()
-
-    # Handle the response
-    print(response)
-
-# [END dataplex_v1_generated_DataplexService_DeleteAsset_sync]
diff --git a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_delete_environment_async.py b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_delete_environment_async.py
deleted file mode 100644
index bb626ad110c9..000000000000
--- a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_delete_environment_async.py
+++ /dev/null
@@ -1,56 +0,0 @@
-# -*- coding: utf-8 -*-
-# Copyright 2024 Google LLC
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-# Generated code. DO NOT EDIT!
-#
-# Snippet for DeleteEnvironment
-# NOTE: This snippet has been automatically generated for illustrative purposes only.
-# It may require modifications to work in your environment.
-
-# To install the latest published package dependency, execute the following:
-#   python3 -m pip install google-cloud-dataplex
-
-
-# [START dataplex_v1_generated_DataplexService_DeleteEnvironment_async]
-# This snippet has been automatically generated and should be regarded as a
-# code template only.
-# It will require modifications to work:
-# - It may require correct/in-range values for request initialization.
-# - It may require specifying regional endpoints when creating the service
-#   client as shown in:
-#   https://googleapis.dev/python/google-api-core/latest/client_options.html
-from google.cloud import dataplex_v1
-
-
-async def sample_delete_environment():
-    # Create a client
-    client = dataplex_v1.DataplexServiceAsyncClient()
-
-    # Initialize request argument(s)
-    request = dataplex_v1.DeleteEnvironmentRequest(
-        name="name_value",
-    )
-
-    # Make the request
-    operation = client.delete_environment(request=request)
-
-    print("Waiting for operation to complete...")
-
-    response = (await operation).result()
-
-    # Handle the response
-    print(response)
-
-# [END dataplex_v1_generated_DataplexService_DeleteEnvironment_async]
diff --git a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_delete_environment_sync.py b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_delete_environment_sync.py
deleted file mode 100644
index f9fe6a103eea..000000000000
--- a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_delete_environment_sync.py
+++ /dev/null
@@ -1,56 +0,0 @@
-# -*- coding: utf-8 -*-
-# Copyright 2024 Google LLC
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-# Generated code. DO NOT EDIT!
-#
-# Snippet for DeleteEnvironment
-# NOTE: This snippet has been automatically generated for illustrative purposes only.
-# It may require modifications to work in your environment.
-
-# To install the latest published package dependency, execute the following:
-#   python3 -m pip install google-cloud-dataplex
-
-
-# [START dataplex_v1_generated_DataplexService_DeleteEnvironment_sync]
-# This snippet has been automatically generated and should be regarded as a
-# code template only.
-# It will require modifications to work:
-# - It may require correct/in-range values for request initialization.
-# - It may require specifying regional endpoints when creating the service
-#   client as shown in:
-#   https://googleapis.dev/python/google-api-core/latest/client_options.html
-from google.cloud import dataplex_v1
-
-
-def sample_delete_environment():
-    # Create a client
-    client = dataplex_v1.DataplexServiceClient()
-
-    # Initialize request argument(s)
-    request = dataplex_v1.DeleteEnvironmentRequest(
-        name="name_value",
-    )
-
-    # Make the request
-    operation = client.delete_environment(request=request)
-
-    print("Waiting for operation to complete...")
-
-    response = operation.result()
-
-    # Handle the response
-    print(response)
-
-# [END dataplex_v1_generated_DataplexService_DeleteEnvironment_sync]
diff --git a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_delete_lake_async.py b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_delete_lake_async.py
deleted file mode 100644
index 72b244c54958..000000000000
--- a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_delete_lake_async.py
+++ /dev/null
@@ -1,56 +0,0 @@
-# -*- coding: utf-8 -*-
-# Copyright 2024 Google LLC
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-# Generated code. DO NOT EDIT!
-#
-# Snippet for DeleteLake
-# NOTE: This snippet has been automatically generated for illustrative purposes only.
-# It may require modifications to work in your environment.
-
-# To install the latest published package dependency, execute the following:
-#   python3 -m pip install google-cloud-dataplex
-
-
-# [START dataplex_v1_generated_DataplexService_DeleteLake_async]
-# This snippet has been automatically generated and should be regarded as a
-# code template only.
-# It will require modifications to work:
-# - It may require correct/in-range values for request initialization.
-# - It may require specifying regional endpoints when creating the service
-#   client as shown in:
-#   https://googleapis.dev/python/google-api-core/latest/client_options.html
-from google.cloud import dataplex_v1
-
-
-async def sample_delete_lake():
-    # Create a client
-    client = dataplex_v1.DataplexServiceAsyncClient()
-
-    # Initialize request argument(s)
-    request = dataplex_v1.DeleteLakeRequest(
-        name="name_value",
-    )
-
-    # Make the request
-    operation = client.delete_lake(request=request)
-
-    print("Waiting for operation to complete...")
-
-    response = (await operation).result()
-
-    # Handle the response
-    print(response)
-
-# [END dataplex_v1_generated_DataplexService_DeleteLake_async]
diff --git a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_delete_lake_sync.py b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_delete_lake_sync.py
deleted file mode 100644
index bc60e3d779cf..000000000000
--- a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_delete_lake_sync.py
+++ /dev/null
@@ -1,56 +0,0 @@
-# -*- coding: utf-8 -*-
-# Copyright 2024 Google LLC
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-# Generated code. DO NOT EDIT!
-#
-# Snippet for DeleteLake
-# NOTE: This snippet has been automatically generated for illustrative purposes only.
-# It may require modifications to work in your environment.
-
-# To install the latest published package dependency, execute the following:
-#   python3 -m pip install google-cloud-dataplex
-
-
-# [START dataplex_v1_generated_DataplexService_DeleteLake_sync]
-# This snippet has been automatically generated and should be regarded as a
-# code template only.
-# It will require modifications to work:
-# - It may require correct/in-range values for request initialization.
-# - It may require specifying regional endpoints when creating the service
-#   client as shown in:
-#   https://googleapis.dev/python/google-api-core/latest/client_options.html
-from google.cloud import dataplex_v1
-
-
-def sample_delete_lake():
-    # Create a client
-    client = dataplex_v1.DataplexServiceClient()
-
-    # Initialize request argument(s)
-    request = dataplex_v1.DeleteLakeRequest(
-        name="name_value",
-    )
-
-    # Make the request
-    operation = client.delete_lake(request=request)
-
-    print("Waiting for operation to complete...")
-
-    response = operation.result()
-
-    # Handle the response
-    print(response)
-
-# [END dataplex_v1_generated_DataplexService_DeleteLake_sync]
diff --git a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_delete_task_async.py b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_delete_task_async.py
deleted file mode 100644
index 744bb671e334..000000000000
--- a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_delete_task_async.py
+++ /dev/null
@@ -1,56 +0,0 @@
-# -*- coding: utf-8 -*-
-# Copyright 2024 Google LLC
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-# Generated code. DO NOT EDIT!
-#
-# Snippet for DeleteTask
-# NOTE: This snippet has been automatically generated for illustrative purposes only.
-# It may require modifications to work in your environment.
-
-# To install the latest published package dependency, execute the following:
-#   python3 -m pip install google-cloud-dataplex
-
-
-# [START dataplex_v1_generated_DataplexService_DeleteTask_async]
-# This snippet has been automatically generated and should be regarded as a
-# code template only.
-# It will require modifications to work:
-# - It may require correct/in-range values for request initialization.
-# - It may require specifying regional endpoints when creating the service
-#   client as shown in:
-#   https://googleapis.dev/python/google-api-core/latest/client_options.html
-from google.cloud import dataplex_v1
-
-
-async def sample_delete_task():
-    # Create a client
-    client = dataplex_v1.DataplexServiceAsyncClient()
-
-    # Initialize request argument(s)
-    request = dataplex_v1.DeleteTaskRequest(
-        name="name_value",
-    )
-
-    # Make the request
-    operation = client.delete_task(request=request)
-
-    print("Waiting for operation to complete...")
-
-    response = (await operation).result()
-
-    # Handle the response
-    print(response)
-
-# [END dataplex_v1_generated_DataplexService_DeleteTask_async]
diff --git a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_delete_task_sync.py b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_delete_task_sync.py
deleted file mode 100644
index 346621184099..000000000000
--- a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_delete_task_sync.py
+++ /dev/null
@@ -1,56 +0,0 @@
-# -*- coding: utf-8 -*-
-# Copyright 2024 Google LLC
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-# Generated code. DO NOT EDIT!
-#
-# Snippet for DeleteTask
-# NOTE: This snippet has been automatically generated for illustrative purposes only.
-# It may require modifications to work in your environment.
-
-# To install the latest published package dependency, execute the following:
-#   python3 -m pip install google-cloud-dataplex
-
-
-# [START dataplex_v1_generated_DataplexService_DeleteTask_sync]
-# This snippet has been automatically generated and should be regarded as a
-# code template only.
-# It will require modifications to work:
-# - It may require correct/in-range values for request initialization.
-# - It may require specifying regional endpoints when creating the service
-#   client as shown in:
-#   https://googleapis.dev/python/google-api-core/latest/client_options.html
-from google.cloud import dataplex_v1
-
-
-def sample_delete_task():
-    # Create a client
-    client = dataplex_v1.DataplexServiceClient()
-
-    # Initialize request argument(s)
-    request = dataplex_v1.DeleteTaskRequest(
-        name="name_value",
-    )
-
-    # Make the request
-    operation = client.delete_task(request=request)
-
-    print("Waiting for operation to complete...")
-
-    response = operation.result()
-
-    # Handle the response
-    print(response)
-
-# [END dataplex_v1_generated_DataplexService_DeleteTask_sync]
diff --git a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_delete_zone_async.py b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_delete_zone_async.py
deleted file mode 100644
index 5c26d14108e9..000000000000
--- a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_delete_zone_async.py
+++ /dev/null
@@ -1,56 +0,0 @@
-# -*- coding: utf-8 -*-
-# Copyright 2024 Google LLC
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-# Generated code. DO NOT EDIT!
-#
-# Snippet for DeleteZone
-# NOTE: This snippet has been automatically generated for illustrative purposes only.
-# It may require modifications to work in your environment.
-
-# To install the latest published package dependency, execute the following:
-#   python3 -m pip install google-cloud-dataplex
-
-
-# [START dataplex_v1_generated_DataplexService_DeleteZone_async]
-# This snippet has been automatically generated and should be regarded as a
-# code template only.
-# It will require modifications to work:
-# - It may require correct/in-range values for request initialization.
-# - It may require specifying regional endpoints when creating the service
-#   client as shown in:
-#   https://googleapis.dev/python/google-api-core/latest/client_options.html
-from google.cloud import dataplex_v1
-
-
-async def sample_delete_zone():
-    # Create a client
-    client = dataplex_v1.DataplexServiceAsyncClient()
-
-    # Initialize request argument(s)
-    request = dataplex_v1.DeleteZoneRequest(
-        name="name_value",
-    )
-
-    # Make the request
-    operation = client.delete_zone(request=request)
-
-    print("Waiting for operation to complete...")
-
-    response = (await operation).result()
-
-    # Handle the response
-    print(response)
-
-# [END dataplex_v1_generated_DataplexService_DeleteZone_async]
diff --git a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_delete_zone_sync.py b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_delete_zone_sync.py
deleted file mode 100644
index ca175cbc639e..000000000000
--- a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_delete_zone_sync.py
+++ /dev/null
@@ -1,56 +0,0 @@
-# -*- coding: utf-8 -*-
-# Copyright 2024 Google LLC
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-# Generated code. DO NOT EDIT!
-#
-# Snippet for DeleteZone
-# NOTE: This snippet has been automatically generated for illustrative purposes only.
-# It may require modifications to work in your environment.
-
-# To install the latest published package dependency, execute the following:
-#   python3 -m pip install google-cloud-dataplex
-
-
-# [START dataplex_v1_generated_DataplexService_DeleteZone_sync]
-# This snippet has been automatically generated and should be regarded as a
-# code template only.
-# It will require modifications to work:
-# - It may require correct/in-range values for request initialization.
-# - It may require specifying regional endpoints when creating the service
-#   client as shown in:
-#   https://googleapis.dev/python/google-api-core/latest/client_options.html
-from google.cloud import dataplex_v1
-
-
-def sample_delete_zone():
-    # Create a client
-    client = dataplex_v1.DataplexServiceClient()
-
-    # Initialize request argument(s)
-    request = dataplex_v1.DeleteZoneRequest(
-        name="name_value",
-    )
-
-    # Make the request
-    operation = client.delete_zone(request=request)
-
-    print("Waiting for operation to complete...")
-
-    response = operation.result()
-
-    # Handle the response
-    print(response)
-
-# [END dataplex_v1_generated_DataplexService_DeleteZone_sync]
diff --git a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_get_asset_async.py b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_get_asset_async.py
deleted file mode 100644
index 00d561665ef6..000000000000
--- a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_get_asset_async.py
+++ /dev/null
@@ -1,52 +0,0 @@
-# -*- coding: utf-8 -*-
-# Copyright 2024 Google LLC
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-# Generated code. DO NOT EDIT!
-#
-# Snippet for GetAsset
-# NOTE: This snippet has been automatically generated for illustrative purposes only.
-# It may require modifications to work in your environment.
-
-# To install the latest published package dependency, execute the following:
-#   python3 -m pip install google-cloud-dataplex
-
-
-# [START dataplex_v1_generated_DataplexService_GetAsset_async]
-# This snippet has been automatically generated and should be regarded as a
-# code template only.
-# It will require modifications to work:
-# - It may require correct/in-range values for request initialization.
-# - It may require specifying regional endpoints when creating the service
-#   client as shown in:
-#   https://googleapis.dev/python/google-api-core/latest/client_options.html
-from google.cloud import dataplex_v1
-
-
-async def sample_get_asset():
-    # Create a client
-    client = dataplex_v1.DataplexServiceAsyncClient()
-
-    # Initialize request argument(s)
-    request = dataplex_v1.GetAssetRequest(
-        name="name_value",
-    )
-
-    # Make the request
-    response = await client.get_asset(request=request)
-
-    # Handle the response
-    print(response)
-
-# [END dataplex_v1_generated_DataplexService_GetAsset_async]
diff --git a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_get_asset_sync.py b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_get_asset_sync.py
deleted file mode 100644
index b5e8b7ec2b41..000000000000
--- a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_get_asset_sync.py
+++ /dev/null
@@ -1,52 +0,0 @@
-# -*- coding: utf-8 -*-
-# Copyright 2024 Google LLC
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-# Generated code. DO NOT EDIT!
-#
-# Snippet for GetAsset
-# NOTE: This snippet has been automatically generated for illustrative purposes only.
-# It may require modifications to work in your environment.
-
-# To install the latest published package dependency, execute the following:
-#   python3 -m pip install google-cloud-dataplex
-
-
-# [START dataplex_v1_generated_DataplexService_GetAsset_sync]
-# This snippet has been automatically generated and should be regarded as a
-# code template only.
-# It will require modifications to work:
-# - It may require correct/in-range values for request initialization.
-# - It may require specifying regional endpoints when creating the service
-#   client as shown in:
-#   https://googleapis.dev/python/google-api-core/latest/client_options.html
-from google.cloud import dataplex_v1
-
-
-def sample_get_asset():
-    # Create a client
-    client = dataplex_v1.DataplexServiceClient()
-
-    # Initialize request argument(s)
-    request = dataplex_v1.GetAssetRequest(
-        name="name_value",
-    )
-
-    # Make the request
-    response = client.get_asset(request=request)
-
-    # Handle the response
-    print(response)
-
-# [END dataplex_v1_generated_DataplexService_GetAsset_sync]
diff --git a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_get_environment_async.py b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_get_environment_async.py
deleted file mode 100644
index 3188ccf8d140..000000000000
--- a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_get_environment_async.py
+++ /dev/null
@@ -1,52 +0,0 @@
-# -*- coding: utf-8 -*-
-# Copyright 2024 Google LLC
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-# Generated code. DO NOT EDIT!
-#
-# Snippet for GetEnvironment
-# NOTE: This snippet has been automatically generated for illustrative purposes only.
-# It may require modifications to work in your environment.
-
-# To install the latest published package dependency, execute the following:
-#   python3 -m pip install google-cloud-dataplex
-
-
-# [START dataplex_v1_generated_DataplexService_GetEnvironment_async]
-# This snippet has been automatically generated and should be regarded as a
-# code template only.
-# It will require modifications to work:
-# - It may require correct/in-range values for request initialization.
-# - It may require specifying regional endpoints when creating the service
-#   client as shown in:
-#   https://googleapis.dev/python/google-api-core/latest/client_options.html
-from google.cloud import dataplex_v1
-
-
-async def sample_get_environment():
-    # Create a client
-    client = dataplex_v1.DataplexServiceAsyncClient()
-
-    # Initialize request argument(s)
-    request = dataplex_v1.GetEnvironmentRequest(
-        name="name_value",
-    )
-
-    # Make the request
-    response = await client.get_environment(request=request)
-
-    # Handle the response
-    print(response)
-
-# [END dataplex_v1_generated_DataplexService_GetEnvironment_async]
diff --git a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_get_environment_sync.py b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_get_environment_sync.py
deleted file mode 100644
index 73399effb4c6..000000000000
--- a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_get_environment_sync.py
+++ /dev/null
@@ -1,52 +0,0 @@
-# -*- coding: utf-8 -*-
-# Copyright 2024 Google LLC
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-# Generated code. DO NOT EDIT!
-#
-# Snippet for GetEnvironment
-# NOTE: This snippet has been automatically generated for illustrative purposes only.
-# It may require modifications to work in your environment.
-
-# To install the latest published package dependency, execute the following:
-#   python3 -m pip install google-cloud-dataplex
-
-
-# [START dataplex_v1_generated_DataplexService_GetEnvironment_sync]
-# This snippet has been automatically generated and should be regarded as a
-# code template only.
-# It will require modifications to work:
-# - It may require correct/in-range values for request initialization.
-# - It may require specifying regional endpoints when creating the service
-#   client as shown in:
-#   https://googleapis.dev/python/google-api-core/latest/client_options.html
-from google.cloud import dataplex_v1
-
-
-def sample_get_environment():
-    # Create a client
-    client = dataplex_v1.DataplexServiceClient()
-
-    # Initialize request argument(s)
-    request = dataplex_v1.GetEnvironmentRequest(
-        name="name_value",
-    )
-
-    # Make the request
-    response = client.get_environment(request=request)
-
-    # Handle the response
-    print(response)
-
-# [END dataplex_v1_generated_DataplexService_GetEnvironment_sync]
diff --git a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_get_job_async.py b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_get_job_async.py
deleted file mode 100644
index cd8194f96353..000000000000
--- a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_get_job_async.py
+++ /dev/null
@@ -1,52 +0,0 @@
-# -*- coding: utf-8 -*-
-# Copyright 2024 Google LLC
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-# Generated code. DO NOT EDIT!
-#
-# Snippet for GetJob
-# NOTE: This snippet has been automatically generated for illustrative purposes only.
-# It may require modifications to work in your environment.
-
-# To install the latest published package dependency, execute the following:
-#   python3 -m pip install google-cloud-dataplex
-
-
-# [START dataplex_v1_generated_DataplexService_GetJob_async]
-# This snippet has been automatically generated and should be regarded as a
-# code template only.
-# It will require modifications to work:
-# - It may require correct/in-range values for request initialization.
-# - It may require specifying regional endpoints when creating the service
-#   client as shown in:
-#   https://googleapis.dev/python/google-api-core/latest/client_options.html
-from google.cloud import dataplex_v1
-
-
-async def sample_get_job():
-    # Create a client
-    client = dataplex_v1.DataplexServiceAsyncClient()
-
-    # Initialize request argument(s)
-    request = dataplex_v1.GetJobRequest(
-        name="name_value",
-    )
-
-    # Make the request
-    response = await client.get_job(request=request)
-
-    # Handle the response
-    print(response)
-
-# [END dataplex_v1_generated_DataplexService_GetJob_async]
diff --git a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_get_job_sync.py b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_get_job_sync.py
deleted file mode 100644
index 79102a31e196..000000000000
--- a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_get_job_sync.py
+++ /dev/null
@@ -1,52 +0,0 @@
-# -*- coding: utf-8 -*-
-# Copyright 2024 Google LLC
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-# Generated code. DO NOT EDIT!
-#
-# Snippet for GetJob
-# NOTE: This snippet has been automatically generated for illustrative purposes only.
-# It may require modifications to work in your environment.
-
-# To install the latest published package dependency, execute the following:
-#   python3 -m pip install google-cloud-dataplex
-
-
-# [START dataplex_v1_generated_DataplexService_GetJob_sync]
-# This snippet has been automatically generated and should be regarded as a
-# code template only.
-# It will require modifications to work:
-# - It may require correct/in-range values for request initialization.
-# - It may require specifying regional endpoints when creating the service
-#   client as shown in:
-#   https://googleapis.dev/python/google-api-core/latest/client_options.html
-from google.cloud import dataplex_v1
-
-
-def sample_get_job():
-    # Create a client
-    client = dataplex_v1.DataplexServiceClient()
-
-    # Initialize request argument(s)
-    request = dataplex_v1.GetJobRequest(
-        name="name_value",
-    )
-
-    # Make the request
-    response = client.get_job(request=request)
-
-    # Handle the response
-    print(response)
-
-# [END dataplex_v1_generated_DataplexService_GetJob_sync]
diff --git a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_get_lake_async.py b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_get_lake_async.py
deleted file mode 100644
index 0d71549766aa..000000000000
--- a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_get_lake_async.py
+++ /dev/null
@@ -1,52 +0,0 @@
-# -*- coding: utf-8 -*-
-# Copyright 2024 Google LLC
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-# Generated code. DO NOT EDIT!
-#
-# Snippet for GetLake
-# NOTE: This snippet has been automatically generated for illustrative purposes only.
-# It may require modifications to work in your environment.
-
-# To install the latest published package dependency, execute the following:
-#   python3 -m pip install google-cloud-dataplex
-
-
-# [START dataplex_v1_generated_DataplexService_GetLake_async]
-# This snippet has been automatically generated and should be regarded as a
-# code template only.
-# It will require modifications to work:
-# - It may require correct/in-range values for request initialization.
-# - It may require specifying regional endpoints when creating the service
-#   client as shown in:
-#   https://googleapis.dev/python/google-api-core/latest/client_options.html
-from google.cloud import dataplex_v1
-
-
-async def sample_get_lake():
-    # Create a client
-    client = dataplex_v1.DataplexServiceAsyncClient()
-
-    # Initialize request argument(s)
-    request = dataplex_v1.GetLakeRequest(
-        name="name_value",
-    )
-
-    # Make the request
-    response = await client.get_lake(request=request)
-
-    # Handle the response
-    print(response)
-
-# [END dataplex_v1_generated_DataplexService_GetLake_async]
diff --git a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_get_lake_sync.py b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_get_lake_sync.py
deleted file mode 100644
index 6089d0fd3080..000000000000
--- a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_get_lake_sync.py
+++ /dev/null
@@ -1,52 +0,0 @@
-# -*- coding: utf-8 -*-
-# Copyright 2024 Google LLC
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-# Generated code. DO NOT EDIT!
-#
-# Snippet for GetLake
-# NOTE: This snippet has been automatically generated for illustrative purposes only.
-# It may require modifications to work in your environment.
-
-# To install the latest published package dependency, execute the following:
-#   python3 -m pip install google-cloud-dataplex
-
-
-# [START dataplex_v1_generated_DataplexService_GetLake_sync]
-# This snippet has been automatically generated and should be regarded as a
-# code template only.
-# It will require modifications to work:
-# - It may require correct/in-range values for request initialization.
-# - It may require specifying regional endpoints when creating the service
-#   client as shown in:
-#   https://googleapis.dev/python/google-api-core/latest/client_options.html
-from google.cloud import dataplex_v1
-
-
-def sample_get_lake():
-    # Create a client
-    client = dataplex_v1.DataplexServiceClient()
-
-    # Initialize request argument(s)
-    request = dataplex_v1.GetLakeRequest(
-        name="name_value",
-    )
-
-    # Make the request
-    response = client.get_lake(request=request)
-
-    # Handle the response
-    print(response)
-
-# [END dataplex_v1_generated_DataplexService_GetLake_sync]
diff --git a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_get_task_async.py b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_get_task_async.py
deleted file mode 100644
index a80b63d8165b..000000000000
--- a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_get_task_async.py
+++ /dev/null
@@ -1,52 +0,0 @@
-# -*- coding: utf-8 -*-
-# Copyright 2024 Google LLC
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-# Generated code. DO NOT EDIT!
-#
-# Snippet for GetTask
-# NOTE: This snippet has been automatically generated for illustrative purposes only.
-# It may require modifications to work in your environment.
-
-# To install the latest published package dependency, execute the following:
-#   python3 -m pip install google-cloud-dataplex
-
-
-# [START dataplex_v1_generated_DataplexService_GetTask_async]
-# This snippet has been automatically generated and should be regarded as a
-# code template only.
-# It will require modifications to work:
-# - It may require correct/in-range values for request initialization.
-# - It may require specifying regional endpoints when creating the service
-#   client as shown in:
-#   https://googleapis.dev/python/google-api-core/latest/client_options.html
-from google.cloud import dataplex_v1
-
-
-async def sample_get_task():
-    # Create a client
-    client = dataplex_v1.DataplexServiceAsyncClient()
-
-    # Initialize request argument(s)
-    request = dataplex_v1.GetTaskRequest(
-        name="name_value",
-    )
-
-    # Make the request
-    response = await client.get_task(request=request)
-
-    # Handle the response
-    print(response)
-
-# [END dataplex_v1_generated_DataplexService_GetTask_async]
diff --git a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_get_task_sync.py b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_get_task_sync.py
deleted file mode 100644
index ac88e7053402..000000000000
--- a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_get_task_sync.py
+++ /dev/null
@@ -1,52 +0,0 @@
-# -*- coding: utf-8 -*-
-# Copyright 2024 Google LLC
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-# Generated code. DO NOT EDIT!
-#
-# Snippet for GetTask
-# NOTE: This snippet has been automatically generated for illustrative purposes only.
-# It may require modifications to work in your environment.
-
-# To install the latest published package dependency, execute the following:
-#   python3 -m pip install google-cloud-dataplex
-
-
-# [START dataplex_v1_generated_DataplexService_GetTask_sync]
-# This snippet has been automatically generated and should be regarded as a
-# code template only.
-# It will require modifications to work:
-# - It may require correct/in-range values for request initialization.
-# - It may require specifying regional endpoints when creating the service
-#   client as shown in:
-#   https://googleapis.dev/python/google-api-core/latest/client_options.html
-from google.cloud import dataplex_v1
-
-
-def sample_get_task():
-    # Create a client
-    client = dataplex_v1.DataplexServiceClient()
-
-    # Initialize request argument(s)
-    request = dataplex_v1.GetTaskRequest(
-        name="name_value",
-    )
-
-    # Make the request
-    response = client.get_task(request=request)
-
-    # Handle the response
-    print(response)
-
-# [END dataplex_v1_generated_DataplexService_GetTask_sync]
diff --git a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_get_zone_async.py b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_get_zone_async.py
deleted file mode 100644
index dae75bef30b9..000000000000
--- a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_get_zone_async.py
+++ /dev/null
@@ -1,52 +0,0 @@
-# -*- coding: utf-8 -*-
-# Copyright 2024 Google LLC
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-# Generated code. DO NOT EDIT!
-#
-# Snippet for GetZone
-# NOTE: This snippet has been automatically generated for illustrative purposes only.
-# It may require modifications to work in your environment.
-
-# To install the latest published package dependency, execute the following:
-#   python3 -m pip install google-cloud-dataplex
-
-
-# [START dataplex_v1_generated_DataplexService_GetZone_async]
-# This snippet has been automatically generated and should be regarded as a
-# code template only.
-# It will require modifications to work:
-# - It may require correct/in-range values for request initialization.
-# - It may require specifying regional endpoints when creating the service
-#   client as shown in:
-#   https://googleapis.dev/python/google-api-core/latest/client_options.html
-from google.cloud import dataplex_v1
-
-
-async def sample_get_zone():
-    # Create a client
-    client = dataplex_v1.DataplexServiceAsyncClient()
-
-    # Initialize request argument(s)
-    request = dataplex_v1.GetZoneRequest(
-        name="name_value",
-    )
-
-    # Make the request
-    response = await client.get_zone(request=request)
-
-    # Handle the response
-    print(response)
-
-# [END dataplex_v1_generated_DataplexService_GetZone_async]
diff --git a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_get_zone_sync.py b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_get_zone_sync.py
deleted file mode 100644
index bd09ecb2c2be..000000000000
--- a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_get_zone_sync.py
+++ /dev/null
@@ -1,52 +0,0 @@
-# -*- coding: utf-8 -*-
-# Copyright 2024 Google LLC
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-# Generated code. DO NOT EDIT!
-#
-# Snippet for GetZone
-# NOTE: This snippet has been automatically generated for illustrative purposes only.
-# It may require modifications to work in your environment.
-
-# To install the latest published package dependency, execute the following:
-#   python3 -m pip install google-cloud-dataplex
-
-
-# [START dataplex_v1_generated_DataplexService_GetZone_sync]
-# This snippet has been automatically generated and should be regarded as a
-# code template only.
-# It will require modifications to work:
-# - It may require correct/in-range values for request initialization.
-# - It may require specifying regional endpoints when creating the service
-#   client as shown in:
-#   https://googleapis.dev/python/google-api-core/latest/client_options.html
-from google.cloud import dataplex_v1
-
-
-def sample_get_zone():
-    # Create a client
-    client = dataplex_v1.DataplexServiceClient()
-
-    # Initialize request argument(s)
-    request = dataplex_v1.GetZoneRequest(
-        name="name_value",
-    )
-
-    # Make the request
-    response = client.get_zone(request=request)
-
-    # Handle the response
-    print(response)
-
-# [END dataplex_v1_generated_DataplexService_GetZone_sync]
diff --git a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_list_asset_actions_async.py b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_list_asset_actions_async.py
deleted file mode 100644
index 90f53390f676..000000000000
--- a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_list_asset_actions_async.py
+++ /dev/null
@@ -1,53 +0,0 @@
-# -*- coding: utf-8 -*-
-# Copyright 2024 Google LLC
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-# Generated code. DO NOT EDIT!
-#
-# Snippet for ListAssetActions
-# NOTE: This snippet has been automatically generated for illustrative purposes only.
-# It may require modifications to work in your environment.
-
-# To install the latest published package dependency, execute the following:
-#   python3 -m pip install google-cloud-dataplex
-
-
-# [START dataplex_v1_generated_DataplexService_ListAssetActions_async]
-# This snippet has been automatically generated and should be regarded as a
-# code template only.
-# It will require modifications to work:
-# - It may require correct/in-range values for request initialization.
-# - It may require specifying regional endpoints when creating the service
-#   client as shown in:
-#   https://googleapis.dev/python/google-api-core/latest/client_options.html
-from google.cloud import dataplex_v1
-
-
-async def sample_list_asset_actions():
-    # Create a client
-    client = dataplex_v1.DataplexServiceAsyncClient()
-
-    # Initialize request argument(s)
-    request = dataplex_v1.ListAssetActionsRequest(
-        parent="parent_value",
-    )
-
-    # Make the request (the async client method is a coroutine, so it must be awaited)
-    page_result = await client.list_asset_actions(request=request)
-
-    # Handle the response
-    async for response in page_result:
-        print(response)
-
-# [END dataplex_v1_generated_DataplexService_ListAssetActions_async]
diff --git a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_list_asset_actions_sync.py b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_list_asset_actions_sync.py
deleted file mode 100644
index a449f1cb65db..000000000000
--- a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_list_asset_actions_sync.py
+++ /dev/null
@@ -1,53 +0,0 @@
-# -*- coding: utf-8 -*-
-# Copyright 2024 Google LLC
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-# Generated code. DO NOT EDIT!
-#
-# Snippet for ListAssetActions
-# NOTE: This snippet has been automatically generated for illustrative purposes only.
-# It may require modifications to work in your environment.
-
-# To install the latest published package dependency, execute the following:
-#   python3 -m pip install google-cloud-dataplex
-
-
-# [START dataplex_v1_generated_DataplexService_ListAssetActions_sync]
-# This snippet has been automatically generated and should be regarded as a
-# code template only.
-# It will require modifications to work:
-# - It may require correct/in-range values for request initialization.
-# - It may require specifying regional endpoints when creating the service
-#   client as shown in:
-#   https://googleapis.dev/python/google-api-core/latest/client_options.html
-from google.cloud import dataplex_v1
-
-
-def sample_list_asset_actions():
-    # Create a client
-    client = dataplex_v1.DataplexServiceClient()
-
-    # Initialize request argument(s)
-    request = dataplex_v1.ListAssetActionsRequest(
-        parent="parent_value",
-    )
-
-    # Make the request
-    page_result = client.list_asset_actions(request=request)
-
-    # Handle the response
-    for response in page_result:
-        print(response)
-
-# [END dataplex_v1_generated_DataplexService_ListAssetActions_sync]
diff --git a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_list_assets_async.py b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_list_assets_async.py
deleted file mode 100644
index 8e07a6a1e784..000000000000
--- a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_list_assets_async.py
+++ /dev/null
@@ -1,53 +0,0 @@
-# -*- coding: utf-8 -*-
-# Copyright 2024 Google LLC
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-# Generated code. DO NOT EDIT!
-#
-# Snippet for ListAssets
-# NOTE: This snippet has been automatically generated for illustrative purposes only.
-# It may require modifications to work in your environment.
-
-# To install the latest published package dependency, execute the following:
-#   python3 -m pip install google-cloud-dataplex
-
-
-# [START dataplex_v1_generated_DataplexService_ListAssets_async]
-# This snippet has been automatically generated and should be regarded as a
-# code template only.
-# It will require modifications to work:
-# - It may require correct/in-range values for request initialization.
-# - It may require specifying regional endpoints when creating the service
-#   client as shown in:
-#   https://googleapis.dev/python/google-api-core/latest/client_options.html
-from google.cloud import dataplex_v1
-
-
-async def sample_list_assets():
-    # Create a client
-    client = dataplex_v1.DataplexServiceAsyncClient()
-
-    # Initialize request argument(s)
-    request = dataplex_v1.ListAssetsRequest(
-        parent="parent_value",
-    )
-
-    # Make the request (the async client method is a coroutine, so it must be awaited)
-    page_result = await client.list_assets(request=request)
-
-    # Handle the response
-    async for response in page_result:
-        print(response)
-
-# [END dataplex_v1_generated_DataplexService_ListAssets_async]
diff --git a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_list_assets_sync.py b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_list_assets_sync.py
deleted file mode 100644
index 484a7db1eaeb..000000000000
--- a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_list_assets_sync.py
+++ /dev/null
@@ -1,53 +0,0 @@
-# -*- coding: utf-8 -*-
-# Copyright 2024 Google LLC
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-# Generated code. DO NOT EDIT!
-#
-# Snippet for ListAssets
-# NOTE: This snippet has been automatically generated for illustrative purposes only.
-# It may require modifications to work in your environment.
-
-# To install the latest published package dependency, execute the following:
-#   python3 -m pip install google-cloud-dataplex
-
-
-# [START dataplex_v1_generated_DataplexService_ListAssets_sync]
-# This snippet has been automatically generated and should be regarded as a
-# code template only.
-# It will require modifications to work:
-# - It may require correct/in-range values for request initialization.
-# - It may require specifying regional endpoints when creating the service
-#   client as shown in:
-#   https://googleapis.dev/python/google-api-core/latest/client_options.html
-from google.cloud import dataplex_v1
-
-
-def sample_list_assets():
-    # Create a client
-    client = dataplex_v1.DataplexServiceClient()
-
-    # Initialize request argument(s)
-    request = dataplex_v1.ListAssetsRequest(
-        parent="parent_value",
-    )
-
-    # Make the request
-    page_result = client.list_assets(request=request)
-
-    # Handle the response
-    for response in page_result:
-        print(response)
-
-# [END dataplex_v1_generated_DataplexService_ListAssets_sync]
diff --git a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_list_environments_async.py b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_list_environments_async.py
deleted file mode 100644
index 164415a6498b..000000000000
--- a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_list_environments_async.py
+++ /dev/null
@@ -1,53 +0,0 @@
-# -*- coding: utf-8 -*-
-# Copyright 2024 Google LLC
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-# Generated code. DO NOT EDIT!
-#
-# Snippet for ListEnvironments
-# NOTE: This snippet has been automatically generated for illustrative purposes only.
-# It may require modifications to work in your environment.
-
-# To install the latest published package dependency, execute the following:
-#   python3 -m pip install google-cloud-dataplex
-
-
-# [START dataplex_v1_generated_DataplexService_ListEnvironments_async]
-# This snippet has been automatically generated and should be regarded as a
-# code template only.
-# It will require modifications to work:
-# - It may require correct/in-range values for request initialization.
-# - It may require specifying regional endpoints when creating the service
-#   client as shown in:
-#   https://googleapis.dev/python/google-api-core/latest/client_options.html
-from google.cloud import dataplex_v1
-
-
-async def sample_list_environments():
-    # Create a client
-    client = dataplex_v1.DataplexServiceAsyncClient()
-
-    # Initialize request argument(s)
-    request = dataplex_v1.ListEnvironmentsRequest(
-        parent="parent_value",
-    )
-
-    # Make the request (the async client method is a coroutine, so it must be awaited)
-    page_result = await client.list_environments(request=request)
-
-    # Handle the response
-    async for response in page_result:
-        print(response)
-
-# [END dataplex_v1_generated_DataplexService_ListEnvironments_async]
diff --git a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_list_environments_sync.py b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_list_environments_sync.py
deleted file mode 100644
index 79b7fd642ab6..000000000000
--- a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_list_environments_sync.py
+++ /dev/null
@@ -1,53 +0,0 @@
-# -*- coding: utf-8 -*-
-# Copyright 2024 Google LLC
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-# Generated code. DO NOT EDIT!
-#
-# Snippet for ListEnvironments
-# NOTE: This snippet has been automatically generated for illustrative purposes only.
-# It may require modifications to work in your environment.
-
-# To install the latest published package dependency, execute the following:
-#   python3 -m pip install google-cloud-dataplex
-
-
-# [START dataplex_v1_generated_DataplexService_ListEnvironments_sync]
-# This snippet has been automatically generated and should be regarded as a
-# code template only.
-# It will require modifications to work:
-# - It may require correct/in-range values for request initialization.
-# - It may require specifying regional endpoints when creating the service
-#   client as shown in:
-#   https://googleapis.dev/python/google-api-core/latest/client_options.html
-from google.cloud import dataplex_v1
-
-
-def sample_list_environments():
-    # Create a client
-    client = dataplex_v1.DataplexServiceClient()
-
-    # Initialize request argument(s)
-    request = dataplex_v1.ListEnvironmentsRequest(
-        parent="parent_value",
-    )
-
-    # Make the request
-    page_result = client.list_environments(request=request)
-
-    # Handle the response
-    for response in page_result:
-        print(response)
-
-# [END dataplex_v1_generated_DataplexService_ListEnvironments_sync]
diff --git a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_list_jobs_async.py b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_list_jobs_async.py
deleted file mode 100644
index 2de64cc90520..000000000000
--- a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_list_jobs_async.py
+++ /dev/null
@@ -1,53 +0,0 @@
-# -*- coding: utf-8 -*-
-# Copyright 2024 Google LLC
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-# Generated code. DO NOT EDIT!
-#
-# Snippet for ListJobs
-# NOTE: This snippet has been automatically generated for illustrative purposes only.
-# It may require modifications to work in your environment.
-
-# To install the latest published package dependency, execute the following:
-#   python3 -m pip install google-cloud-dataplex
-
-
-# [START dataplex_v1_generated_DataplexService_ListJobs_async]
-# This snippet has been automatically generated and should be regarded as a
-# code template only.
-# It will require modifications to work:
-# - It may require correct/in-range values for request initialization.
-# - It may require specifying regional endpoints when creating the service
-#   client as shown in:
-#   https://googleapis.dev/python/google-api-core/latest/client_options.html
-from google.cloud import dataplex_v1
-
-
-async def sample_list_jobs():
-    # Create a client
-    client = dataplex_v1.DataplexServiceAsyncClient()
-
-    # Initialize request argument(s)
-    request = dataplex_v1.ListJobsRequest(
-        parent="parent_value",
-    )
-
-    # Make the request (the async client method is a coroutine, so it must be awaited)
-    page_result = await client.list_jobs(request=request)
-
-    # Handle the response
-    async for response in page_result:
-        print(response)
-
-# [END dataplex_v1_generated_DataplexService_ListJobs_async]
diff --git a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_list_jobs_sync.py b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_list_jobs_sync.py
deleted file mode 100644
index 708192b9bd03..000000000000
--- a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_list_jobs_sync.py
+++ /dev/null
@@ -1,53 +0,0 @@
-# -*- coding: utf-8 -*-
-# Copyright 2024 Google LLC
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-# Generated code. DO NOT EDIT!
-#
-# Snippet for ListJobs
-# NOTE: This snippet has been automatically generated for illustrative purposes only.
-# It may require modifications to work in your environment.
-
-# To install the latest published package dependency, execute the following:
-#   python3 -m pip install google-cloud-dataplex
-
-
-# [START dataplex_v1_generated_DataplexService_ListJobs_sync]
-# This snippet has been automatically generated and should be regarded as a
-# code template only.
-# It will require modifications to work:
-# - It may require correct/in-range values for request initialization.
-# - It may require specifying regional endpoints when creating the service
-#   client as shown in:
-#   https://googleapis.dev/python/google-api-core/latest/client_options.html
-from google.cloud import dataplex_v1
-
-
-def sample_list_jobs():
-    # Create a client
-    client = dataplex_v1.DataplexServiceClient()
-
-    # Initialize request argument(s)
-    request = dataplex_v1.ListJobsRequest(
-        parent="parent_value",
-    )
-
-    # Make the request
-    page_result = client.list_jobs(request=request)
-
-    # Handle the response
-    for response in page_result:
-        print(response)
-
-# [END dataplex_v1_generated_DataplexService_ListJobs_sync]
diff --git a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_list_lake_actions_async.py b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_list_lake_actions_async.py
deleted file mode 100644
index 3a9296185703..000000000000
--- a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_list_lake_actions_async.py
+++ /dev/null
@@ -1,53 +0,0 @@
-# -*- coding: utf-8 -*-
-# Copyright 2024 Google LLC
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-# Generated code. DO NOT EDIT!
-#
-# Snippet for ListLakeActions
-# NOTE: This snippet has been automatically generated for illustrative purposes only.
-# It may require modifications to work in your environment.
-
-# To install the latest published package dependency, execute the following:
-#   python3 -m pip install google-cloud-dataplex
-
-
-# [START dataplex_v1_generated_DataplexService_ListLakeActions_async]
-# This snippet has been automatically generated and should be regarded as a
-# code template only.
-# It will require modifications to work:
-# - It may require correct/in-range values for request initialization.
-# - It may require specifying regional endpoints when creating the service
-#   client as shown in:
-#   https://googleapis.dev/python/google-api-core/latest/client_options.html
-from google.cloud import dataplex_v1
-
-
-async def sample_list_lake_actions():
-    # Create a client
-    client = dataplex_v1.DataplexServiceAsyncClient()
-
-    # Initialize request argument(s)
-    request = dataplex_v1.ListLakeActionsRequest(
-        parent="parent_value",
-    )
-
-    # Make the request (the async client method is a coroutine, so it must be awaited)
-    page_result = await client.list_lake_actions(request=request)
-
-    # Handle the response
-    async for response in page_result:
-        print(response)
-
-# [END dataplex_v1_generated_DataplexService_ListLakeActions_async]
diff --git a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_list_lake_actions_sync.py b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_list_lake_actions_sync.py
deleted file mode 100644
index 9b3333ce8555..000000000000
--- a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_list_lake_actions_sync.py
+++ /dev/null
@@ -1,53 +0,0 @@
-# -*- coding: utf-8 -*-
-# Copyright 2024 Google LLC
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-# Generated code. DO NOT EDIT!
-#
-# Snippet for ListLakeActions
-# NOTE: This snippet has been automatically generated for illustrative purposes only.
-# It may require modifications to work in your environment.
-
-# To install the latest published package dependency, execute the following:
-#   python3 -m pip install google-cloud-dataplex
-
-
-# [START dataplex_v1_generated_DataplexService_ListLakeActions_sync]
-# This snippet has been automatically generated and should be regarded as a
-# code template only.
-# It will require modifications to work:
-# - It may require correct/in-range values for request initialization.
-# - It may require specifying regional endpoints when creating the service
-#   client as shown in:
-#   https://googleapis.dev/python/google-api-core/latest/client_options.html
-from google.cloud import dataplex_v1
-
-
-def sample_list_lake_actions():
-    # Create a client
-    client = dataplex_v1.DataplexServiceClient()
-
-    # Initialize request argument(s)
-    request = dataplex_v1.ListLakeActionsRequest(
-        parent="parent_value",
-    )
-
-    # Make the request
-    page_result = client.list_lake_actions(request=request)
-
-    # Handle the response
-    for response in page_result:
-        print(response)
-
-# [END dataplex_v1_generated_DataplexService_ListLakeActions_sync]
diff --git a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_list_lakes_async.py b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_list_lakes_async.py
deleted file mode 100644
index b312e9544b9a..000000000000
--- a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_list_lakes_async.py
+++ /dev/null
@@ -1,53 +0,0 @@
-# -*- coding: utf-8 -*-
-# Copyright 2024 Google LLC
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-# Generated code. DO NOT EDIT!
-#
-# Snippet for ListLakes
-# NOTE: This snippet has been automatically generated for illustrative purposes only.
-# It may require modifications to work in your environment.
-
-# To install the latest published package dependency, execute the following:
-#   python3 -m pip install google-cloud-dataplex
-
-
-# [START dataplex_v1_generated_DataplexService_ListLakes_async]
-# This snippet has been automatically generated and should be regarded as a
-# code template only.
-# It will require modifications to work:
-# - It may require correct/in-range values for request initialization.
-# - It may require specifying regional endpoints when creating the service
-#   client as shown in:
-#   https://googleapis.dev/python/google-api-core/latest/client_options.html
-from google.cloud import dataplex_v1
-
-
-async def sample_list_lakes():
-    # Create a client
-    client = dataplex_v1.DataplexServiceAsyncClient()
-
-    # Initialize request argument(s)
-    request = dataplex_v1.ListLakesRequest(
-        parent="parent_value",
-    )
-
-    # Make the request (the async client method is a coroutine, so it must be awaited)
-    page_result = await client.list_lakes(request=request)
-
-    # Handle the response
-    async for response in page_result:
-        print(response)
-
-# [END dataplex_v1_generated_DataplexService_ListLakes_async]
diff --git a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_list_lakes_sync.py b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_list_lakes_sync.py
deleted file mode 100644
index e2fc0d0e6b0d..000000000000
--- a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_list_lakes_sync.py
+++ /dev/null
@@ -1,53 +0,0 @@
-# -*- coding: utf-8 -*-
-# Copyright 2024 Google LLC
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-# Generated code. DO NOT EDIT!
-#
-# Snippet for ListLakes
-# NOTE: This snippet has been automatically generated for illustrative purposes only.
-# It may require modifications to work in your environment.
-
-# To install the latest published package dependency, execute the following:
-#   python3 -m pip install google-cloud-dataplex
-
-
-# [START dataplex_v1_generated_DataplexService_ListLakes_sync]
-# This snippet has been automatically generated and should be regarded as a
-# code template only.
-# It will require modifications to work:
-# - It may require correct/in-range values for request initialization.
-# - It may require specifying regional endpoints when creating the service
-#   client as shown in:
-#   https://googleapis.dev/python/google-api-core/latest/client_options.html
-from google.cloud import dataplex_v1
-
-
-def sample_list_lakes():
-    # Create a client
-    client = dataplex_v1.DataplexServiceClient()
-
-    # Initialize request argument(s)
-    request = dataplex_v1.ListLakesRequest(
-        parent="parent_value",
-    )
-
-    # Make the request
-    page_result = client.list_lakes(request=request)
-
-    # Handle the response
-    for response in page_result:
-        print(response)
-
-# [END dataplex_v1_generated_DataplexService_ListLakes_sync]
diff --git a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_list_sessions_async.py b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_list_sessions_async.py
deleted file mode 100644
index 0b265969c08f..000000000000
--- a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_list_sessions_async.py
+++ /dev/null
@@ -1,53 +0,0 @@
-# -*- coding: utf-8 -*-
-# Copyright 2024 Google LLC
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-# Generated code. DO NOT EDIT!
-#
-# Snippet for ListSessions
-# NOTE: This snippet has been automatically generated for illustrative purposes only.
-# It may require modifications to work in your environment.
-
-# To install the latest published package dependency, execute the following:
-#   python3 -m pip install google-cloud-dataplex
-
-
-# [START dataplex_v1_generated_DataplexService_ListSessions_async]
-# This snippet has been automatically generated and should be regarded as a
-# code template only.
-# It will require modifications to work:
-# - It may require correct/in-range values for request initialization.
-# - It may require specifying regional endpoints when creating the service
-#   client as shown in:
-#   https://googleapis.dev/python/google-api-core/latest/client_options.html
-from google.cloud import dataplex_v1
-
-
-async def sample_list_sessions():
-    # Create a client
-    client = dataplex_v1.DataplexServiceAsyncClient()
-
-    # Initialize request argument(s)
-    request = dataplex_v1.ListSessionsRequest(
-        parent="parent_value",
-    )
-
-    # Make the request (the async client method is a coroutine, so it must be awaited)
-    page_result = await client.list_sessions(request=request)
-
-    # Handle the response
-    async for response in page_result:
-        print(response)
-
-# [END dataplex_v1_generated_DataplexService_ListSessions_async]
diff --git a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_list_sessions_sync.py b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_list_sessions_sync.py
deleted file mode 100644
index 7fc0d0de9675..000000000000
--- a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_list_sessions_sync.py
+++ /dev/null
@@ -1,53 +0,0 @@
-# -*- coding: utf-8 -*-
-# Copyright 2024 Google LLC
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-# Generated code. DO NOT EDIT!
-#
-# Snippet for ListSessions
-# NOTE: This snippet has been automatically generated for illustrative purposes only.
-# It may require modifications to work in your environment.
-
-# To install the latest published package dependency, execute the following:
-#   python3 -m pip install google-cloud-dataplex
-
-
-# [START dataplex_v1_generated_DataplexService_ListSessions_sync]
-# This snippet has been automatically generated and should be regarded as a
-# code template only.
-# It will require modifications to work:
-# - It may require correct/in-range values for request initialization.
-# - It may require specifying regional endpoints when creating the service
-#   client as shown in:
-#   https://googleapis.dev/python/google-api-core/latest/client_options.html
-from google.cloud import dataplex_v1
-
-
-def sample_list_sessions():
-    # Create a client
-    client = dataplex_v1.DataplexServiceClient()
-
-    # Initialize request argument(s)
-    request = dataplex_v1.ListSessionsRequest(
-        parent="parent_value",
-    )
-
-    # Make the request
-    page_result = client.list_sessions(request=request)
-
-    # Handle the response
-    for response in page_result:
-        print(response)
-
-# [END dataplex_v1_generated_DataplexService_ListSessions_sync]
diff --git a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_list_tasks_async.py b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_list_tasks_async.py
deleted file mode 100644
index 3ebe50888279..000000000000
--- a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_list_tasks_async.py
+++ /dev/null
@@ -1,53 +0,0 @@
-# -*- coding: utf-8 -*-
-# Copyright 2024 Google LLC
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-# Generated code. DO NOT EDIT!
-#
-# Snippet for ListTasks
-# NOTE: This snippet has been automatically generated for illustrative purposes only.
-# It may require modifications to work in your environment.
-
-# To install the latest published package dependency, execute the following:
-#   python3 -m pip install google-cloud-dataplex
-
-
-# [START dataplex_v1_generated_DataplexService_ListTasks_async]
-# This snippet has been automatically generated and should be regarded as a
-# code template only.
-# It will require modifications to work:
-# - It may require correct/in-range values for request initialization.
-# - It may require specifying regional endpoints when creating the service
-#   client as shown in:
-#   https://googleapis.dev/python/google-api-core/latest/client_options.html
-from google.cloud import dataplex_v1
-
-
-async def sample_list_tasks():
-    # Create a client
-    client = dataplex_v1.DataplexServiceAsyncClient()
-
-    # Initialize request argument(s)
-    request = dataplex_v1.ListTasksRequest(
-        parent="parent_value",
-    )
-
-    # Make the request (the async client method is a coroutine, so it must be awaited)
-    page_result = await client.list_tasks(request=request)
-
-    # Handle the response
-    async for response in page_result:
-        print(response)
-
-# [END dataplex_v1_generated_DataplexService_ListTasks_async]
diff --git a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_list_tasks_sync.py b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_list_tasks_sync.py
deleted file mode 100644
index b6a69abd53aa..000000000000
--- a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_list_tasks_sync.py
+++ /dev/null
@@ -1,53 +0,0 @@
-# -*- coding: utf-8 -*-
-# Copyright 2024 Google LLC
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-# Generated code. DO NOT EDIT!
-#
-# Snippet for ListTasks
-# NOTE: This snippet has been automatically generated for illustrative purposes only.
-# It may require modifications to work in your environment.
-
-# To install the latest published package dependency, execute the following:
-#   python3 -m pip install google-cloud-dataplex
-
-
-# [START dataplex_v1_generated_DataplexService_ListTasks_sync]
-# This snippet has been automatically generated and should be regarded as a
-# code template only.
-# It will require modifications to work:
-# - It may require correct/in-range values for request initialization.
-# - It may require specifying regional endpoints when creating the service
-#   client as shown in:
-#   https://googleapis.dev/python/google-api-core/latest/client_options.html
-from google.cloud import dataplex_v1
-
-
-def sample_list_tasks():
-    # Create a client
-    client = dataplex_v1.DataplexServiceClient()
-
-    # Initialize request argument(s)
-    request = dataplex_v1.ListTasksRequest(
-        parent="parent_value",
-    )
-
-    # Make the request
-    page_result = client.list_tasks(request=request)
-
-    # Handle the response
-    for response in page_result:
-        print(response)
-
-# [END dataplex_v1_generated_DataplexService_ListTasks_sync]
diff --git a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_list_zone_actions_async.py b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_list_zone_actions_async.py
deleted file mode 100644
index 55bf491493c6..000000000000
--- a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_list_zone_actions_async.py
+++ /dev/null
@@ -1,53 +0,0 @@
-# -*- coding: utf-8 -*-
-# Copyright 2024 Google LLC
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-# Generated code. DO NOT EDIT!
-#
-# Snippet for ListZoneActions
-# NOTE: This snippet has been automatically generated for illustrative purposes only.
-# It may require modifications to work in your environment.
-
-# To install the latest published package dependency, execute the following:
-#   python3 -m pip install google-cloud-dataplex
-
-
-# [START dataplex_v1_generated_DataplexService_ListZoneActions_async]
-# This snippet has been automatically generated and should be regarded as a
-# code template only.
-# It will require modifications to work:
-# - It may require correct/in-range values for request initialization.
-# - It may require specifying regional endpoints when creating the service
-#   client as shown in:
-#   https://googleapis.dev/python/google-api-core/latest/client_options.html
-from google.cloud import dataplex_v1
-
-
-async def sample_list_zone_actions():
-    # Create a client
-    client = dataplex_v1.DataplexServiceAsyncClient()
-
-    # Initialize request argument(s)
-    request = dataplex_v1.ListZoneActionsRequest(
-        parent="parent_value",
-    )
-
-    # Make the request (the async client method is a coroutine, so it must be awaited)
-    page_result = await client.list_zone_actions(request=request)
-
-    # Handle the response
-    async for response in page_result:
-        print(response)
-
-# [END dataplex_v1_generated_DataplexService_ListZoneActions_async]
diff --git a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_list_zone_actions_sync.py b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_list_zone_actions_sync.py
deleted file mode 100644
index b1a4cd7d39a9..000000000000
--- a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_list_zone_actions_sync.py
+++ /dev/null
@@ -1,53 +0,0 @@
-# -*- coding: utf-8 -*-
-# Copyright 2024 Google LLC
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-# Generated code. DO NOT EDIT!
-#
-# Snippet for ListZoneActions
-# NOTE: This snippet has been automatically generated for illustrative purposes only.
-# It may require modifications to work in your environment.
-
-# To install the latest published package dependency, execute the following:
-#   python3 -m pip install google-cloud-dataplex
-
-
-# [START dataplex_v1_generated_DataplexService_ListZoneActions_sync]
-# This snippet has been automatically generated and should be regarded as a
-# code template only.
-# It will require modifications to work:
-# - It may require correct/in-range values for request initialization.
-# - It may require specifying regional endpoints when creating the service
-#   client as shown in:
-#   https://googleapis.dev/python/google-api-core/latest/client_options.html
-from google.cloud import dataplex_v1
-
-
-def sample_list_zone_actions():
-    # Create a client
-    client = dataplex_v1.DataplexServiceClient()
-
-    # Initialize request argument(s)
-    request = dataplex_v1.ListZoneActionsRequest(
-        parent="parent_value",
-    )
-
-    # Make the request
-    page_result = client.list_zone_actions(request=request)
-
-    # Handle the response
-    for response in page_result:
-        print(response)
-
-# [END dataplex_v1_generated_DataplexService_ListZoneActions_sync]
diff --git a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_list_zones_async.py b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_list_zones_async.py
deleted file mode 100644
index 4387ef13b0bf..000000000000
--- a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_list_zones_async.py
+++ /dev/null
@@ -1,53 +0,0 @@
-# -*- coding: utf-8 -*-
-# Copyright 2024 Google LLC
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-# Generated code. DO NOT EDIT!
-#
-# Snippet for ListZones
-# NOTE: This snippet has been automatically generated for illustrative purposes only.
-# It may require modifications to work in your environment.
-
-# To install the latest published package dependency, execute the following:
-#   python3 -m pip install google-cloud-dataplex
-
-
-# [START dataplex_v1_generated_DataplexService_ListZones_async]
-# This snippet has been automatically generated and should be regarded as a
-# code template only.
-# It will require modifications to work:
-# - It may require correct/in-range values for request initialization.
-# - It may require specifying regional endpoints when creating the service
-#   client as shown in:
-#   https://googleapis.dev/python/google-api-core/latest/client_options.html
-from google.cloud import dataplex_v1
-
-
-async def sample_list_zones():
-    # Create a client
-    client = dataplex_v1.DataplexServiceAsyncClient()
-
-    # Initialize request argument(s)
-    request = dataplex_v1.ListZonesRequest(
-        parent="parent_value",
-    )
-
-    # Make the request (the async client method is a coroutine, so it must be awaited)
-    page_result = await client.list_zones(request=request)
-
-    # Handle the response
-    async for response in page_result:
-        print(response)
-
-# [END dataplex_v1_generated_DataplexService_ListZones_async]
diff --git a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_list_zones_sync.py b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_list_zones_sync.py
deleted file mode 100644
index d27df0d9fee2..000000000000
--- a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_list_zones_sync.py
+++ /dev/null
@@ -1,53 +0,0 @@
-# -*- coding: utf-8 -*-
-# Copyright 2024 Google LLC
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-# Generated code. DO NOT EDIT!
-#
-# Snippet for ListZones
-# NOTE: This snippet has been automatically generated for illustrative purposes only.
-# It may require modifications to work in your environment.
-
-# To install the latest published package dependency, execute the following:
-#   python3 -m pip install google-cloud-dataplex
-
-
-# [START dataplex_v1_generated_DataplexService_ListZones_sync]
-# This snippet has been automatically generated and should be regarded as a
-# code template only.
-# It will require modifications to work:
-# - It may require correct/in-range values for request initialization.
-# - It may require specifying regional endpoints when creating the service
-#   client as shown in:
-#   https://googleapis.dev/python/google-api-core/latest/client_options.html
-from google.cloud import dataplex_v1
-
-
-def sample_list_zones():
-    # Create a client
-    client = dataplex_v1.DataplexServiceClient()
-
-    # Initialize request argument(s)
-    request = dataplex_v1.ListZonesRequest(
-        parent="parent_value",
-    )
-
-    # Make the request
-    page_result = client.list_zones(request=request)
-
-    # Handle the response
-    for response in page_result:
-        print(response)
-
-# [END dataplex_v1_generated_DataplexService_ListZones_sync]
diff --git a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_run_task_async.py b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_run_task_async.py
deleted file mode 100644
index 9d9d81c26094..000000000000
--- a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_run_task_async.py
+++ /dev/null
@@ -1,52 +0,0 @@
-# -*- coding: utf-8 -*-
-# Copyright 2024 Google LLC
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-# Generated code. DO NOT EDIT!
-#
-# Snippet for RunTask
-# NOTE: This snippet has been automatically generated for illustrative purposes only.
-# It may require modifications to work in your environment.
-
-# To install the latest published package dependency, execute the following:
-#   python3 -m pip install google-cloud-dataplex
-
-
-# [START dataplex_v1_generated_DataplexService_RunTask_async]
-# This snippet has been automatically generated and should be regarded as a
-# code template only.
-# It will require modifications to work:
-# - It may require correct/in-range values for request initialization.
-# - It may require specifying regional endpoints when creating the service
-#   client as shown in:
-#   https://googleapis.dev/python/google-api-core/latest/client_options.html
-from google.cloud import dataplex_v1
-
-
-async def sample_run_task():
-    # Create a client
-    client = dataplex_v1.DataplexServiceAsyncClient()
-
-    # Initialize request argument(s)
-    request = dataplex_v1.RunTaskRequest(
-        name="name_value",
-    )
-
-    # Make the request
-    response = await client.run_task(request=request)
-
-    # Handle the response
-    print(response)
-
-# [END dataplex_v1_generated_DataplexService_RunTask_async]
diff --git a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_run_task_sync.py b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_run_task_sync.py
deleted file mode 100644
index 53e68cc087c8..000000000000
--- a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_run_task_sync.py
+++ /dev/null
@@ -1,52 +0,0 @@
-# -*- coding: utf-8 -*-
-# Copyright 2024 Google LLC
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-# Generated code. DO NOT EDIT!
-#
-# Snippet for RunTask
-# NOTE: This snippet has been automatically generated for illustrative purposes only.
-# It may require modifications to work in your environment.
-
-# To install the latest published package dependency, execute the following:
-#   python3 -m pip install google-cloud-dataplex
-
-
-# [START dataplex_v1_generated_DataplexService_RunTask_sync]
-# This snippet has been automatically generated and should be regarded as a
-# code template only.
-# It will require modifications to work:
-# - It may require correct/in-range values for request initialization.
-# - It may require specifying regional endpoints when creating the service
-#   client as shown in:
-#   https://googleapis.dev/python/google-api-core/latest/client_options.html
-from google.cloud import dataplex_v1
-
-
-def sample_run_task():
-    # Create a client
-    client = dataplex_v1.DataplexServiceClient()
-
-    # Initialize request argument(s)
-    request = dataplex_v1.RunTaskRequest(
-        name="name_value",
-    )
-
-    # Make the request
-    response = client.run_task(request=request)
-
-    # Handle the response
-    print(response)
-
-# [END dataplex_v1_generated_DataplexService_RunTask_sync]
diff --git a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_update_asset_async.py b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_update_asset_async.py
deleted file mode 100644
index 2e52a26e6a0c..000000000000
--- a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_update_asset_async.py
+++ /dev/null
@@ -1,59 +0,0 @@
-# -*- coding: utf-8 -*-
-# Copyright 2024 Google LLC
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-# Generated code. DO NOT EDIT!
-#
-# Snippet for UpdateAsset
-# NOTE: This snippet has been automatically generated for illustrative purposes only.
-# It may require modifications to work in your environment.
-
-# To install the latest published package dependency, execute the following:
-#   python3 -m pip install google-cloud-dataplex
-
-
-# [START dataplex_v1_generated_DataplexService_UpdateAsset_async]
-# This snippet has been automatically generated and should be regarded as a
-# code template only.
-# It will require modifications to work:
-# - It may require correct/in-range values for request initialization.
-# - It may require specifying regional endpoints when creating the service
-#   client as shown in:
-#   https://googleapis.dev/python/google-api-core/latest/client_options.html
-from google.cloud import dataplex_v1
-
-
-async def sample_update_asset():
-    # Create a client
-    client = dataplex_v1.DataplexServiceAsyncClient()
-
-    # Initialize request argument(s)
-    asset = dataplex_v1.Asset()
-    asset.resource_spec.type_ = "BIGQUERY_DATASET"
-
-    request = dataplex_v1.UpdateAssetRequest(
-        asset=asset,
-    )
-
-    # Make the request
-    operation = client.update_asset(request=request)
-
-    print("Waiting for operation to complete...")
-
-    # Await the call to get the async operation, then await its result() coroutine
-    response = await (await operation).result()
-
-    # Handle the response
-    print(response)
-
-# [END dataplex_v1_generated_DataplexService_UpdateAsset_async]
diff --git a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_update_asset_sync.py b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_update_asset_sync.py
deleted file mode 100644
index 89edd8ed6676..000000000000
--- a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_update_asset_sync.py
+++ /dev/null
@@ -1,59 +0,0 @@
-# -*- coding: utf-8 -*-
-# Copyright 2024 Google LLC
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-# Generated code. DO NOT EDIT!
-#
-# Snippet for UpdateAsset
-# NOTE: This snippet has been automatically generated for illustrative purposes only.
-# It may require modifications to work in your environment.
-
-# To install the latest published package dependency, execute the following:
-#   python3 -m pip install google-cloud-dataplex
-
-
-# [START dataplex_v1_generated_DataplexService_UpdateAsset_sync]
-# This snippet has been automatically generated and should be regarded as a
-# code template only.
-# It will require modifications to work:
-# - It may require correct/in-range values for request initialization.
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import dataplex_v1 - - -def sample_update_asset(): - # Create a client - client = dataplex_v1.DataplexServiceClient() - - # Initialize request argument(s) - asset = dataplex_v1.Asset() - asset.resource_spec.type_ = "BIGQUERY_DATASET" - - request = dataplex_v1.UpdateAssetRequest( - asset=asset, - ) - - # Make the request - operation = client.update_asset(request=request) - - print("Waiting for operation to complete...") - - response = operation.result() - - # Handle the response - print(response) - -# [END dataplex_v1_generated_DataplexService_UpdateAsset_sync] diff --git a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_update_environment_async.py b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_update_environment_async.py deleted file mode 100644 index 64b312afdd8b..000000000000 --- a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_update_environment_async.py +++ /dev/null @@ -1,59 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for UpdateEnvironment -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-dataplex - - -# [START dataplex_v1_generated_DataplexService_UpdateEnvironment_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import dataplex_v1 - - -async def sample_update_environment(): - # Create a client - client = dataplex_v1.DataplexServiceAsyncClient() - - # Initialize request argument(s) - environment = dataplex_v1.Environment() - environment.infrastructure_spec.os_image.image_version = "image_version_value" - - request = dataplex_v1.UpdateEnvironmentRequest( - environment=environment, - ) - - # Make the request - operation = client.update_environment(request=request) - - print("Waiting for operation to complete...") - - response = (await operation).result() - - # Handle the response - print(response) - -# [END dataplex_v1_generated_DataplexService_UpdateEnvironment_async] diff --git a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_update_environment_sync.py b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_update_environment_sync.py deleted file mode 100644 index 6475b2715973..000000000000 --- a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_update_environment_sync.py +++ /dev/null @@ -1,59 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for UpdateEnvironment -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-dataplex - - -# [START dataplex_v1_generated_DataplexService_UpdateEnvironment_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import dataplex_v1 - - -def sample_update_environment(): - # Create a client - client = dataplex_v1.DataplexServiceClient() - - # Initialize request argument(s) - environment = dataplex_v1.Environment() - environment.infrastructure_spec.os_image.image_version = "image_version_value" - - request = dataplex_v1.UpdateEnvironmentRequest( - environment=environment, - ) - - # Make the request - operation = client.update_environment(request=request) - - print("Waiting for operation to complete...") - - response = operation.result() - - # Handle the response - print(response) - -# [END dataplex_v1_generated_DataplexService_UpdateEnvironment_sync] diff --git a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_update_lake_async.py b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_update_lake_async.py deleted file mode 100644 index 2b5c336fa65e..000000000000 --- a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_update_lake_async.py +++ /dev/null @@ -1,55 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for UpdateLake -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-dataplex - - -# [START dataplex_v1_generated_DataplexService_UpdateLake_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import dataplex_v1 - - -async def sample_update_lake(): - # Create a client - client = dataplex_v1.DataplexServiceAsyncClient() - - # Initialize request argument(s) - request = dataplex_v1.UpdateLakeRequest( - ) - - # Make the request - operation = client.update_lake(request=request) - - print("Waiting for operation to complete...") - - response = (await operation).result() - - # Handle the response - print(response) - -# [END dataplex_v1_generated_DataplexService_UpdateLake_async] diff --git a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_update_lake_sync.py b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_update_lake_sync.py deleted file mode 100644 index ab9737be9bd8..000000000000 --- a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_update_lake_sync.py +++ /dev/null @@ -1,55 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for UpdateLake -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-dataplex - - -# [START dataplex_v1_generated_DataplexService_UpdateLake_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import dataplex_v1 - - -def sample_update_lake(): - # Create a client - client = dataplex_v1.DataplexServiceClient() - - # Initialize request argument(s) - request = dataplex_v1.UpdateLakeRequest( - ) - - # Make the request - operation = client.update_lake(request=request) - - print("Waiting for operation to complete...") - - response = operation.result() - - # Handle the response - print(response) - -# [END dataplex_v1_generated_DataplexService_UpdateLake_sync] diff --git a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_update_task_async.py b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_update_task_async.py deleted file mode 100644 index 486735063aed..000000000000 --- a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_update_task_async.py +++ /dev/null @@ -1,62 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for UpdateTask -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-dataplex - - -# [START dataplex_v1_generated_DataplexService_UpdateTask_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import dataplex_v1 - - -async def sample_update_task(): - # Create a client - client = dataplex_v1.DataplexServiceAsyncClient() - - # Initialize request argument(s) - task = dataplex_v1.Task() - task.spark.main_jar_file_uri = "main_jar_file_uri_value" - task.trigger_spec.schedule = "schedule_value" - task.trigger_spec.type_ = "RECURRING" - task.execution_spec.service_account = "service_account_value" - - request = dataplex_v1.UpdateTaskRequest( - task=task, - ) - - # Make the request - operation = client.update_task(request=request) - - print("Waiting for operation to complete...") - - response = (await operation).result() - - # Handle the response - print(response) - -# [END dataplex_v1_generated_DataplexService_UpdateTask_async] diff --git a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_update_task_sync.py b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_update_task_sync.py deleted file mode 100644 index 245be1d6a4ee..000000000000 --- a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_update_task_sync.py +++ /dev/null @@ -1,62 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for UpdateTask -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-dataplex - - -# [START dataplex_v1_generated_DataplexService_UpdateTask_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import dataplex_v1 - - -def sample_update_task(): - # Create a client - client = dataplex_v1.DataplexServiceClient() - - # Initialize request argument(s) - task = dataplex_v1.Task() - task.spark.main_jar_file_uri = "main_jar_file_uri_value" - task.trigger_spec.schedule = "schedule_value" - task.trigger_spec.type_ = "RECURRING" - task.execution_spec.service_account = "service_account_value" - - request = dataplex_v1.UpdateTaskRequest( - task=task, - ) - - # Make the request - operation = client.update_task(request=request) - - print("Waiting for operation to complete...") - - response = operation.result() - - # Handle the response - print(response) - -# [END dataplex_v1_generated_DataplexService_UpdateTask_sync] diff --git a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_update_zone_async.py b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_update_zone_async.py deleted file mode 100644 index fae1f8d4a5f0..000000000000 --- a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_update_zone_async.py +++ /dev/null @@ -1,60 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for UpdateZone -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-dataplex - - -# [START dataplex_v1_generated_DataplexService_UpdateZone_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import dataplex_v1 - - -async def sample_update_zone(): - # Create a client - client = dataplex_v1.DataplexServiceAsyncClient() - - # Initialize request argument(s) - zone = dataplex_v1.Zone() - zone.type_ = "CURATED" - zone.resource_spec.location_type = "MULTI_REGION" - - request = dataplex_v1.UpdateZoneRequest( - zone=zone, - ) - - # Make the request - operation = client.update_zone(request=request) - - print("Waiting for operation to complete...") - - response = (await operation).result() - - # Handle the response - print(response) - -# [END dataplex_v1_generated_DataplexService_UpdateZone_async] diff --git a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_update_zone_sync.py b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_update_zone_sync.py deleted file mode 100644 index d12cf99292a0..000000000000 --- a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_update_zone_sync.py +++ /dev/null @@ -1,60 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for UpdateZone -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-dataplex - - -# [START dataplex_v1_generated_DataplexService_UpdateZone_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import dataplex_v1 - - -def sample_update_zone(): - # Create a client - client = dataplex_v1.DataplexServiceClient() - - # Initialize request argument(s) - zone = dataplex_v1.Zone() - zone.type_ = "CURATED" - zone.resource_spec.location_type = "MULTI_REGION" - - request = dataplex_v1.UpdateZoneRequest( - zone=zone, - ) - - # Make the request - operation = client.update_zone(request=request) - - print("Waiting for operation to complete...") - - response = operation.result() - - # Handle the response - print(response) - -# [END dataplex_v1_generated_DataplexService_UpdateZone_sync] diff --git a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_metadata_service_create_entity_async.py b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_metadata_service_create_entity_async.py deleted file mode 100644 index 646999836556..000000000000 --- a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_metadata_service_create_entity_async.py +++ /dev/null @@ -1,62 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for CreateEntity -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-dataplex - - -# [START dataplex_v1_generated_MetadataService_CreateEntity_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import dataplex_v1 - - -async def sample_create_entity(): - # Create a client - client = dataplex_v1.MetadataServiceAsyncClient() - - # Initialize request argument(s) - entity = dataplex_v1.Entity() - entity.id = "id_value" - entity.type_ = "FILESET" - entity.asset = "asset_value" - entity.data_path = "data_path_value" - entity.system = "BIGQUERY" - entity.format_.mime_type = "mime_type_value" - entity.schema.user_managed = True - - request = dataplex_v1.CreateEntityRequest( - parent="parent_value", - entity=entity, - ) - - # Make the request - response = await client.create_entity(request=request) - - # Handle the response - print(response) - -# [END dataplex_v1_generated_MetadataService_CreateEntity_async] diff --git a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_metadata_service_create_entity_sync.py b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_metadata_service_create_entity_sync.py deleted file mode 100644 index 93fa897714bf..000000000000 --- a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_metadata_service_create_entity_sync.py +++ /dev/null @@ -1,62 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for CreateEntity -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-dataplex - - -# [START dataplex_v1_generated_MetadataService_CreateEntity_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import dataplex_v1 - - -def sample_create_entity(): - # Create a client - client = dataplex_v1.MetadataServiceClient() - - # Initialize request argument(s) - entity = dataplex_v1.Entity() - entity.id = "id_value" - entity.type_ = "FILESET" - entity.asset = "asset_value" - entity.data_path = "data_path_value" - entity.system = "BIGQUERY" - entity.format_.mime_type = "mime_type_value" - entity.schema.user_managed = True - - request = dataplex_v1.CreateEntityRequest( - parent="parent_value", - entity=entity, - ) - - # Make the request - response = client.create_entity(request=request) - - # Handle the response - print(response) - -# [END dataplex_v1_generated_MetadataService_CreateEntity_sync] diff --git a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_metadata_service_create_partition_async.py b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_metadata_service_create_partition_async.py deleted file mode 100644 index b26600eefc21..000000000000 --- a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_metadata_service_create_partition_async.py +++ /dev/null @@ -1,57 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for CreatePartition -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-dataplex - - -# [START dataplex_v1_generated_MetadataService_CreatePartition_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import dataplex_v1 - - -async def sample_create_partition(): - # Create a client - client = dataplex_v1.MetadataServiceAsyncClient() - - # Initialize request argument(s) - partition = dataplex_v1.Partition() - partition.values = ['values_value1', 'values_value2'] - partition.location = "location_value" - - request = dataplex_v1.CreatePartitionRequest( - parent="parent_value", - partition=partition, - ) - - # Make the request - response = await client.create_partition(request=request) - - # Handle the response - print(response) - -# [END dataplex_v1_generated_MetadataService_CreatePartition_async] diff --git a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_metadata_service_create_partition_sync.py b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_metadata_service_create_partition_sync.py deleted file mode 100644 index 5b7dc07cd1cb..000000000000 --- a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_metadata_service_create_partition_sync.py +++ /dev/null @@ -1,57 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for CreatePartition -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-dataplex - - -# [START dataplex_v1_generated_MetadataService_CreatePartition_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import dataplex_v1 - - -def sample_create_partition(): - # Create a client - client = dataplex_v1.MetadataServiceClient() - - # Initialize request argument(s) - partition = dataplex_v1.Partition() - partition.values = ['values_value1', 'values_value2'] - partition.location = "location_value" - - request = dataplex_v1.CreatePartitionRequest( - parent="parent_value", - partition=partition, - ) - - # Make the request - response = client.create_partition(request=request) - - # Handle the response - print(response) - -# [END dataplex_v1_generated_MetadataService_CreatePartition_sync] diff --git a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_metadata_service_delete_entity_async.py b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_metadata_service_delete_entity_async.py deleted file mode 100644 index 299fff1ade47..000000000000 --- a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_metadata_service_delete_entity_async.py +++ /dev/null @@ -1,51 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for DeleteEntity -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-dataplex - - -# [START dataplex_v1_generated_MetadataService_DeleteEntity_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import dataplex_v1 - - -async def sample_delete_entity(): - # Create a client - client = dataplex_v1.MetadataServiceAsyncClient() - - # Initialize request argument(s) - request = dataplex_v1.DeleteEntityRequest( - name="name_value", - etag="etag_value", - ) - - # Make the request - await client.delete_entity(request=request) - - -# [END dataplex_v1_generated_MetadataService_DeleteEntity_async] diff --git a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_metadata_service_delete_entity_sync.py b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_metadata_service_delete_entity_sync.py deleted file mode 100644 index c89c6dbeac1f..000000000000 --- a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_metadata_service_delete_entity_sync.py +++ /dev/null @@ -1,51 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for DeleteEntity -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-dataplex - - -# [START dataplex_v1_generated_MetadataService_DeleteEntity_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import dataplex_v1 - - -def sample_delete_entity(): - # Create a client - client = dataplex_v1.MetadataServiceClient() - - # Initialize request argument(s) - request = dataplex_v1.DeleteEntityRequest( - name="name_value", - etag="etag_value", - ) - - # Make the request - client.delete_entity(request=request) - - -# [END dataplex_v1_generated_MetadataService_DeleteEntity_sync] diff --git a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_metadata_service_delete_partition_async.py b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_metadata_service_delete_partition_async.py deleted file mode 100644 index e3d17ce188a7..000000000000 --- a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_metadata_service_delete_partition_async.py +++ /dev/null @@ -1,50 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for DeletePartition -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-dataplex - - -# [START dataplex_v1_generated_MetadataService_DeletePartition_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import dataplex_v1 - - -async def sample_delete_partition(): - # Create a client - client = dataplex_v1.MetadataServiceAsyncClient() - - # Initialize request argument(s) - request = dataplex_v1.DeletePartitionRequest( - name="name_value", - ) - - # Make the request - await client.delete_partition(request=request) - - -# [END dataplex_v1_generated_MetadataService_DeletePartition_async] diff --git a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_metadata_service_delete_partition_sync.py b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_metadata_service_delete_partition_sync.py deleted file mode 100644 index 8bda087a6447..000000000000 --- a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_metadata_service_delete_partition_sync.py +++ /dev/null @@ -1,50 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for DeletePartition -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-dataplex - - -# [START dataplex_v1_generated_MetadataService_DeletePartition_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import dataplex_v1 - - -def sample_delete_partition(): - # Create a client - client = dataplex_v1.MetadataServiceClient() - - # Initialize request argument(s) - request = dataplex_v1.DeletePartitionRequest( - name="name_value", - ) - - # Make the request - client.delete_partition(request=request) - - -# [END dataplex_v1_generated_MetadataService_DeletePartition_sync] diff --git a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_metadata_service_get_entity_async.py b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_metadata_service_get_entity_async.py deleted file mode 100644 index 040c39c0a4c5..000000000000 --- a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_metadata_service_get_entity_async.py +++ /dev/null @@ -1,52 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for GetEntity -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-dataplex - - -# [START dataplex_v1_generated_MetadataService_GetEntity_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import dataplex_v1 - - -async def sample_get_entity(): - # Create a client - client = dataplex_v1.MetadataServiceAsyncClient() - - # Initialize request argument(s) - request = dataplex_v1.GetEntityRequest( - name="name_value", - ) - - # Make the request - response = await client.get_entity(request=request) - - # Handle the response - print(response) - -# [END dataplex_v1_generated_MetadataService_GetEntity_async] diff --git a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_metadata_service_get_entity_sync.py b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_metadata_service_get_entity_sync.py deleted file mode 100644 index 5ee30bf5fa57..000000000000 --- a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_metadata_service_get_entity_sync.py +++ /dev/null @@ -1,52 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for GetEntity -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-dataplex - - -# [START dataplex_v1_generated_MetadataService_GetEntity_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import dataplex_v1 - - -def sample_get_entity(): - # Create a client - client = dataplex_v1.MetadataServiceClient() - - # Initialize request argument(s) - request = dataplex_v1.GetEntityRequest( - name="name_value", - ) - - # Make the request - response = client.get_entity(request=request) - - # Handle the response - print(response) - -# [END dataplex_v1_generated_MetadataService_GetEntity_sync] diff --git a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_metadata_service_get_partition_async.py b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_metadata_service_get_partition_async.py deleted file mode 100644 index b8173acc5686..000000000000 --- a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_metadata_service_get_partition_async.py +++ /dev/null @@ -1,52 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for GetPartition -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-dataplex - - -# [START dataplex_v1_generated_MetadataService_GetPartition_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import dataplex_v1 - - -async def sample_get_partition(): - # Create a client - client = dataplex_v1.MetadataServiceAsyncClient() - - # Initialize request argument(s) - request = dataplex_v1.GetPartitionRequest( - name="name_value", - ) - - # Make the request - response = await client.get_partition(request=request) - - # Handle the response - print(response) - -# [END dataplex_v1_generated_MetadataService_GetPartition_async] diff --git a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_metadata_service_get_partition_sync.py b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_metadata_service_get_partition_sync.py deleted file mode 100644 index 2545e37f20a7..000000000000 --- a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_metadata_service_get_partition_sync.py +++ /dev/null @@ -1,52 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for GetPartition -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-dataplex - - -# [START dataplex_v1_generated_MetadataService_GetPartition_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import dataplex_v1 - - -def sample_get_partition(): - # Create a client - client = dataplex_v1.MetadataServiceClient() - - # Initialize request argument(s) - request = dataplex_v1.GetPartitionRequest( - name="name_value", - ) - - # Make the request - response = client.get_partition(request=request) - - # Handle the response - print(response) - -# [END dataplex_v1_generated_MetadataService_GetPartition_sync] diff --git a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_metadata_service_list_entities_async.py b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_metadata_service_list_entities_async.py deleted file mode 100644 index 515797203908..000000000000 --- a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_metadata_service_list_entities_async.py +++ /dev/null @@ -1,54 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for ListEntities -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-dataplex - - -# [START dataplex_v1_generated_MetadataService_ListEntities_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import dataplex_v1 - - -async def sample_list_entities(): - # Create a client - client = dataplex_v1.MetadataServiceAsyncClient() - - # Initialize request argument(s) - request = dataplex_v1.ListEntitiesRequest( - parent="parent_value", - view="FILESETS", - ) - - # Make the request - page_result = client.list_entities(request=request) - - # Handle the response - async for response in page_result: - print(response) - -# [END dataplex_v1_generated_MetadataService_ListEntities_async] diff --git a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_metadata_service_list_entities_sync.py b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_metadata_service_list_entities_sync.py deleted file mode 100644 index ba07568fef8d..000000000000 --- a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_metadata_service_list_entities_sync.py +++ /dev/null @@ -1,54 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for ListEntities -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-dataplex - - -# [START dataplex_v1_generated_MetadataService_ListEntities_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import dataplex_v1 - - -def sample_list_entities(): - # Create a client - client = dataplex_v1.MetadataServiceClient() - - # Initialize request argument(s) - request = dataplex_v1.ListEntitiesRequest( - parent="parent_value", - view="FILESETS", - ) - - # Make the request - page_result = client.list_entities(request=request) - - # Handle the response - for response in page_result: - print(response) - -# [END dataplex_v1_generated_MetadataService_ListEntities_sync] diff --git a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_metadata_service_list_partitions_async.py b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_metadata_service_list_partitions_async.py deleted file mode 100644 index b768b015e2f4..000000000000 --- a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_metadata_service_list_partitions_async.py +++ /dev/null @@ -1,53 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for ListPartitions -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-dataplex - - -# [START dataplex_v1_generated_MetadataService_ListPartitions_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import dataplex_v1 - - -async def sample_list_partitions(): - # Create a client - client = dataplex_v1.MetadataServiceAsyncClient() - - # Initialize request argument(s) - request = dataplex_v1.ListPartitionsRequest( - parent="parent_value", - ) - - # Make the request - page_result = client.list_partitions(request=request) - - # Handle the response - async for response in page_result: - print(response) - -# [END dataplex_v1_generated_MetadataService_ListPartitions_async] diff --git a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_metadata_service_list_partitions_sync.py b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_metadata_service_list_partitions_sync.py deleted file mode 100644 index ae571b8a8773..000000000000 --- a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_metadata_service_list_partitions_sync.py +++ /dev/null @@ -1,53 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for ListPartitions -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-dataplex - - -# [START dataplex_v1_generated_MetadataService_ListPartitions_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import dataplex_v1 - - -def sample_list_partitions(): - # Create a client - client = dataplex_v1.MetadataServiceClient() - - # Initialize request argument(s) - request = dataplex_v1.ListPartitionsRequest( - parent="parent_value", - ) - - # Make the request - page_result = client.list_partitions(request=request) - - # Handle the response - for response in page_result: - print(response) - -# [END dataplex_v1_generated_MetadataService_ListPartitions_sync] diff --git a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_metadata_service_update_entity_async.py b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_metadata_service_update_entity_async.py deleted file mode 100644 index bd878e0fb8e7..000000000000 --- a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_metadata_service_update_entity_async.py +++ /dev/null @@ -1,61 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for UpdateEntity -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-dataplex - - -# [START dataplex_v1_generated_MetadataService_UpdateEntity_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
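# [Editor's sketch — not part of the generated files in this diff.]
# The sync sample above iterates item by item; the pager returned by
# list_partitions() also exposes .pages for page-at-a-time handling, where
# each page is one ListPartitionsResponse from the service.
from google.cloud import dataplex_v1

client = dataplex_v1.MetadataServiceClient()
request = dataplex_v1.ListPartitionsRequest(parent="parent_value")
page_result = client.list_partitions(request=request)
for page in page_result.pages:
    # Each page carries the batch of partitions returned by a single RPC.
    for partition in page.partitions:
        print(partition.name)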
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import dataplex_v1 - - -async def sample_update_entity(): - # Create a client - client = dataplex_v1.MetadataServiceAsyncClient() - - # Initialize request argument(s) - entity = dataplex_v1.Entity() - entity.id = "id_value" - entity.type_ = "FILESET" - entity.asset = "asset_value" - entity.data_path = "data_path_value" - entity.system = "BIGQUERY" - entity.format_.mime_type = "mime_type_value" - entity.schema.user_managed = True - - request = dataplex_v1.UpdateEntityRequest( - entity=entity, - ) - - # Make the request - response = await client.update_entity(request=request) - - # Handle the response - print(response) - -# [END dataplex_v1_generated_MetadataService_UpdateEntity_async] diff --git a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_metadata_service_update_entity_sync.py b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_metadata_service_update_entity_sync.py deleted file mode 100644 index 5362969247ef..000000000000 --- a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_metadata_service_update_entity_sync.py +++ /dev/null @@ -1,61 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for UpdateEntity -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-dataplex - - -# [START dataplex_v1_generated_MetadataService_UpdateEntity_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
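# [Editor's sketch — not part of the generated files in this diff.]
# A minimal error-handling variant of the request/execution step, meant to sit
# inside sample_update_entity() above (it uses `await`, plus the `client` and
# `request` objects built there). GoogleAPICallError is the common base class
# for RPC failures such as INVALID_ARGUMENT, NOT_FOUND, and permission errors.
from google.api_core import exceptions as gax_exceptions

try:
    response = await client.update_entity(request=request)
except gax_exceptions.GoogleAPICallError as exc:
    print(f"UpdateEntity failed: {exc}")
else:
    print(response)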
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import dataplex_v1 - - -def sample_update_entity(): - # Create a client - client = dataplex_v1.MetadataServiceClient() - - # Initialize request argument(s) - entity = dataplex_v1.Entity() - entity.id = "id_value" - entity.type_ = "FILESET" - entity.asset = "asset_value" - entity.data_path = "data_path_value" - entity.system = "BIGQUERY" - entity.format_.mime_type = "mime_type_value" - entity.schema.user_managed = True - - request = dataplex_v1.UpdateEntityRequest( - entity=entity, - ) - - # Make the request - response = client.update_entity(request=request) - - # Handle the response - print(response) - -# [END dataplex_v1_generated_MetadataService_UpdateEntity_sync] diff --git a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/snippet_metadata_google.cloud.dataplex.v1.json b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/snippet_metadata_google.cloud.dataplex.v1.json deleted file mode 100644 index a7eb15b2cc0d..000000000000 --- a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/snippet_metadata_google.cloud.dataplex.v1.json +++ /dev/null @@ -1,16421 +0,0 @@ -{ - "clientLibrary": { - "apis": [ - { - "id": "google.cloud.dataplex.v1", - "version": "v1" - } - ], - "language": "PYTHON", - "name": "google-cloud-dataplex", - "version": "0.1.0" - }, - "snippets": [ - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.dataplex_v1.CatalogServiceAsyncClient", - "shortName": "CatalogServiceAsyncClient" - }, - "fullName": "google.cloud.dataplex_v1.CatalogServiceAsyncClient.cancel_metadata_job", - "method": { - "fullName": "google.cloud.dataplex.v1.CatalogService.CancelMetadataJob", - "service": { - "fullName": "google.cloud.dataplex.v1.CatalogService", - "shortName": "CatalogService" - }, - "shortName": "CancelMetadataJob" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.dataplex_v1.types.CancelMetadataJobRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "shortName": "cancel_metadata_job" - }, - "description": "Sample for CancelMetadataJob", - "file": "dataplex_v1_generated_catalog_service_cancel_metadata_job_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "dataplex_v1_generated_CatalogService_CancelMetadataJob_async", - "segments": [ - { - "end": 49, - "start": 27, - "type": "FULL" - }, - { - "end": 49, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 50, - "type": "RESPONSE_HANDLING" - } - ], - "title": "dataplex_v1_generated_catalog_service_cancel_metadata_job_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.dataplex_v1.CatalogServiceClient", - "shortName": "CatalogServiceClient" - }, - "fullName": "google.cloud.dataplex_v1.CatalogServiceClient.cancel_metadata_job", - "method": { - "fullName": "google.cloud.dataplex.v1.CatalogService.CancelMetadataJob", - "service": { - "fullName": 
"google.cloud.dataplex.v1.CatalogService", - "shortName": "CatalogService" - }, - "shortName": "CancelMetadataJob" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.dataplex_v1.types.CancelMetadataJobRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "shortName": "cancel_metadata_job" - }, - "description": "Sample for CancelMetadataJob", - "file": "dataplex_v1_generated_catalog_service_cancel_metadata_job_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "dataplex_v1_generated_CatalogService_CancelMetadataJob_sync", - "segments": [ - { - "end": 49, - "start": 27, - "type": "FULL" - }, - { - "end": 49, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 50, - "type": "RESPONSE_HANDLING" - } - ], - "title": "dataplex_v1_generated_catalog_service_cancel_metadata_job_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.dataplex_v1.CatalogServiceAsyncClient", - "shortName": "CatalogServiceAsyncClient" - }, - "fullName": "google.cloud.dataplex_v1.CatalogServiceAsyncClient.create_aspect_type", - "method": { - "fullName": "google.cloud.dataplex.v1.CatalogService.CreateAspectType", - "service": { - "fullName": "google.cloud.dataplex.v1.CatalogService", - "shortName": "CatalogService" - }, - "shortName": "CreateAspectType" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.dataplex_v1.types.CreateAspectTypeRequest" - }, - { - "name": "parent", - "type": "str" - }, - { - "name": "aspect_type", - "type": "google.cloud.dataplex_v1.types.AspectType" - }, - { - "name": "aspect_type_id", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.api_core.operation_async.AsyncOperation", - "shortName": "create_aspect_type" - }, - "description": "Sample for CreateAspectType", - "file": "dataplex_v1_generated_catalog_service_create_aspect_type_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "dataplex_v1_generated_CatalogService_CreateAspectType_async", - "segments": [ - { - "end": 61, - "start": 27, - "type": "FULL" - }, - { - "end": 61, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 51, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 58, - "start": 52, - "type": "REQUEST_EXECUTION" - }, - { - "end": 62, - "start": 59, - "type": "RESPONSE_HANDLING" - } - ], - "title": "dataplex_v1_generated_catalog_service_create_aspect_type_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.dataplex_v1.CatalogServiceClient", - "shortName": "CatalogServiceClient" - }, - "fullName": "google.cloud.dataplex_v1.CatalogServiceClient.create_aspect_type", - "method": { - "fullName": "google.cloud.dataplex.v1.CatalogService.CreateAspectType", - "service": { - "fullName": "google.cloud.dataplex.v1.CatalogService", - "shortName": "CatalogService" - }, - "shortName": 
"CreateAspectType" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.dataplex_v1.types.CreateAspectTypeRequest" - }, - { - "name": "parent", - "type": "str" - }, - { - "name": "aspect_type", - "type": "google.cloud.dataplex_v1.types.AspectType" - }, - { - "name": "aspect_type_id", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.api_core.operation.Operation", - "shortName": "create_aspect_type" - }, - "description": "Sample for CreateAspectType", - "file": "dataplex_v1_generated_catalog_service_create_aspect_type_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "dataplex_v1_generated_CatalogService_CreateAspectType_sync", - "segments": [ - { - "end": 61, - "start": 27, - "type": "FULL" - }, - { - "end": 61, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 51, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 58, - "start": 52, - "type": "REQUEST_EXECUTION" - }, - { - "end": 62, - "start": 59, - "type": "RESPONSE_HANDLING" - } - ], - "title": "dataplex_v1_generated_catalog_service_create_aspect_type_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.dataplex_v1.CatalogServiceAsyncClient", - "shortName": "CatalogServiceAsyncClient" - }, - "fullName": "google.cloud.dataplex_v1.CatalogServiceAsyncClient.create_entry_group", - "method": { - "fullName": "google.cloud.dataplex.v1.CatalogService.CreateEntryGroup", - "service": { - "fullName": "google.cloud.dataplex.v1.CatalogService", - "shortName": "CatalogService" - }, - "shortName": "CreateEntryGroup" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.dataplex_v1.types.CreateEntryGroupRequest" - }, - { - "name": "parent", - "type": "str" - }, - { - "name": "entry_group", - "type": "google.cloud.dataplex_v1.types.EntryGroup" - }, - { - "name": "entry_group_id", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.api_core.operation_async.AsyncOperation", - "shortName": "create_entry_group" - }, - "description": "Sample for CreateEntryGroup", - "file": "dataplex_v1_generated_catalog_service_create_entry_group_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "dataplex_v1_generated_CatalogService_CreateEntryGroup_async", - "segments": [ - { - "end": 56, - "start": 27, - "type": "FULL" - }, - { - "end": 56, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 46, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 53, - "start": 47, - "type": "REQUEST_EXECUTION" - }, - { - "end": 57, - "start": 54, - "type": "RESPONSE_HANDLING" - } - ], - "title": "dataplex_v1_generated_catalog_service_create_entry_group_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.dataplex_v1.CatalogServiceClient", - "shortName": "CatalogServiceClient" - }, - "fullName": "google.cloud.dataplex_v1.CatalogServiceClient.create_entry_group", - "method": { - "fullName": "google.cloud.dataplex.v1.CatalogService.CreateEntryGroup", - 
"service": { - "fullName": "google.cloud.dataplex.v1.CatalogService", - "shortName": "CatalogService" - }, - "shortName": "CreateEntryGroup" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.dataplex_v1.types.CreateEntryGroupRequest" - }, - { - "name": "parent", - "type": "str" - }, - { - "name": "entry_group", - "type": "google.cloud.dataplex_v1.types.EntryGroup" - }, - { - "name": "entry_group_id", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.api_core.operation.Operation", - "shortName": "create_entry_group" - }, - "description": "Sample for CreateEntryGroup", - "file": "dataplex_v1_generated_catalog_service_create_entry_group_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "dataplex_v1_generated_CatalogService_CreateEntryGroup_sync", - "segments": [ - { - "end": 56, - "start": 27, - "type": "FULL" - }, - { - "end": 56, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 46, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 53, - "start": 47, - "type": "REQUEST_EXECUTION" - }, - { - "end": 57, - "start": 54, - "type": "RESPONSE_HANDLING" - } - ], - "title": "dataplex_v1_generated_catalog_service_create_entry_group_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.dataplex_v1.CatalogServiceAsyncClient", - "shortName": "CatalogServiceAsyncClient" - }, - "fullName": "google.cloud.dataplex_v1.CatalogServiceAsyncClient.create_entry_type", - "method": { - "fullName": "google.cloud.dataplex.v1.CatalogService.CreateEntryType", - "service": { - "fullName": "google.cloud.dataplex.v1.CatalogService", - "shortName": "CatalogService" - }, - "shortName": "CreateEntryType" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.dataplex_v1.types.CreateEntryTypeRequest" - }, - { - "name": "parent", - "type": "str" - }, - { - "name": "entry_type", - "type": "google.cloud.dataplex_v1.types.EntryType" - }, - { - "name": "entry_type_id", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.api_core.operation_async.AsyncOperation", - "shortName": "create_entry_type" - }, - "description": "Sample for CreateEntryType", - "file": "dataplex_v1_generated_catalog_service_create_entry_type_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "dataplex_v1_generated_CatalogService_CreateEntryType_async", - "segments": [ - { - "end": 56, - "start": 27, - "type": "FULL" - }, - { - "end": 56, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 46, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 53, - "start": 47, - "type": "REQUEST_EXECUTION" - }, - { - "end": 57, - "start": 54, - "type": "RESPONSE_HANDLING" - } - ], - "title": "dataplex_v1_generated_catalog_service_create_entry_type_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.dataplex_v1.CatalogServiceClient", - "shortName": "CatalogServiceClient" - }, - "fullName": 
"google.cloud.dataplex_v1.CatalogServiceClient.create_entry_type", - "method": { - "fullName": "google.cloud.dataplex.v1.CatalogService.CreateEntryType", - "service": { - "fullName": "google.cloud.dataplex.v1.CatalogService", - "shortName": "CatalogService" - }, - "shortName": "CreateEntryType" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.dataplex_v1.types.CreateEntryTypeRequest" - }, - { - "name": "parent", - "type": "str" - }, - { - "name": "entry_type", - "type": "google.cloud.dataplex_v1.types.EntryType" - }, - { - "name": "entry_type_id", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.api_core.operation.Operation", - "shortName": "create_entry_type" - }, - "description": "Sample for CreateEntryType", - "file": "dataplex_v1_generated_catalog_service_create_entry_type_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "dataplex_v1_generated_CatalogService_CreateEntryType_sync", - "segments": [ - { - "end": 56, - "start": 27, - "type": "FULL" - }, - { - "end": 56, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 46, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 53, - "start": 47, - "type": "REQUEST_EXECUTION" - }, - { - "end": 57, - "start": 54, - "type": "RESPONSE_HANDLING" - } - ], - "title": "dataplex_v1_generated_catalog_service_create_entry_type_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.dataplex_v1.CatalogServiceAsyncClient", - "shortName": "CatalogServiceAsyncClient" - }, - "fullName": "google.cloud.dataplex_v1.CatalogServiceAsyncClient.create_entry", - "method": { - "fullName": "google.cloud.dataplex.v1.CatalogService.CreateEntry", - "service": { - "fullName": "google.cloud.dataplex.v1.CatalogService", - "shortName": "CatalogService" - }, - "shortName": "CreateEntry" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.dataplex_v1.types.CreateEntryRequest" - }, - { - "name": "parent", - "type": "str" - }, - { - "name": "entry", - "type": "google.cloud.dataplex_v1.types.Entry" - }, - { - "name": "entry_id", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.cloud.dataplex_v1.types.Entry", - "shortName": "create_entry" - }, - "description": "Sample for CreateEntry", - "file": "dataplex_v1_generated_catalog_service_create_entry_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "dataplex_v1_generated_CatalogService_CreateEntry_async", - "segments": [ - { - "end": 56, - "start": 27, - "type": "FULL" - }, - { - "end": 56, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 50, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 53, - "start": 51, - "type": "REQUEST_EXECUTION" - }, - { - "end": 57, - "start": 54, - "type": "RESPONSE_HANDLING" - } - ], - "title": "dataplex_v1_generated_catalog_service_create_entry_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.dataplex_v1.CatalogServiceClient", - "shortName": 
"CatalogServiceClient" - }, - "fullName": "google.cloud.dataplex_v1.CatalogServiceClient.create_entry", - "method": { - "fullName": "google.cloud.dataplex.v1.CatalogService.CreateEntry", - "service": { - "fullName": "google.cloud.dataplex.v1.CatalogService", - "shortName": "CatalogService" - }, - "shortName": "CreateEntry" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.dataplex_v1.types.CreateEntryRequest" - }, - { - "name": "parent", - "type": "str" - }, - { - "name": "entry", - "type": "google.cloud.dataplex_v1.types.Entry" - }, - { - "name": "entry_id", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.cloud.dataplex_v1.types.Entry", - "shortName": "create_entry" - }, - "description": "Sample for CreateEntry", - "file": "dataplex_v1_generated_catalog_service_create_entry_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "dataplex_v1_generated_CatalogService_CreateEntry_sync", - "segments": [ - { - "end": 56, - "start": 27, - "type": "FULL" - }, - { - "end": 56, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 50, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 53, - "start": 51, - "type": "REQUEST_EXECUTION" - }, - { - "end": 57, - "start": 54, - "type": "RESPONSE_HANDLING" - } - ], - "title": "dataplex_v1_generated_catalog_service_create_entry_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.dataplex_v1.CatalogServiceAsyncClient", - "shortName": "CatalogServiceAsyncClient" - }, - "fullName": "google.cloud.dataplex_v1.CatalogServiceAsyncClient.create_metadata_job", - "method": { - "fullName": "google.cloud.dataplex.v1.CatalogService.CreateMetadataJob", - "service": { - "fullName": "google.cloud.dataplex.v1.CatalogService", - "shortName": "CatalogService" - }, - "shortName": "CreateMetadataJob" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.dataplex_v1.types.CreateMetadataJobRequest" - }, - { - "name": "parent", - "type": "str" - }, - { - "name": "metadata_job", - "type": "google.cloud.dataplex_v1.types.MetadataJob" - }, - { - "name": "metadata_job_id", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.api_core.operation_async.AsyncOperation", - "shortName": "create_metadata_job" - }, - "description": "Sample for CreateMetadataJob", - "file": "dataplex_v1_generated_catalog_service_create_metadata_job_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "dataplex_v1_generated_CatalogService_CreateMetadataJob_async", - "segments": [ - { - "end": 63, - "start": 27, - "type": "FULL" - }, - { - "end": 63, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 53, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 60, - "start": 54, - "type": "REQUEST_EXECUTION" - }, - { - "end": 64, - "start": 61, - "type": "RESPONSE_HANDLING" - } - ], - "title": "dataplex_v1_generated_catalog_service_create_metadata_job_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - 
"fullName": "google.cloud.dataplex_v1.CatalogServiceClient", - "shortName": "CatalogServiceClient" - }, - "fullName": "google.cloud.dataplex_v1.CatalogServiceClient.create_metadata_job", - "method": { - "fullName": "google.cloud.dataplex.v1.CatalogService.CreateMetadataJob", - "service": { - "fullName": "google.cloud.dataplex.v1.CatalogService", - "shortName": "CatalogService" - }, - "shortName": "CreateMetadataJob" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.dataplex_v1.types.CreateMetadataJobRequest" - }, - { - "name": "parent", - "type": "str" - }, - { - "name": "metadata_job", - "type": "google.cloud.dataplex_v1.types.MetadataJob" - }, - { - "name": "metadata_job_id", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.api_core.operation.Operation", - "shortName": "create_metadata_job" - }, - "description": "Sample for CreateMetadataJob", - "file": "dataplex_v1_generated_catalog_service_create_metadata_job_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "dataplex_v1_generated_CatalogService_CreateMetadataJob_sync", - "segments": [ - { - "end": 63, - "start": 27, - "type": "FULL" - }, - { - "end": 63, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 53, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 60, - "start": 54, - "type": "REQUEST_EXECUTION" - }, - { - "end": 64, - "start": 61, - "type": "RESPONSE_HANDLING" - } - ], - "title": "dataplex_v1_generated_catalog_service_create_metadata_job_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.dataplex_v1.CatalogServiceAsyncClient", - "shortName": "CatalogServiceAsyncClient" - }, - "fullName": "google.cloud.dataplex_v1.CatalogServiceAsyncClient.delete_aspect_type", - "method": { - "fullName": "google.cloud.dataplex.v1.CatalogService.DeleteAspectType", - "service": { - "fullName": "google.cloud.dataplex.v1.CatalogService", - "shortName": "CatalogService" - }, - "shortName": "DeleteAspectType" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.dataplex_v1.types.DeleteAspectTypeRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.api_core.operation_async.AsyncOperation", - "shortName": "delete_aspect_type" - }, - "description": "Sample for DeleteAspectType", - "file": "dataplex_v1_generated_catalog_service_delete_aspect_type_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "dataplex_v1_generated_CatalogService_DeleteAspectType_async", - "segments": [ - { - "end": 55, - "start": 27, - "type": "FULL" - }, - { - "end": 55, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 52, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 56, - "start": 53, - "type": "RESPONSE_HANDLING" - } - ], - "title": "dataplex_v1_generated_catalog_service_delete_aspect_type_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": 
"google.cloud.dataplex_v1.CatalogServiceClient", - "shortName": "CatalogServiceClient" - }, - "fullName": "google.cloud.dataplex_v1.CatalogServiceClient.delete_aspect_type", - "method": { - "fullName": "google.cloud.dataplex.v1.CatalogService.DeleteAspectType", - "service": { - "fullName": "google.cloud.dataplex.v1.CatalogService", - "shortName": "CatalogService" - }, - "shortName": "DeleteAspectType" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.dataplex_v1.types.DeleteAspectTypeRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.api_core.operation.Operation", - "shortName": "delete_aspect_type" - }, - "description": "Sample for DeleteAspectType", - "file": "dataplex_v1_generated_catalog_service_delete_aspect_type_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "dataplex_v1_generated_CatalogService_DeleteAspectType_sync", - "segments": [ - { - "end": 55, - "start": 27, - "type": "FULL" - }, - { - "end": 55, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 52, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 56, - "start": 53, - "type": "RESPONSE_HANDLING" - } - ], - "title": "dataplex_v1_generated_catalog_service_delete_aspect_type_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.dataplex_v1.CatalogServiceAsyncClient", - "shortName": "CatalogServiceAsyncClient" - }, - "fullName": "google.cloud.dataplex_v1.CatalogServiceAsyncClient.delete_entry_group", - "method": { - "fullName": "google.cloud.dataplex.v1.CatalogService.DeleteEntryGroup", - "service": { - "fullName": "google.cloud.dataplex.v1.CatalogService", - "shortName": "CatalogService" - }, - "shortName": "DeleteEntryGroup" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.dataplex_v1.types.DeleteEntryGroupRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.api_core.operation_async.AsyncOperation", - "shortName": "delete_entry_group" - }, - "description": "Sample for DeleteEntryGroup", - "file": "dataplex_v1_generated_catalog_service_delete_entry_group_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "dataplex_v1_generated_CatalogService_DeleteEntryGroup_async", - "segments": [ - { - "end": 55, - "start": 27, - "type": "FULL" - }, - { - "end": 55, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 52, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 56, - "start": 53, - "type": "RESPONSE_HANDLING" - } - ], - "title": "dataplex_v1_generated_catalog_service_delete_entry_group_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.dataplex_v1.CatalogServiceClient", - "shortName": "CatalogServiceClient" - }, - "fullName": 
"google.cloud.dataplex_v1.CatalogServiceClient.delete_entry_group", - "method": { - "fullName": "google.cloud.dataplex.v1.CatalogService.DeleteEntryGroup", - "service": { - "fullName": "google.cloud.dataplex.v1.CatalogService", - "shortName": "CatalogService" - }, - "shortName": "DeleteEntryGroup" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.dataplex_v1.types.DeleteEntryGroupRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.api_core.operation.Operation", - "shortName": "delete_entry_group" - }, - "description": "Sample for DeleteEntryGroup", - "file": "dataplex_v1_generated_catalog_service_delete_entry_group_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "dataplex_v1_generated_CatalogService_DeleteEntryGroup_sync", - "segments": [ - { - "end": 55, - "start": 27, - "type": "FULL" - }, - { - "end": 55, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 52, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 56, - "start": 53, - "type": "RESPONSE_HANDLING" - } - ], - "title": "dataplex_v1_generated_catalog_service_delete_entry_group_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.dataplex_v1.CatalogServiceAsyncClient", - "shortName": "CatalogServiceAsyncClient" - }, - "fullName": "google.cloud.dataplex_v1.CatalogServiceAsyncClient.delete_entry_type", - "method": { - "fullName": "google.cloud.dataplex.v1.CatalogService.DeleteEntryType", - "service": { - "fullName": "google.cloud.dataplex.v1.CatalogService", - "shortName": "CatalogService" - }, - "shortName": "DeleteEntryType" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.dataplex_v1.types.DeleteEntryTypeRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.api_core.operation_async.AsyncOperation", - "shortName": "delete_entry_type" - }, - "description": "Sample for DeleteEntryType", - "file": "dataplex_v1_generated_catalog_service_delete_entry_type_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "dataplex_v1_generated_CatalogService_DeleteEntryType_async", - "segments": [ - { - "end": 55, - "start": 27, - "type": "FULL" - }, - { - "end": 55, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 52, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 56, - "start": 53, - "type": "RESPONSE_HANDLING" - } - ], - "title": "dataplex_v1_generated_catalog_service_delete_entry_type_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.dataplex_v1.CatalogServiceClient", - "shortName": "CatalogServiceClient" - }, - "fullName": "google.cloud.dataplex_v1.CatalogServiceClient.delete_entry_type", - "method": { - "fullName": "google.cloud.dataplex.v1.CatalogService.DeleteEntryType", - "service": { - 
"fullName": "google.cloud.dataplex.v1.CatalogService", - "shortName": "CatalogService" - }, - "shortName": "DeleteEntryType" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.dataplex_v1.types.DeleteEntryTypeRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.api_core.operation.Operation", - "shortName": "delete_entry_type" - }, - "description": "Sample for DeleteEntryType", - "file": "dataplex_v1_generated_catalog_service_delete_entry_type_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "dataplex_v1_generated_CatalogService_DeleteEntryType_sync", - "segments": [ - { - "end": 55, - "start": 27, - "type": "FULL" - }, - { - "end": 55, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 52, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 56, - "start": 53, - "type": "RESPONSE_HANDLING" - } - ], - "title": "dataplex_v1_generated_catalog_service_delete_entry_type_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.dataplex_v1.CatalogServiceAsyncClient", - "shortName": "CatalogServiceAsyncClient" - }, - "fullName": "google.cloud.dataplex_v1.CatalogServiceAsyncClient.delete_entry", - "method": { - "fullName": "google.cloud.dataplex.v1.CatalogService.DeleteEntry", - "service": { - "fullName": "google.cloud.dataplex.v1.CatalogService", - "shortName": "CatalogService" - }, - "shortName": "DeleteEntry" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.dataplex_v1.types.DeleteEntryRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.cloud.dataplex_v1.types.Entry", - "shortName": "delete_entry" - }, - "description": "Sample for DeleteEntry", - "file": "dataplex_v1_generated_catalog_service_delete_entry_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "dataplex_v1_generated_CatalogService_DeleteEntry_async", - "segments": [ - { - "end": 51, - "start": 27, - "type": "FULL" - }, - { - "end": 51, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 52, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "dataplex_v1_generated_catalog_service_delete_entry_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.dataplex_v1.CatalogServiceClient", - "shortName": "CatalogServiceClient" - }, - "fullName": "google.cloud.dataplex_v1.CatalogServiceClient.delete_entry", - "method": { - "fullName": "google.cloud.dataplex.v1.CatalogService.DeleteEntry", - "service": { - "fullName": "google.cloud.dataplex.v1.CatalogService", - "shortName": "CatalogService" - }, - "shortName": "DeleteEntry" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.dataplex_v1.types.DeleteEntryRequest" - }, - { - 
"name": "name", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.cloud.dataplex_v1.types.Entry", - "shortName": "delete_entry" - }, - "description": "Sample for DeleteEntry", - "file": "dataplex_v1_generated_catalog_service_delete_entry_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "dataplex_v1_generated_CatalogService_DeleteEntry_sync", - "segments": [ - { - "end": 51, - "start": 27, - "type": "FULL" - }, - { - "end": 51, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 52, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "dataplex_v1_generated_catalog_service_delete_entry_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.dataplex_v1.CatalogServiceAsyncClient", - "shortName": "CatalogServiceAsyncClient" - }, - "fullName": "google.cloud.dataplex_v1.CatalogServiceAsyncClient.get_aspect_type", - "method": { - "fullName": "google.cloud.dataplex.v1.CatalogService.GetAspectType", - "service": { - "fullName": "google.cloud.dataplex.v1.CatalogService", - "shortName": "CatalogService" - }, - "shortName": "GetAspectType" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.dataplex_v1.types.GetAspectTypeRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.cloud.dataplex_v1.types.AspectType", - "shortName": "get_aspect_type" - }, - "description": "Sample for GetAspectType", - "file": "dataplex_v1_generated_catalog_service_get_aspect_type_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "dataplex_v1_generated_CatalogService_GetAspectType_async", - "segments": [ - { - "end": 51, - "start": 27, - "type": "FULL" - }, - { - "end": 51, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 52, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "dataplex_v1_generated_catalog_service_get_aspect_type_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.dataplex_v1.CatalogServiceClient", - "shortName": "CatalogServiceClient" - }, - "fullName": "google.cloud.dataplex_v1.CatalogServiceClient.get_aspect_type", - "method": { - "fullName": "google.cloud.dataplex.v1.CatalogService.GetAspectType", - "service": { - "fullName": "google.cloud.dataplex.v1.CatalogService", - "shortName": "CatalogService" - }, - "shortName": "GetAspectType" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.dataplex_v1.types.GetAspectTypeRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - 
"resultType": "google.cloud.dataplex_v1.types.AspectType", - "shortName": "get_aspect_type" - }, - "description": "Sample for GetAspectType", - "file": "dataplex_v1_generated_catalog_service_get_aspect_type_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "dataplex_v1_generated_CatalogService_GetAspectType_sync", - "segments": [ - { - "end": 51, - "start": 27, - "type": "FULL" - }, - { - "end": 51, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 52, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "dataplex_v1_generated_catalog_service_get_aspect_type_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.dataplex_v1.CatalogServiceAsyncClient", - "shortName": "CatalogServiceAsyncClient" - }, - "fullName": "google.cloud.dataplex_v1.CatalogServiceAsyncClient.get_entry_group", - "method": { - "fullName": "google.cloud.dataplex.v1.CatalogService.GetEntryGroup", - "service": { - "fullName": "google.cloud.dataplex.v1.CatalogService", - "shortName": "CatalogService" - }, - "shortName": "GetEntryGroup" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.dataplex_v1.types.GetEntryGroupRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.cloud.dataplex_v1.types.EntryGroup", - "shortName": "get_entry_group" - }, - "description": "Sample for GetEntryGroup", - "file": "dataplex_v1_generated_catalog_service_get_entry_group_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "dataplex_v1_generated_CatalogService_GetEntryGroup_async", - "segments": [ - { - "end": 51, - "start": 27, - "type": "FULL" - }, - { - "end": 51, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 52, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "dataplex_v1_generated_catalog_service_get_entry_group_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.dataplex_v1.CatalogServiceClient", - "shortName": "CatalogServiceClient" - }, - "fullName": "google.cloud.dataplex_v1.CatalogServiceClient.get_entry_group", - "method": { - "fullName": "google.cloud.dataplex.v1.CatalogService.GetEntryGroup", - "service": { - "fullName": "google.cloud.dataplex.v1.CatalogService", - "shortName": "CatalogService" - }, - "shortName": "GetEntryGroup" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.dataplex_v1.types.GetEntryGroupRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.cloud.dataplex_v1.types.EntryGroup", - "shortName": "get_entry_group" - }, - "description": "Sample for GetEntryGroup", - "file": "dataplex_v1_generated_catalog_service_get_entry_group_sync.py", - 
"language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "dataplex_v1_generated_CatalogService_GetEntryGroup_sync", - "segments": [ - { - "end": 51, - "start": 27, - "type": "FULL" - }, - { - "end": 51, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 52, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "dataplex_v1_generated_catalog_service_get_entry_group_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.dataplex_v1.CatalogServiceAsyncClient", - "shortName": "CatalogServiceAsyncClient" - }, - "fullName": "google.cloud.dataplex_v1.CatalogServiceAsyncClient.get_entry_type", - "method": { - "fullName": "google.cloud.dataplex.v1.CatalogService.GetEntryType", - "service": { - "fullName": "google.cloud.dataplex.v1.CatalogService", - "shortName": "CatalogService" - }, - "shortName": "GetEntryType" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.dataplex_v1.types.GetEntryTypeRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.cloud.dataplex_v1.types.EntryType", - "shortName": "get_entry_type" - }, - "description": "Sample for GetEntryType", - "file": "dataplex_v1_generated_catalog_service_get_entry_type_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "dataplex_v1_generated_CatalogService_GetEntryType_async", - "segments": [ - { - "end": 51, - "start": 27, - "type": "FULL" - }, - { - "end": 51, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 52, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "dataplex_v1_generated_catalog_service_get_entry_type_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.dataplex_v1.CatalogServiceClient", - "shortName": "CatalogServiceClient" - }, - "fullName": "google.cloud.dataplex_v1.CatalogServiceClient.get_entry_type", - "method": { - "fullName": "google.cloud.dataplex.v1.CatalogService.GetEntryType", - "service": { - "fullName": "google.cloud.dataplex.v1.CatalogService", - "shortName": "CatalogService" - }, - "shortName": "GetEntryType" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.dataplex_v1.types.GetEntryTypeRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.cloud.dataplex_v1.types.EntryType", - "shortName": "get_entry_type" - }, - "description": "Sample for GetEntryType", - "file": "dataplex_v1_generated_catalog_service_get_entry_type_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "dataplex_v1_generated_CatalogService_GetEntryType_sync", - "segments": [ - { - "end": 51, - "start": 27, - "type": "FULL" - }, - { - "end": 51, - "start": 27, - "type": 
"SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 52, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "dataplex_v1_generated_catalog_service_get_entry_type_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.dataplex_v1.CatalogServiceAsyncClient", - "shortName": "CatalogServiceAsyncClient" - }, - "fullName": "google.cloud.dataplex_v1.CatalogServiceAsyncClient.get_entry", - "method": { - "fullName": "google.cloud.dataplex.v1.CatalogService.GetEntry", - "service": { - "fullName": "google.cloud.dataplex.v1.CatalogService", - "shortName": "CatalogService" - }, - "shortName": "GetEntry" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.dataplex_v1.types.GetEntryRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.cloud.dataplex_v1.types.Entry", - "shortName": "get_entry" - }, - "description": "Sample for GetEntry", - "file": "dataplex_v1_generated_catalog_service_get_entry_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "dataplex_v1_generated_CatalogService_GetEntry_async", - "segments": [ - { - "end": 51, - "start": 27, - "type": "FULL" - }, - { - "end": 51, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 52, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "dataplex_v1_generated_catalog_service_get_entry_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.dataplex_v1.CatalogServiceClient", - "shortName": "CatalogServiceClient" - }, - "fullName": "google.cloud.dataplex_v1.CatalogServiceClient.get_entry", - "method": { - "fullName": "google.cloud.dataplex.v1.CatalogService.GetEntry", - "service": { - "fullName": "google.cloud.dataplex.v1.CatalogService", - "shortName": "CatalogService" - }, - "shortName": "GetEntry" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.dataplex_v1.types.GetEntryRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.cloud.dataplex_v1.types.Entry", - "shortName": "get_entry" - }, - "description": "Sample for GetEntry", - "file": "dataplex_v1_generated_catalog_service_get_entry_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "dataplex_v1_generated_CatalogService_GetEntry_sync", - "segments": [ - { - "end": 51, - "start": 27, - "type": "FULL" - }, - { - "end": 51, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 52, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": 
"dataplex_v1_generated_catalog_service_get_entry_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.dataplex_v1.CatalogServiceAsyncClient", - "shortName": "CatalogServiceAsyncClient" - }, - "fullName": "google.cloud.dataplex_v1.CatalogServiceAsyncClient.get_metadata_job", - "method": { - "fullName": "google.cloud.dataplex.v1.CatalogService.GetMetadataJob", - "service": { - "fullName": "google.cloud.dataplex.v1.CatalogService", - "shortName": "CatalogService" - }, - "shortName": "GetMetadataJob" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.dataplex_v1.types.GetMetadataJobRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.cloud.dataplex_v1.types.MetadataJob", - "shortName": "get_metadata_job" - }, - "description": "Sample for GetMetadataJob", - "file": "dataplex_v1_generated_catalog_service_get_metadata_job_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "dataplex_v1_generated_CatalogService_GetMetadataJob_async", - "segments": [ - { - "end": 51, - "start": 27, - "type": "FULL" - }, - { - "end": 51, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 52, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "dataplex_v1_generated_catalog_service_get_metadata_job_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.dataplex_v1.CatalogServiceClient", - "shortName": "CatalogServiceClient" - }, - "fullName": "google.cloud.dataplex_v1.CatalogServiceClient.get_metadata_job", - "method": { - "fullName": "google.cloud.dataplex.v1.CatalogService.GetMetadataJob", - "service": { - "fullName": "google.cloud.dataplex.v1.CatalogService", - "shortName": "CatalogService" - }, - "shortName": "GetMetadataJob" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.dataplex_v1.types.GetMetadataJobRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.cloud.dataplex_v1.types.MetadataJob", - "shortName": "get_metadata_job" - }, - "description": "Sample for GetMetadataJob", - "file": "dataplex_v1_generated_catalog_service_get_metadata_job_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "dataplex_v1_generated_CatalogService_GetMetadataJob_sync", - "segments": [ - { - "end": 51, - "start": 27, - "type": "FULL" - }, - { - "end": 51, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 52, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "dataplex_v1_generated_catalog_service_get_metadata_job_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": 
"google.cloud.dataplex_v1.CatalogServiceAsyncClient", - "shortName": "CatalogServiceAsyncClient" - }, - "fullName": "google.cloud.dataplex_v1.CatalogServiceAsyncClient.list_aspect_types", - "method": { - "fullName": "google.cloud.dataplex.v1.CatalogService.ListAspectTypes", - "service": { - "fullName": "google.cloud.dataplex.v1.CatalogService", - "shortName": "CatalogService" - }, - "shortName": "ListAspectTypes" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.dataplex_v1.types.ListAspectTypesRequest" - }, - { - "name": "parent", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.cloud.dataplex_v1.services.catalog_service.pagers.ListAspectTypesAsyncPager", - "shortName": "list_aspect_types" - }, - "description": "Sample for ListAspectTypes", - "file": "dataplex_v1_generated_catalog_service_list_aspect_types_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "dataplex_v1_generated_CatalogService_ListAspectTypes_async", - "segments": [ - { - "end": 52, - "start": 27, - "type": "FULL" - }, - { - "end": 52, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 53, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "dataplex_v1_generated_catalog_service_list_aspect_types_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.dataplex_v1.CatalogServiceClient", - "shortName": "CatalogServiceClient" - }, - "fullName": "google.cloud.dataplex_v1.CatalogServiceClient.list_aspect_types", - "method": { - "fullName": "google.cloud.dataplex.v1.CatalogService.ListAspectTypes", - "service": { - "fullName": "google.cloud.dataplex.v1.CatalogService", - "shortName": "CatalogService" - }, - "shortName": "ListAspectTypes" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.dataplex_v1.types.ListAspectTypesRequest" - }, - { - "name": "parent", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.cloud.dataplex_v1.services.catalog_service.pagers.ListAspectTypesPager", - "shortName": "list_aspect_types" - }, - "description": "Sample for ListAspectTypes", - "file": "dataplex_v1_generated_catalog_service_list_aspect_types_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "dataplex_v1_generated_CatalogService_ListAspectTypes_sync", - "segments": [ - { - "end": 52, - "start": 27, - "type": "FULL" - }, - { - "end": 52, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 53, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "dataplex_v1_generated_catalog_service_list_aspect_types_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.dataplex_v1.CatalogServiceAsyncClient", - "shortName": "CatalogServiceAsyncClient" - }, 
- "fullName": "google.cloud.dataplex_v1.CatalogServiceAsyncClient.list_entries", - "method": { - "fullName": "google.cloud.dataplex.v1.CatalogService.ListEntries", - "service": { - "fullName": "google.cloud.dataplex.v1.CatalogService", - "shortName": "CatalogService" - }, - "shortName": "ListEntries" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.dataplex_v1.types.ListEntriesRequest" - }, - { - "name": "parent", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.cloud.dataplex_v1.services.catalog_service.pagers.ListEntriesAsyncPager", - "shortName": "list_entries" - }, - "description": "Sample for ListEntries", - "file": "dataplex_v1_generated_catalog_service_list_entries_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "dataplex_v1_generated_CatalogService_ListEntries_async", - "segments": [ - { - "end": 52, - "start": 27, - "type": "FULL" - }, - { - "end": 52, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 53, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "dataplex_v1_generated_catalog_service_list_entries_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.dataplex_v1.CatalogServiceClient", - "shortName": "CatalogServiceClient" - }, - "fullName": "google.cloud.dataplex_v1.CatalogServiceClient.list_entries", - "method": { - "fullName": "google.cloud.dataplex.v1.CatalogService.ListEntries", - "service": { - "fullName": "google.cloud.dataplex.v1.CatalogService", - "shortName": "CatalogService" - }, - "shortName": "ListEntries" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.dataplex_v1.types.ListEntriesRequest" - }, - { - "name": "parent", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.cloud.dataplex_v1.services.catalog_service.pagers.ListEntriesPager", - "shortName": "list_entries" - }, - "description": "Sample for ListEntries", - "file": "dataplex_v1_generated_catalog_service_list_entries_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "dataplex_v1_generated_CatalogService_ListEntries_sync", - "segments": [ - { - "end": 52, - "start": 27, - "type": "FULL" - }, - { - "end": 52, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 53, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "dataplex_v1_generated_catalog_service_list_entries_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.dataplex_v1.CatalogServiceAsyncClient", - "shortName": "CatalogServiceAsyncClient" - }, - "fullName": "google.cloud.dataplex_v1.CatalogServiceAsyncClient.list_entry_groups", - "method": { - "fullName": "google.cloud.dataplex.v1.CatalogService.ListEntryGroups", - "service": { - 
"fullName": "google.cloud.dataplex.v1.CatalogService", - "shortName": "CatalogService" - }, - "shortName": "ListEntryGroups" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.dataplex_v1.types.ListEntryGroupsRequest" - }, - { - "name": "parent", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.cloud.dataplex_v1.services.catalog_service.pagers.ListEntryGroupsAsyncPager", - "shortName": "list_entry_groups" - }, - "description": "Sample for ListEntryGroups", - "file": "dataplex_v1_generated_catalog_service_list_entry_groups_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "dataplex_v1_generated_CatalogService_ListEntryGroups_async", - "segments": [ - { - "end": 52, - "start": 27, - "type": "FULL" - }, - { - "end": 52, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 53, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "dataplex_v1_generated_catalog_service_list_entry_groups_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.dataplex_v1.CatalogServiceClient", - "shortName": "CatalogServiceClient" - }, - "fullName": "google.cloud.dataplex_v1.CatalogServiceClient.list_entry_groups", - "method": { - "fullName": "google.cloud.dataplex.v1.CatalogService.ListEntryGroups", - "service": { - "fullName": "google.cloud.dataplex.v1.CatalogService", - "shortName": "CatalogService" - }, - "shortName": "ListEntryGroups" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.dataplex_v1.types.ListEntryGroupsRequest" - }, - { - "name": "parent", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.cloud.dataplex_v1.services.catalog_service.pagers.ListEntryGroupsPager", - "shortName": "list_entry_groups" - }, - "description": "Sample for ListEntryGroups", - "file": "dataplex_v1_generated_catalog_service_list_entry_groups_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "dataplex_v1_generated_CatalogService_ListEntryGroups_sync", - "segments": [ - { - "end": 52, - "start": 27, - "type": "FULL" - }, - { - "end": 52, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 53, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "dataplex_v1_generated_catalog_service_list_entry_groups_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.dataplex_v1.CatalogServiceAsyncClient", - "shortName": "CatalogServiceAsyncClient" - }, - "fullName": "google.cloud.dataplex_v1.CatalogServiceAsyncClient.list_entry_types", - "method": { - "fullName": "google.cloud.dataplex.v1.CatalogService.ListEntryTypes", - "service": { - "fullName": "google.cloud.dataplex.v1.CatalogService", - "shortName": "CatalogService" - }, - 
"shortName": "ListEntryTypes" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.dataplex_v1.types.ListEntryTypesRequest" - }, - { - "name": "parent", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.cloud.dataplex_v1.services.catalog_service.pagers.ListEntryTypesAsyncPager", - "shortName": "list_entry_types" - }, - "description": "Sample for ListEntryTypes", - "file": "dataplex_v1_generated_catalog_service_list_entry_types_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "dataplex_v1_generated_CatalogService_ListEntryTypes_async", - "segments": [ - { - "end": 52, - "start": 27, - "type": "FULL" - }, - { - "end": 52, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 53, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "dataplex_v1_generated_catalog_service_list_entry_types_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.dataplex_v1.CatalogServiceClient", - "shortName": "CatalogServiceClient" - }, - "fullName": "google.cloud.dataplex_v1.CatalogServiceClient.list_entry_types", - "method": { - "fullName": "google.cloud.dataplex.v1.CatalogService.ListEntryTypes", - "service": { - "fullName": "google.cloud.dataplex.v1.CatalogService", - "shortName": "CatalogService" - }, - "shortName": "ListEntryTypes" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.dataplex_v1.types.ListEntryTypesRequest" - }, - { - "name": "parent", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.cloud.dataplex_v1.services.catalog_service.pagers.ListEntryTypesPager", - "shortName": "list_entry_types" - }, - "description": "Sample for ListEntryTypes", - "file": "dataplex_v1_generated_catalog_service_list_entry_types_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "dataplex_v1_generated_CatalogService_ListEntryTypes_sync", - "segments": [ - { - "end": 52, - "start": 27, - "type": "FULL" - }, - { - "end": 52, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 53, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "dataplex_v1_generated_catalog_service_list_entry_types_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.dataplex_v1.CatalogServiceAsyncClient", - "shortName": "CatalogServiceAsyncClient" - }, - "fullName": "google.cloud.dataplex_v1.CatalogServiceAsyncClient.list_metadata_jobs", - "method": { - "fullName": "google.cloud.dataplex.v1.CatalogService.ListMetadataJobs", - "service": { - "fullName": "google.cloud.dataplex.v1.CatalogService", - "shortName": "CatalogService" - }, - "shortName": "ListMetadataJobs" - }, - "parameters": [ - { - "name": "request", - "type": 
"google.cloud.dataplex_v1.types.ListMetadataJobsRequest" - }, - { - "name": "parent", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.cloud.dataplex_v1.services.catalog_service.pagers.ListMetadataJobsAsyncPager", - "shortName": "list_metadata_jobs" - }, - "description": "Sample for ListMetadataJobs", - "file": "dataplex_v1_generated_catalog_service_list_metadata_jobs_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "dataplex_v1_generated_CatalogService_ListMetadataJobs_async", - "segments": [ - { - "end": 52, - "start": 27, - "type": "FULL" - }, - { - "end": 52, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 53, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "dataplex_v1_generated_catalog_service_list_metadata_jobs_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.dataplex_v1.CatalogServiceClient", - "shortName": "CatalogServiceClient" - }, - "fullName": "google.cloud.dataplex_v1.CatalogServiceClient.list_metadata_jobs", - "method": { - "fullName": "google.cloud.dataplex.v1.CatalogService.ListMetadataJobs", - "service": { - "fullName": "google.cloud.dataplex.v1.CatalogService", - "shortName": "CatalogService" - }, - "shortName": "ListMetadataJobs" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.dataplex_v1.types.ListMetadataJobsRequest" - }, - { - "name": "parent", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.cloud.dataplex_v1.services.catalog_service.pagers.ListMetadataJobsPager", - "shortName": "list_metadata_jobs" - }, - "description": "Sample for ListMetadataJobs", - "file": "dataplex_v1_generated_catalog_service_list_metadata_jobs_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "dataplex_v1_generated_CatalogService_ListMetadataJobs_sync", - "segments": [ - { - "end": 52, - "start": 27, - "type": "FULL" - }, - { - "end": 52, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 53, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "dataplex_v1_generated_catalog_service_list_metadata_jobs_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.dataplex_v1.CatalogServiceAsyncClient", - "shortName": "CatalogServiceAsyncClient" - }, - "fullName": "google.cloud.dataplex_v1.CatalogServiceAsyncClient.lookup_entry", - "method": { - "fullName": "google.cloud.dataplex.v1.CatalogService.LookupEntry", - "service": { - "fullName": "google.cloud.dataplex.v1.CatalogService", - "shortName": "CatalogService" - }, - "shortName": "LookupEntry" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.dataplex_v1.types.LookupEntryRequest" - }, - { - "name": "retry", - "type": 
"google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.cloud.dataplex_v1.types.Entry", - "shortName": "lookup_entry" - }, - "description": "Sample for LookupEntry", - "file": "dataplex_v1_generated_catalog_service_lookup_entry_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "dataplex_v1_generated_CatalogService_LookupEntry_async", - "segments": [ - { - "end": 52, - "start": 27, - "type": "FULL" - }, - { - "end": 52, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 46, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 49, - "start": 47, - "type": "REQUEST_EXECUTION" - }, - { - "end": 53, - "start": 50, - "type": "RESPONSE_HANDLING" - } - ], - "title": "dataplex_v1_generated_catalog_service_lookup_entry_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.dataplex_v1.CatalogServiceClient", - "shortName": "CatalogServiceClient" - }, - "fullName": "google.cloud.dataplex_v1.CatalogServiceClient.lookup_entry", - "method": { - "fullName": "google.cloud.dataplex.v1.CatalogService.LookupEntry", - "service": { - "fullName": "google.cloud.dataplex.v1.CatalogService", - "shortName": "CatalogService" - }, - "shortName": "LookupEntry" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.dataplex_v1.types.LookupEntryRequest" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.cloud.dataplex_v1.types.Entry", - "shortName": "lookup_entry" - }, - "description": "Sample for LookupEntry", - "file": "dataplex_v1_generated_catalog_service_lookup_entry_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "dataplex_v1_generated_CatalogService_LookupEntry_sync", - "segments": [ - { - "end": 52, - "start": 27, - "type": "FULL" - }, - { - "end": 52, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 46, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 49, - "start": 47, - "type": "REQUEST_EXECUTION" - }, - { - "end": 53, - "start": 50, - "type": "RESPONSE_HANDLING" - } - ], - "title": "dataplex_v1_generated_catalog_service_lookup_entry_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.dataplex_v1.CatalogServiceAsyncClient", - "shortName": "CatalogServiceAsyncClient" - }, - "fullName": "google.cloud.dataplex_v1.CatalogServiceAsyncClient.search_entries", - "method": { - "fullName": "google.cloud.dataplex.v1.CatalogService.SearchEntries", - "service": { - "fullName": "google.cloud.dataplex.v1.CatalogService", - "shortName": "CatalogService" - }, - "shortName": "SearchEntries" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.dataplex_v1.types.SearchEntriesRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "query", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.cloud.dataplex_v1.services.catalog_service.pagers.SearchEntriesAsyncPager", - 
"shortName": "search_entries" - }, - "description": "Sample for SearchEntries", - "file": "dataplex_v1_generated_catalog_service_search_entries_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "dataplex_v1_generated_CatalogService_SearchEntries_async", - "segments": [ - { - "end": 53, - "start": 27, - "type": "FULL" - }, - { - "end": 53, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 46, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 49, - "start": 47, - "type": "REQUEST_EXECUTION" - }, - { - "end": 54, - "start": 50, - "type": "RESPONSE_HANDLING" - } - ], - "title": "dataplex_v1_generated_catalog_service_search_entries_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.dataplex_v1.CatalogServiceClient", - "shortName": "CatalogServiceClient" - }, - "fullName": "google.cloud.dataplex_v1.CatalogServiceClient.search_entries", - "method": { - "fullName": "google.cloud.dataplex.v1.CatalogService.SearchEntries", - "service": { - "fullName": "google.cloud.dataplex.v1.CatalogService", - "shortName": "CatalogService" - }, - "shortName": "SearchEntries" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.dataplex_v1.types.SearchEntriesRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "query", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.cloud.dataplex_v1.services.catalog_service.pagers.SearchEntriesPager", - "shortName": "search_entries" - }, - "description": "Sample for SearchEntries", - "file": "dataplex_v1_generated_catalog_service_search_entries_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "dataplex_v1_generated_CatalogService_SearchEntries_sync", - "segments": [ - { - "end": 53, - "start": 27, - "type": "FULL" - }, - { - "end": 53, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 46, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 49, - "start": 47, - "type": "REQUEST_EXECUTION" - }, - { - "end": 54, - "start": 50, - "type": "RESPONSE_HANDLING" - } - ], - "title": "dataplex_v1_generated_catalog_service_search_entries_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.dataplex_v1.CatalogServiceAsyncClient", - "shortName": "CatalogServiceAsyncClient" - }, - "fullName": "google.cloud.dataplex_v1.CatalogServiceAsyncClient.update_aspect_type", - "method": { - "fullName": "google.cloud.dataplex.v1.CatalogService.UpdateAspectType", - "service": { - "fullName": "google.cloud.dataplex.v1.CatalogService", - "shortName": "CatalogService" - }, - "shortName": "UpdateAspectType" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.dataplex_v1.types.UpdateAspectTypeRequest" - }, - { - "name": "aspect_type", - "type": "google.cloud.dataplex_v1.types.AspectType" - }, - { - "name": "update_mask", - "type": "google.protobuf.field_mask_pb2.FieldMask" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.api_core.operation_async.AsyncOperation", - 
"shortName": "update_aspect_type" - }, - "description": "Sample for UpdateAspectType", - "file": "dataplex_v1_generated_catalog_service_update_aspect_type_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "dataplex_v1_generated_CatalogService_UpdateAspectType_async", - "segments": [ - { - "end": 59, - "start": 27, - "type": "FULL" - }, - { - "end": 59, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 49, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 56, - "start": 50, - "type": "REQUEST_EXECUTION" - }, - { - "end": 60, - "start": 57, - "type": "RESPONSE_HANDLING" - } - ], - "title": "dataplex_v1_generated_catalog_service_update_aspect_type_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.dataplex_v1.CatalogServiceClient", - "shortName": "CatalogServiceClient" - }, - "fullName": "google.cloud.dataplex_v1.CatalogServiceClient.update_aspect_type", - "method": { - "fullName": "google.cloud.dataplex.v1.CatalogService.UpdateAspectType", - "service": { - "fullName": "google.cloud.dataplex.v1.CatalogService", - "shortName": "CatalogService" - }, - "shortName": "UpdateAspectType" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.dataplex_v1.types.UpdateAspectTypeRequest" - }, - { - "name": "aspect_type", - "type": "google.cloud.dataplex_v1.types.AspectType" - }, - { - "name": "update_mask", - "type": "google.protobuf.field_mask_pb2.FieldMask" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.api_core.operation.Operation", - "shortName": "update_aspect_type" - }, - "description": "Sample for UpdateAspectType", - "file": "dataplex_v1_generated_catalog_service_update_aspect_type_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "dataplex_v1_generated_CatalogService_UpdateAspectType_sync", - "segments": [ - { - "end": 59, - "start": 27, - "type": "FULL" - }, - { - "end": 59, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 49, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 56, - "start": 50, - "type": "REQUEST_EXECUTION" - }, - { - "end": 60, - "start": 57, - "type": "RESPONSE_HANDLING" - } - ], - "title": "dataplex_v1_generated_catalog_service_update_aspect_type_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.dataplex_v1.CatalogServiceAsyncClient", - "shortName": "CatalogServiceAsyncClient" - }, - "fullName": "google.cloud.dataplex_v1.CatalogServiceAsyncClient.update_entry_group", - "method": { - "fullName": "google.cloud.dataplex.v1.CatalogService.UpdateEntryGroup", - "service": { - "fullName": "google.cloud.dataplex.v1.CatalogService", - "shortName": "CatalogService" - }, - "shortName": "UpdateEntryGroup" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.dataplex_v1.types.UpdateEntryGroupRequest" - }, - { - "name": "entry_group", - "type": "google.cloud.dataplex_v1.types.EntryGroup" - }, - { - "name": "update_mask", - "type": "google.protobuf.field_mask_pb2.FieldMask" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": 
"Sequence[Tuple[str, str]" - } - ], - "resultType": "google.api_core.operation_async.AsyncOperation", - "shortName": "update_entry_group" - }, - "description": "Sample for UpdateEntryGroup", - "file": "dataplex_v1_generated_catalog_service_update_entry_group_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "dataplex_v1_generated_CatalogService_UpdateEntryGroup_async", - "segments": [ - { - "end": 54, - "start": 27, - "type": "FULL" - }, - { - "end": 54, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 44, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 51, - "start": 45, - "type": "REQUEST_EXECUTION" - }, - { - "end": 55, - "start": 52, - "type": "RESPONSE_HANDLING" - } - ], - "title": "dataplex_v1_generated_catalog_service_update_entry_group_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.dataplex_v1.CatalogServiceClient", - "shortName": "CatalogServiceClient" - }, - "fullName": "google.cloud.dataplex_v1.CatalogServiceClient.update_entry_group", - "method": { - "fullName": "google.cloud.dataplex.v1.CatalogService.UpdateEntryGroup", - "service": { - "fullName": "google.cloud.dataplex.v1.CatalogService", - "shortName": "CatalogService" - }, - "shortName": "UpdateEntryGroup" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.dataplex_v1.types.UpdateEntryGroupRequest" - }, - { - "name": "entry_group", - "type": "google.cloud.dataplex_v1.types.EntryGroup" - }, - { - "name": "update_mask", - "type": "google.protobuf.field_mask_pb2.FieldMask" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.api_core.operation.Operation", - "shortName": "update_entry_group" - }, - "description": "Sample for UpdateEntryGroup", - "file": "dataplex_v1_generated_catalog_service_update_entry_group_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "dataplex_v1_generated_CatalogService_UpdateEntryGroup_sync", - "segments": [ - { - "end": 54, - "start": 27, - "type": "FULL" - }, - { - "end": 54, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 44, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 51, - "start": 45, - "type": "REQUEST_EXECUTION" - }, - { - "end": 55, - "start": 52, - "type": "RESPONSE_HANDLING" - } - ], - "title": "dataplex_v1_generated_catalog_service_update_entry_group_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.dataplex_v1.CatalogServiceAsyncClient", - "shortName": "CatalogServiceAsyncClient" - }, - "fullName": "google.cloud.dataplex_v1.CatalogServiceAsyncClient.update_entry_type", - "method": { - "fullName": "google.cloud.dataplex.v1.CatalogService.UpdateEntryType", - "service": { - "fullName": "google.cloud.dataplex.v1.CatalogService", - "shortName": "CatalogService" - }, - "shortName": "UpdateEntryType" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.dataplex_v1.types.UpdateEntryTypeRequest" - }, - { - "name": "entry_type", - "type": "google.cloud.dataplex_v1.types.EntryType" - }, - { - "name": "update_mask", - "type": "google.protobuf.field_mask_pb2.FieldMask" - }, - { - "name": "retry", - "type": 
"google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.api_core.operation_async.AsyncOperation", - "shortName": "update_entry_type" - }, - "description": "Sample for UpdateEntryType", - "file": "dataplex_v1_generated_catalog_service_update_entry_type_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "dataplex_v1_generated_CatalogService_UpdateEntryType_async", - "segments": [ - { - "end": 54, - "start": 27, - "type": "FULL" - }, - { - "end": 54, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 44, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 51, - "start": 45, - "type": "REQUEST_EXECUTION" - }, - { - "end": 55, - "start": 52, - "type": "RESPONSE_HANDLING" - } - ], - "title": "dataplex_v1_generated_catalog_service_update_entry_type_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.dataplex_v1.CatalogServiceClient", - "shortName": "CatalogServiceClient" - }, - "fullName": "google.cloud.dataplex_v1.CatalogServiceClient.update_entry_type", - "method": { - "fullName": "google.cloud.dataplex.v1.CatalogService.UpdateEntryType", - "service": { - "fullName": "google.cloud.dataplex.v1.CatalogService", - "shortName": "CatalogService" - }, - "shortName": "UpdateEntryType" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.dataplex_v1.types.UpdateEntryTypeRequest" - }, - { - "name": "entry_type", - "type": "google.cloud.dataplex_v1.types.EntryType" - }, - { - "name": "update_mask", - "type": "google.protobuf.field_mask_pb2.FieldMask" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.api_core.operation.Operation", - "shortName": "update_entry_type" - }, - "description": "Sample for UpdateEntryType", - "file": "dataplex_v1_generated_catalog_service_update_entry_type_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "dataplex_v1_generated_CatalogService_UpdateEntryType_sync", - "segments": [ - { - "end": 54, - "start": 27, - "type": "FULL" - }, - { - "end": 54, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 44, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 51, - "start": 45, - "type": "REQUEST_EXECUTION" - }, - { - "end": 55, - "start": 52, - "type": "RESPONSE_HANDLING" - } - ], - "title": "dataplex_v1_generated_catalog_service_update_entry_type_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.dataplex_v1.CatalogServiceAsyncClient", - "shortName": "CatalogServiceAsyncClient" - }, - "fullName": "google.cloud.dataplex_v1.CatalogServiceAsyncClient.update_entry", - "method": { - "fullName": "google.cloud.dataplex.v1.CatalogService.UpdateEntry", - "service": { - "fullName": "google.cloud.dataplex.v1.CatalogService", - "shortName": "CatalogService" - }, - "shortName": "UpdateEntry" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.dataplex_v1.types.UpdateEntryRequest" - }, - { - "name": "entry", - "type": "google.cloud.dataplex_v1.types.Entry" - }, - { - "name": "update_mask", - "type": 
"google.protobuf.field_mask_pb2.FieldMask" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.cloud.dataplex_v1.types.Entry", - "shortName": "update_entry" - }, - "description": "Sample for UpdateEntry", - "file": "dataplex_v1_generated_catalog_service_update_entry_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "dataplex_v1_generated_CatalogService_UpdateEntry_async", - "segments": [ - { - "end": 54, - "start": 27, - "type": "FULL" - }, - { - "end": 54, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 48, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 51, - "start": 49, - "type": "REQUEST_EXECUTION" - }, - { - "end": 55, - "start": 52, - "type": "RESPONSE_HANDLING" - } - ], - "title": "dataplex_v1_generated_catalog_service_update_entry_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.dataplex_v1.CatalogServiceClient", - "shortName": "CatalogServiceClient" - }, - "fullName": "google.cloud.dataplex_v1.CatalogServiceClient.update_entry", - "method": { - "fullName": "google.cloud.dataplex.v1.CatalogService.UpdateEntry", - "service": { - "fullName": "google.cloud.dataplex.v1.CatalogService", - "shortName": "CatalogService" - }, - "shortName": "UpdateEntry" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.dataplex_v1.types.UpdateEntryRequest" - }, - { - "name": "entry", - "type": "google.cloud.dataplex_v1.types.Entry" - }, - { - "name": "update_mask", - "type": "google.protobuf.field_mask_pb2.FieldMask" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.cloud.dataplex_v1.types.Entry", - "shortName": "update_entry" - }, - "description": "Sample for UpdateEntry", - "file": "dataplex_v1_generated_catalog_service_update_entry_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "dataplex_v1_generated_CatalogService_UpdateEntry_sync", - "segments": [ - { - "end": 54, - "start": 27, - "type": "FULL" - }, - { - "end": 54, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 48, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 51, - "start": 49, - "type": "REQUEST_EXECUTION" - }, - { - "end": 55, - "start": 52, - "type": "RESPONSE_HANDLING" - } - ], - "title": "dataplex_v1_generated_catalog_service_update_entry_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.dataplex_v1.ContentServiceAsyncClient", - "shortName": "ContentServiceAsyncClient" - }, - "fullName": "google.cloud.dataplex_v1.ContentServiceAsyncClient.create_content", - "method": { - "fullName": "google.cloud.dataplex.v1.ContentService.CreateContent", - "service": { - "fullName": "google.cloud.dataplex.v1.ContentService", - "shortName": "ContentService" - }, - "shortName": "CreateContent" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.dataplex_v1.types.CreateContentRequest" - }, - { - "name": "parent", - "type": "str" - }, - { - "name": "content", - "type": "google.cloud.dataplex_v1.types.Content" - }, - { - "name": 
"retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.cloud.dataplex_v1.types.Content", - "shortName": "create_content" - }, - "description": "Sample for CreateContent", - "file": "dataplex_v1_generated_content_service_create_content_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "dataplex_v1_generated_ContentService_CreateContent_async", - "segments": [ - { - "end": 57, - "start": 27, - "type": "FULL" - }, - { - "end": 57, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 51, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 54, - "start": 52, - "type": "REQUEST_EXECUTION" - }, - { - "end": 58, - "start": 55, - "type": "RESPONSE_HANDLING" - } - ], - "title": "dataplex_v1_generated_content_service_create_content_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.dataplex_v1.ContentServiceClient", - "shortName": "ContentServiceClient" - }, - "fullName": "google.cloud.dataplex_v1.ContentServiceClient.create_content", - "method": { - "fullName": "google.cloud.dataplex.v1.ContentService.CreateContent", - "service": { - "fullName": "google.cloud.dataplex.v1.ContentService", - "shortName": "ContentService" - }, - "shortName": "CreateContent" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.dataplex_v1.types.CreateContentRequest" - }, - { - "name": "parent", - "type": "str" - }, - { - "name": "content", - "type": "google.cloud.dataplex_v1.types.Content" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.cloud.dataplex_v1.types.Content", - "shortName": "create_content" - }, - "description": "Sample for CreateContent", - "file": "dataplex_v1_generated_content_service_create_content_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "dataplex_v1_generated_ContentService_CreateContent_sync", - "segments": [ - { - "end": 57, - "start": 27, - "type": "FULL" - }, - { - "end": 57, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 51, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 54, - "start": 52, - "type": "REQUEST_EXECUTION" - }, - { - "end": 58, - "start": 55, - "type": "RESPONSE_HANDLING" - } - ], - "title": "dataplex_v1_generated_content_service_create_content_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.dataplex_v1.ContentServiceAsyncClient", - "shortName": "ContentServiceAsyncClient" - }, - "fullName": "google.cloud.dataplex_v1.ContentServiceAsyncClient.delete_content", - "method": { - "fullName": "google.cloud.dataplex.v1.ContentService.DeleteContent", - "service": { - "fullName": "google.cloud.dataplex.v1.ContentService", - "shortName": "ContentService" - }, - "shortName": "DeleteContent" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.dataplex_v1.types.DeleteContentRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": 
"Sequence[Tuple[str, str]" - } - ], - "shortName": "delete_content" - }, - "description": "Sample for DeleteContent", - "file": "dataplex_v1_generated_content_service_delete_content_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "dataplex_v1_generated_ContentService_DeleteContent_async", - "segments": [ - { - "end": 49, - "start": 27, - "type": "FULL" - }, - { - "end": 49, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 50, - "type": "RESPONSE_HANDLING" - } - ], - "title": "dataplex_v1_generated_content_service_delete_content_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.dataplex_v1.ContentServiceClient", - "shortName": "ContentServiceClient" - }, - "fullName": "google.cloud.dataplex_v1.ContentServiceClient.delete_content", - "method": { - "fullName": "google.cloud.dataplex.v1.ContentService.DeleteContent", - "service": { - "fullName": "google.cloud.dataplex.v1.ContentService", - "shortName": "ContentService" - }, - "shortName": "DeleteContent" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.dataplex_v1.types.DeleteContentRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "shortName": "delete_content" - }, - "description": "Sample for DeleteContent", - "file": "dataplex_v1_generated_content_service_delete_content_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "dataplex_v1_generated_ContentService_DeleteContent_sync", - "segments": [ - { - "end": 49, - "start": 27, - "type": "FULL" - }, - { - "end": 49, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 50, - "type": "RESPONSE_HANDLING" - } - ], - "title": "dataplex_v1_generated_content_service_delete_content_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.dataplex_v1.ContentServiceAsyncClient", - "shortName": "ContentServiceAsyncClient" - }, - "fullName": "google.cloud.dataplex_v1.ContentServiceAsyncClient.get_content", - "method": { - "fullName": "google.cloud.dataplex.v1.ContentService.GetContent", - "service": { - "fullName": "google.cloud.dataplex.v1.ContentService", - "shortName": "ContentService" - }, - "shortName": "GetContent" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.dataplex_v1.types.GetContentRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.cloud.dataplex_v1.types.Content", - "shortName": "get_content" - }, - "description": "Sample for GetContent", - "file": "dataplex_v1_generated_content_service_get_content_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "dataplex_v1_generated_ContentService_GetContent_async", - "segments": [ - { - "end": 51, - "start": 27, - 
"type": "FULL" - }, - { - "end": 51, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 52, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "dataplex_v1_generated_content_service_get_content_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.dataplex_v1.ContentServiceClient", - "shortName": "ContentServiceClient" - }, - "fullName": "google.cloud.dataplex_v1.ContentServiceClient.get_content", - "method": { - "fullName": "google.cloud.dataplex.v1.ContentService.GetContent", - "service": { - "fullName": "google.cloud.dataplex.v1.ContentService", - "shortName": "ContentService" - }, - "shortName": "GetContent" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.dataplex_v1.types.GetContentRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.cloud.dataplex_v1.types.Content", - "shortName": "get_content" - }, - "description": "Sample for GetContent", - "file": "dataplex_v1_generated_content_service_get_content_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "dataplex_v1_generated_ContentService_GetContent_sync", - "segments": [ - { - "end": 51, - "start": 27, - "type": "FULL" - }, - { - "end": 51, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 52, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "dataplex_v1_generated_content_service_get_content_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.dataplex_v1.ContentServiceAsyncClient", - "shortName": "ContentServiceAsyncClient" - }, - "fullName": "google.cloud.dataplex_v1.ContentServiceAsyncClient.get_iam_policy", - "method": { - "fullName": "google.cloud.dataplex.v1.ContentService.GetIamPolicy", - "service": { - "fullName": "google.cloud.dataplex.v1.ContentService", - "shortName": "ContentService" - }, - "shortName": "GetIamPolicy" - }, - "parameters": [ - { - "name": "request", - "type": "google.iam.v1.iam_policy_pb2.GetIamPolicyRequest" - }, - { - "name": "resource", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.iam.v1.policy_pb2.Policy", - "shortName": "get_iam_policy" - }, - "description": "Sample for GetIamPolicy", - "file": "dataplex_v1_generated_content_service_get_iam_policy_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "dataplex_v1_generated_ContentService_GetIamPolicy_async", - "segments": [ - { - "end": 52, - "start": 27, - "type": "FULL" - }, - { - "end": 52, - "start": 27, - "type": "SHORT" - }, - { - "end": 41, - "start": 39, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 46, - "start": 42, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 49, - "start": 47, - "type": 
"REQUEST_EXECUTION" - }, - { - "end": 53, - "start": 50, - "type": "RESPONSE_HANDLING" - } - ], - "title": "dataplex_v1_generated_content_service_get_iam_policy_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.dataplex_v1.ContentServiceClient", - "shortName": "ContentServiceClient" - }, - "fullName": "google.cloud.dataplex_v1.ContentServiceClient.get_iam_policy", - "method": { - "fullName": "google.cloud.dataplex.v1.ContentService.GetIamPolicy", - "service": { - "fullName": "google.cloud.dataplex.v1.ContentService", - "shortName": "ContentService" - }, - "shortName": "GetIamPolicy" - }, - "parameters": [ - { - "name": "request", - "type": "google.iam.v1.iam_policy_pb2.GetIamPolicyRequest" - }, - { - "name": "resource", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.iam.v1.policy_pb2.Policy", - "shortName": "get_iam_policy" - }, - "description": "Sample for GetIamPolicy", - "file": "dataplex_v1_generated_content_service_get_iam_policy_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "dataplex_v1_generated_ContentService_GetIamPolicy_sync", - "segments": [ - { - "end": 52, - "start": 27, - "type": "FULL" - }, - { - "end": 52, - "start": 27, - "type": "SHORT" - }, - { - "end": 41, - "start": 39, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 46, - "start": 42, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 49, - "start": 47, - "type": "REQUEST_EXECUTION" - }, - { - "end": 53, - "start": 50, - "type": "RESPONSE_HANDLING" - } - ], - "title": "dataplex_v1_generated_content_service_get_iam_policy_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.dataplex_v1.ContentServiceAsyncClient", - "shortName": "ContentServiceAsyncClient" - }, - "fullName": "google.cloud.dataplex_v1.ContentServiceAsyncClient.list_content", - "method": { - "fullName": "google.cloud.dataplex.v1.ContentService.ListContent", - "service": { - "fullName": "google.cloud.dataplex.v1.ContentService", - "shortName": "ContentService" - }, - "shortName": "ListContent" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.dataplex_v1.types.ListContentRequest" - }, - { - "name": "parent", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.cloud.dataplex_v1.services.content_service.pagers.ListContentAsyncPager", - "shortName": "list_content" - }, - "description": "Sample for ListContent", - "file": "dataplex_v1_generated_content_service_list_content_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "dataplex_v1_generated_ContentService_ListContent_async", - "segments": [ - { - "end": 52, - "start": 27, - "type": "FULL" - }, - { - "end": 52, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 53, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "dataplex_v1_generated_content_service_list_content_async.py" - }, - { - "canonical": true, - "clientMethod": 
{ - "client": { - "fullName": "google.cloud.dataplex_v1.ContentServiceClient", - "shortName": "ContentServiceClient" - }, - "fullName": "google.cloud.dataplex_v1.ContentServiceClient.list_content", - "method": { - "fullName": "google.cloud.dataplex.v1.ContentService.ListContent", - "service": { - "fullName": "google.cloud.dataplex.v1.ContentService", - "shortName": "ContentService" - }, - "shortName": "ListContent" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.dataplex_v1.types.ListContentRequest" - }, - { - "name": "parent", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.cloud.dataplex_v1.services.content_service.pagers.ListContentPager", - "shortName": "list_content" - }, - "description": "Sample for ListContent", - "file": "dataplex_v1_generated_content_service_list_content_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "dataplex_v1_generated_ContentService_ListContent_sync", - "segments": [ - { - "end": 52, - "start": 27, - "type": "FULL" - }, - { - "end": 52, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 53, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "dataplex_v1_generated_content_service_list_content_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.dataplex_v1.ContentServiceAsyncClient", - "shortName": "ContentServiceAsyncClient" - }, - "fullName": "google.cloud.dataplex_v1.ContentServiceAsyncClient.set_iam_policy", - "method": { - "fullName": "google.cloud.dataplex.v1.ContentService.SetIamPolicy", - "service": { - "fullName": "google.cloud.dataplex.v1.ContentService", - "shortName": "ContentService" - }, - "shortName": "SetIamPolicy" - }, - "parameters": [ - { - "name": "request", - "type": "google.iam.v1.iam_policy_pb2.SetIamPolicyRequest" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.iam.v1.policy_pb2.Policy", - "shortName": "set_iam_policy" - }, - "description": "Sample for SetIamPolicy", - "file": "dataplex_v1_generated_content_service_set_iam_policy_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "dataplex_v1_generated_ContentService_SetIamPolicy_async", - "segments": [ - { - "end": 52, - "start": 27, - "type": "FULL" - }, - { - "end": 52, - "start": 27, - "type": "SHORT" - }, - { - "end": 41, - "start": 39, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 46, - "start": 42, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 49, - "start": 47, - "type": "REQUEST_EXECUTION" - }, - { - "end": 53, - "start": 50, - "type": "RESPONSE_HANDLING" - } - ], - "title": "dataplex_v1_generated_content_service_set_iam_policy_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.dataplex_v1.ContentServiceClient", - "shortName": "ContentServiceClient" - }, - "fullName": "google.cloud.dataplex_v1.ContentServiceClient.set_iam_policy", - "method": { - "fullName": 
"google.cloud.dataplex.v1.ContentService.SetIamPolicy", - "service": { - "fullName": "google.cloud.dataplex.v1.ContentService", - "shortName": "ContentService" - }, - "shortName": "SetIamPolicy" - }, - "parameters": [ - { - "name": "request", - "type": "google.iam.v1.iam_policy_pb2.SetIamPolicyRequest" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.iam.v1.policy_pb2.Policy", - "shortName": "set_iam_policy" - }, - "description": "Sample for SetIamPolicy", - "file": "dataplex_v1_generated_content_service_set_iam_policy_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "dataplex_v1_generated_ContentService_SetIamPolicy_sync", - "segments": [ - { - "end": 52, - "start": 27, - "type": "FULL" - }, - { - "end": 52, - "start": 27, - "type": "SHORT" - }, - { - "end": 41, - "start": 39, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 46, - "start": 42, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 49, - "start": 47, - "type": "REQUEST_EXECUTION" - }, - { - "end": 53, - "start": 50, - "type": "RESPONSE_HANDLING" - } - ], - "title": "dataplex_v1_generated_content_service_set_iam_policy_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.dataplex_v1.ContentServiceAsyncClient", - "shortName": "ContentServiceAsyncClient" - }, - "fullName": "google.cloud.dataplex_v1.ContentServiceAsyncClient.test_iam_permissions", - "method": { - "fullName": "google.cloud.dataplex.v1.ContentService.TestIamPermissions", - "service": { - "fullName": "google.cloud.dataplex.v1.ContentService", - "shortName": "ContentService" - }, - "shortName": "TestIamPermissions" - }, - "parameters": [ - { - "name": "request", - "type": "google.iam.v1.iam_policy_pb2.TestIamPermissionsRequest" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.iam.v1.iam_policy_pb2.TestIamPermissionsResponse", - "shortName": "test_iam_permissions" - }, - "description": "Sample for TestIamPermissions", - "file": "dataplex_v1_generated_content_service_test_iam_permissions_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "dataplex_v1_generated_ContentService_TestIamPermissions_async", - "segments": [ - { - "end": 53, - "start": 27, - "type": "FULL" - }, - { - "end": 53, - "start": 27, - "type": "SHORT" - }, - { - "end": 41, - "start": 39, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 47, - "start": 42, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 50, - "start": 48, - "type": "REQUEST_EXECUTION" - }, - { - "end": 54, - "start": 51, - "type": "RESPONSE_HANDLING" - } - ], - "title": "dataplex_v1_generated_content_service_test_iam_permissions_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.dataplex_v1.ContentServiceClient", - "shortName": "ContentServiceClient" - }, - "fullName": "google.cloud.dataplex_v1.ContentServiceClient.test_iam_permissions", - "method": { - "fullName": "google.cloud.dataplex.v1.ContentService.TestIamPermissions", - "service": { - "fullName": "google.cloud.dataplex.v1.ContentService", - "shortName": "ContentService" - }, - "shortName": "TestIamPermissions" - }, - "parameters": [ - { - "name": "request", - 
"type": "google.iam.v1.iam_policy_pb2.TestIamPermissionsRequest" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.iam.v1.iam_policy_pb2.TestIamPermissionsResponse", - "shortName": "test_iam_permissions" - }, - "description": "Sample for TestIamPermissions", - "file": "dataplex_v1_generated_content_service_test_iam_permissions_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "dataplex_v1_generated_ContentService_TestIamPermissions_sync", - "segments": [ - { - "end": 53, - "start": 27, - "type": "FULL" - }, - { - "end": 53, - "start": 27, - "type": "SHORT" - }, - { - "end": 41, - "start": 39, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 47, - "start": 42, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 50, - "start": 48, - "type": "REQUEST_EXECUTION" - }, - { - "end": 54, - "start": 51, - "type": "RESPONSE_HANDLING" - } - ], - "title": "dataplex_v1_generated_content_service_test_iam_permissions_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.dataplex_v1.ContentServiceAsyncClient", - "shortName": "ContentServiceAsyncClient" - }, - "fullName": "google.cloud.dataplex_v1.ContentServiceAsyncClient.update_content", - "method": { - "fullName": "google.cloud.dataplex.v1.ContentService.UpdateContent", - "service": { - "fullName": "google.cloud.dataplex.v1.ContentService", - "shortName": "ContentService" - }, - "shortName": "UpdateContent" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.dataplex_v1.types.UpdateContentRequest" - }, - { - "name": "content", - "type": "google.cloud.dataplex_v1.types.Content" - }, - { - "name": "update_mask", - "type": "google.protobuf.field_mask_pb2.FieldMask" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.cloud.dataplex_v1.types.Content", - "shortName": "update_content" - }, - "description": "Sample for UpdateContent", - "file": "dataplex_v1_generated_content_service_update_content_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "dataplex_v1_generated_ContentService_UpdateContent_async", - "segments": [ - { - "end": 56, - "start": 27, - "type": "FULL" - }, - { - "end": 56, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 50, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 53, - "start": 51, - "type": "REQUEST_EXECUTION" - }, - { - "end": 57, - "start": 54, - "type": "RESPONSE_HANDLING" - } - ], - "title": "dataplex_v1_generated_content_service_update_content_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.dataplex_v1.ContentServiceClient", - "shortName": "ContentServiceClient" - }, - "fullName": "google.cloud.dataplex_v1.ContentServiceClient.update_content", - "method": { - "fullName": "google.cloud.dataplex.v1.ContentService.UpdateContent", - "service": { - "fullName": "google.cloud.dataplex.v1.ContentService", - "shortName": "ContentService" - }, - "shortName": "UpdateContent" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.dataplex_v1.types.UpdateContentRequest" - }, - { - "name": "content", - "type": 
"google.cloud.dataplex_v1.types.Content" - }, - { - "name": "update_mask", - "type": "google.protobuf.field_mask_pb2.FieldMask" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.cloud.dataplex_v1.types.Content", - "shortName": "update_content" - }, - "description": "Sample for UpdateContent", - "file": "dataplex_v1_generated_content_service_update_content_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "dataplex_v1_generated_ContentService_UpdateContent_sync", - "segments": [ - { - "end": 56, - "start": 27, - "type": "FULL" - }, - { - "end": 56, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 50, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 53, - "start": 51, - "type": "REQUEST_EXECUTION" - }, - { - "end": 57, - "start": 54, - "type": "RESPONSE_HANDLING" - } - ], - "title": "dataplex_v1_generated_content_service_update_content_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.dataplex_v1.DataScanServiceAsyncClient", - "shortName": "DataScanServiceAsyncClient" - }, - "fullName": "google.cloud.dataplex_v1.DataScanServiceAsyncClient.create_data_scan", - "method": { - "fullName": "google.cloud.dataplex.v1.DataScanService.CreateDataScan", - "service": { - "fullName": "google.cloud.dataplex.v1.DataScanService", - "shortName": "DataScanService" - }, - "shortName": "CreateDataScan" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.dataplex_v1.types.CreateDataScanRequest" - }, - { - "name": "parent", - "type": "str" - }, - { - "name": "data_scan", - "type": "google.cloud.dataplex_v1.types.DataScan" - }, - { - "name": "data_scan_id", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.api_core.operation_async.AsyncOperation", - "shortName": "create_data_scan" - }, - "description": "Sample for CreateDataScan", - "file": "dataplex_v1_generated_data_scan_service_create_data_scan_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "dataplex_v1_generated_DataScanService_CreateDataScan_async", - "segments": [ - { - "end": 61, - "start": 27, - "type": "FULL" - }, - { - "end": 61, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 51, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 58, - "start": 52, - "type": "REQUEST_EXECUTION" - }, - { - "end": 62, - "start": 59, - "type": "RESPONSE_HANDLING" - } - ], - "title": "dataplex_v1_generated_data_scan_service_create_data_scan_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.dataplex_v1.DataScanServiceClient", - "shortName": "DataScanServiceClient" - }, - "fullName": "google.cloud.dataplex_v1.DataScanServiceClient.create_data_scan", - "method": { - "fullName": "google.cloud.dataplex.v1.DataScanService.CreateDataScan", - "service": { - "fullName": "google.cloud.dataplex.v1.DataScanService", - "shortName": "DataScanService" - }, - "shortName": "CreateDataScan" - }, - "parameters": [ - { - "name": "request", - "type": 
"google.cloud.dataplex_v1.types.CreateDataScanRequest" - }, - { - "name": "parent", - "type": "str" - }, - { - "name": "data_scan", - "type": "google.cloud.dataplex_v1.types.DataScan" - }, - { - "name": "data_scan_id", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.api_core.operation.Operation", - "shortName": "create_data_scan" - }, - "description": "Sample for CreateDataScan", - "file": "dataplex_v1_generated_data_scan_service_create_data_scan_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "dataplex_v1_generated_DataScanService_CreateDataScan_sync", - "segments": [ - { - "end": 61, - "start": 27, - "type": "FULL" - }, - { - "end": 61, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 51, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 58, - "start": 52, - "type": "REQUEST_EXECUTION" - }, - { - "end": 62, - "start": 59, - "type": "RESPONSE_HANDLING" - } - ], - "title": "dataplex_v1_generated_data_scan_service_create_data_scan_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.dataplex_v1.DataScanServiceAsyncClient", - "shortName": "DataScanServiceAsyncClient" - }, - "fullName": "google.cloud.dataplex_v1.DataScanServiceAsyncClient.delete_data_scan", - "method": { - "fullName": "google.cloud.dataplex.v1.DataScanService.DeleteDataScan", - "service": { - "fullName": "google.cloud.dataplex.v1.DataScanService", - "shortName": "DataScanService" - }, - "shortName": "DeleteDataScan" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.dataplex_v1.types.DeleteDataScanRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.api_core.operation_async.AsyncOperation", - "shortName": "delete_data_scan" - }, - "description": "Sample for DeleteDataScan", - "file": "dataplex_v1_generated_data_scan_service_delete_data_scan_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "dataplex_v1_generated_DataScanService_DeleteDataScan_async", - "segments": [ - { - "end": 55, - "start": 27, - "type": "FULL" - }, - { - "end": 55, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 52, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 56, - "start": 53, - "type": "RESPONSE_HANDLING" - } - ], - "title": "dataplex_v1_generated_data_scan_service_delete_data_scan_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.dataplex_v1.DataScanServiceClient", - "shortName": "DataScanServiceClient" - }, - "fullName": "google.cloud.dataplex_v1.DataScanServiceClient.delete_data_scan", - "method": { - "fullName": "google.cloud.dataplex.v1.DataScanService.DeleteDataScan", - "service": { - "fullName": "google.cloud.dataplex.v1.DataScanService", - "shortName": "DataScanService" - }, - "shortName": "DeleteDataScan" - }, - "parameters": [ - { - "name": "request", - "type": 
"google.cloud.dataplex_v1.types.DeleteDataScanRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.api_core.operation.Operation", - "shortName": "delete_data_scan" - }, - "description": "Sample for DeleteDataScan", - "file": "dataplex_v1_generated_data_scan_service_delete_data_scan_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "dataplex_v1_generated_DataScanService_DeleteDataScan_sync", - "segments": [ - { - "end": 55, - "start": 27, - "type": "FULL" - }, - { - "end": 55, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 52, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 56, - "start": 53, - "type": "RESPONSE_HANDLING" - } - ], - "title": "dataplex_v1_generated_data_scan_service_delete_data_scan_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.dataplex_v1.DataScanServiceAsyncClient", - "shortName": "DataScanServiceAsyncClient" - }, - "fullName": "google.cloud.dataplex_v1.DataScanServiceAsyncClient.generate_data_quality_rules", - "method": { - "fullName": "google.cloud.dataplex.v1.DataScanService.GenerateDataQualityRules", - "service": { - "fullName": "google.cloud.dataplex.v1.DataScanService", - "shortName": "DataScanService" - }, - "shortName": "GenerateDataQualityRules" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.dataplex_v1.types.GenerateDataQualityRulesRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.cloud.dataplex_v1.types.GenerateDataQualityRulesResponse", - "shortName": "generate_data_quality_rules" - }, - "description": "Sample for GenerateDataQualityRules", - "file": "dataplex_v1_generated_data_scan_service_generate_data_quality_rules_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "dataplex_v1_generated_DataScanService_GenerateDataQualityRules_async", - "segments": [ - { - "end": 51, - "start": 27, - "type": "FULL" - }, - { - "end": 51, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 52, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "dataplex_v1_generated_data_scan_service_generate_data_quality_rules_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.dataplex_v1.DataScanServiceClient", - "shortName": "DataScanServiceClient" - }, - "fullName": "google.cloud.dataplex_v1.DataScanServiceClient.generate_data_quality_rules", - "method": { - "fullName": "google.cloud.dataplex.v1.DataScanService.GenerateDataQualityRules", - "service": { - "fullName": "google.cloud.dataplex.v1.DataScanService", - "shortName": "DataScanService" - }, - "shortName": "GenerateDataQualityRules" - }, - "parameters": [ - { - "name": "request", - "type": 
"google.cloud.dataplex_v1.types.GenerateDataQualityRulesRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.cloud.dataplex_v1.types.GenerateDataQualityRulesResponse", - "shortName": "generate_data_quality_rules" - }, - "description": "Sample for GenerateDataQualityRules", - "file": "dataplex_v1_generated_data_scan_service_generate_data_quality_rules_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "dataplex_v1_generated_DataScanService_GenerateDataQualityRules_sync", - "segments": [ - { - "end": 51, - "start": 27, - "type": "FULL" - }, - { - "end": 51, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 52, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "dataplex_v1_generated_data_scan_service_generate_data_quality_rules_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.dataplex_v1.DataScanServiceAsyncClient", - "shortName": "DataScanServiceAsyncClient" - }, - "fullName": "google.cloud.dataplex_v1.DataScanServiceAsyncClient.get_data_scan_job", - "method": { - "fullName": "google.cloud.dataplex.v1.DataScanService.GetDataScanJob", - "service": { - "fullName": "google.cloud.dataplex.v1.DataScanService", - "shortName": "DataScanService" - }, - "shortName": "GetDataScanJob" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.dataplex_v1.types.GetDataScanJobRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.cloud.dataplex_v1.types.DataScanJob", - "shortName": "get_data_scan_job" - }, - "description": "Sample for GetDataScanJob", - "file": "dataplex_v1_generated_data_scan_service_get_data_scan_job_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "dataplex_v1_generated_DataScanService_GetDataScanJob_async", - "segments": [ - { - "end": 51, - "start": 27, - "type": "FULL" - }, - { - "end": 51, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 52, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "dataplex_v1_generated_data_scan_service_get_data_scan_job_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.dataplex_v1.DataScanServiceClient", - "shortName": "DataScanServiceClient" - }, - "fullName": "google.cloud.dataplex_v1.DataScanServiceClient.get_data_scan_job", - "method": { - "fullName": "google.cloud.dataplex.v1.DataScanService.GetDataScanJob", - "service": { - "fullName": "google.cloud.dataplex.v1.DataScanService", - "shortName": "DataScanService" - }, - "shortName": "GetDataScanJob" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.dataplex_v1.types.GetDataScanJobRequest" - }, - { - "name": "name", - 
"type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.cloud.dataplex_v1.types.DataScanJob", - "shortName": "get_data_scan_job" - }, - "description": "Sample for GetDataScanJob", - "file": "dataplex_v1_generated_data_scan_service_get_data_scan_job_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "dataplex_v1_generated_DataScanService_GetDataScanJob_sync", - "segments": [ - { - "end": 51, - "start": 27, - "type": "FULL" - }, - { - "end": 51, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 52, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "dataplex_v1_generated_data_scan_service_get_data_scan_job_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.dataplex_v1.DataScanServiceAsyncClient", - "shortName": "DataScanServiceAsyncClient" - }, - "fullName": "google.cloud.dataplex_v1.DataScanServiceAsyncClient.get_data_scan", - "method": { - "fullName": "google.cloud.dataplex.v1.DataScanService.GetDataScan", - "service": { - "fullName": "google.cloud.dataplex.v1.DataScanService", - "shortName": "DataScanService" - }, - "shortName": "GetDataScan" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.dataplex_v1.types.GetDataScanRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.cloud.dataplex_v1.types.DataScan", - "shortName": "get_data_scan" - }, - "description": "Sample for GetDataScan", - "file": "dataplex_v1_generated_data_scan_service_get_data_scan_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "dataplex_v1_generated_DataScanService_GetDataScan_async", - "segments": [ - { - "end": 51, - "start": 27, - "type": "FULL" - }, - { - "end": 51, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 52, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "dataplex_v1_generated_data_scan_service_get_data_scan_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.dataplex_v1.DataScanServiceClient", - "shortName": "DataScanServiceClient" - }, - "fullName": "google.cloud.dataplex_v1.DataScanServiceClient.get_data_scan", - "method": { - "fullName": "google.cloud.dataplex.v1.DataScanService.GetDataScan", - "service": { - "fullName": "google.cloud.dataplex.v1.DataScanService", - "shortName": "DataScanService" - }, - "shortName": "GetDataScan" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.dataplex_v1.types.GetDataScanRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - 
"resultType": "google.cloud.dataplex_v1.types.DataScan", - "shortName": "get_data_scan" - }, - "description": "Sample for GetDataScan", - "file": "dataplex_v1_generated_data_scan_service_get_data_scan_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "dataplex_v1_generated_DataScanService_GetDataScan_sync", - "segments": [ - { - "end": 51, - "start": 27, - "type": "FULL" - }, - { - "end": 51, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 52, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "dataplex_v1_generated_data_scan_service_get_data_scan_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.dataplex_v1.DataScanServiceAsyncClient", - "shortName": "DataScanServiceAsyncClient" - }, - "fullName": "google.cloud.dataplex_v1.DataScanServiceAsyncClient.list_data_scan_jobs", - "method": { - "fullName": "google.cloud.dataplex.v1.DataScanService.ListDataScanJobs", - "service": { - "fullName": "google.cloud.dataplex.v1.DataScanService", - "shortName": "DataScanService" - }, - "shortName": "ListDataScanJobs" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.dataplex_v1.types.ListDataScanJobsRequest" - }, - { - "name": "parent", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.cloud.dataplex_v1.services.data_scan_service.pagers.ListDataScanJobsAsyncPager", - "shortName": "list_data_scan_jobs" - }, - "description": "Sample for ListDataScanJobs", - "file": "dataplex_v1_generated_data_scan_service_list_data_scan_jobs_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "dataplex_v1_generated_DataScanService_ListDataScanJobs_async", - "segments": [ - { - "end": 52, - "start": 27, - "type": "FULL" - }, - { - "end": 52, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 53, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "dataplex_v1_generated_data_scan_service_list_data_scan_jobs_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.dataplex_v1.DataScanServiceClient", - "shortName": "DataScanServiceClient" - }, - "fullName": "google.cloud.dataplex_v1.DataScanServiceClient.list_data_scan_jobs", - "method": { - "fullName": "google.cloud.dataplex.v1.DataScanService.ListDataScanJobs", - "service": { - "fullName": "google.cloud.dataplex.v1.DataScanService", - "shortName": "DataScanService" - }, - "shortName": "ListDataScanJobs" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.dataplex_v1.types.ListDataScanJobsRequest" - }, - { - "name": "parent", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.cloud.dataplex_v1.services.data_scan_service.pagers.ListDataScanJobsPager", - "shortName": 
"list_data_scan_jobs" - }, - "description": "Sample for ListDataScanJobs", - "file": "dataplex_v1_generated_data_scan_service_list_data_scan_jobs_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "dataplex_v1_generated_DataScanService_ListDataScanJobs_sync", - "segments": [ - { - "end": 52, - "start": 27, - "type": "FULL" - }, - { - "end": 52, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 53, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "dataplex_v1_generated_data_scan_service_list_data_scan_jobs_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.dataplex_v1.DataScanServiceAsyncClient", - "shortName": "DataScanServiceAsyncClient" - }, - "fullName": "google.cloud.dataplex_v1.DataScanServiceAsyncClient.list_data_scans", - "method": { - "fullName": "google.cloud.dataplex.v1.DataScanService.ListDataScans", - "service": { - "fullName": "google.cloud.dataplex.v1.DataScanService", - "shortName": "DataScanService" - }, - "shortName": "ListDataScans" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.dataplex_v1.types.ListDataScansRequest" - }, - { - "name": "parent", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.cloud.dataplex_v1.services.data_scan_service.pagers.ListDataScansAsyncPager", - "shortName": "list_data_scans" - }, - "description": "Sample for ListDataScans", - "file": "dataplex_v1_generated_data_scan_service_list_data_scans_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "dataplex_v1_generated_DataScanService_ListDataScans_async", - "segments": [ - { - "end": 52, - "start": 27, - "type": "FULL" - }, - { - "end": 52, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 53, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "dataplex_v1_generated_data_scan_service_list_data_scans_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.dataplex_v1.DataScanServiceClient", - "shortName": "DataScanServiceClient" - }, - "fullName": "google.cloud.dataplex_v1.DataScanServiceClient.list_data_scans", - "method": { - "fullName": "google.cloud.dataplex.v1.DataScanService.ListDataScans", - "service": { - "fullName": "google.cloud.dataplex.v1.DataScanService", - "shortName": "DataScanService" - }, - "shortName": "ListDataScans" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.dataplex_v1.types.ListDataScansRequest" - }, - { - "name": "parent", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.cloud.dataplex_v1.services.data_scan_service.pagers.ListDataScansPager", - "shortName": "list_data_scans" - }, - "description": "Sample for ListDataScans", - "file": 
"dataplex_v1_generated_data_scan_service_list_data_scans_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "dataplex_v1_generated_DataScanService_ListDataScans_sync", - "segments": [ - { - "end": 52, - "start": 27, - "type": "FULL" - }, - { - "end": 52, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 53, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "dataplex_v1_generated_data_scan_service_list_data_scans_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.dataplex_v1.DataScanServiceAsyncClient", - "shortName": "DataScanServiceAsyncClient" - }, - "fullName": "google.cloud.dataplex_v1.DataScanServiceAsyncClient.run_data_scan", - "method": { - "fullName": "google.cloud.dataplex.v1.DataScanService.RunDataScan", - "service": { - "fullName": "google.cloud.dataplex.v1.DataScanService", - "shortName": "DataScanService" - }, - "shortName": "RunDataScan" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.dataplex_v1.types.RunDataScanRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.cloud.dataplex_v1.types.RunDataScanResponse", - "shortName": "run_data_scan" - }, - "description": "Sample for RunDataScan", - "file": "dataplex_v1_generated_data_scan_service_run_data_scan_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "dataplex_v1_generated_DataScanService_RunDataScan_async", - "segments": [ - { - "end": 51, - "start": 27, - "type": "FULL" - }, - { - "end": 51, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 52, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "dataplex_v1_generated_data_scan_service_run_data_scan_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.dataplex_v1.DataScanServiceClient", - "shortName": "DataScanServiceClient" - }, - "fullName": "google.cloud.dataplex_v1.DataScanServiceClient.run_data_scan", - "method": { - "fullName": "google.cloud.dataplex.v1.DataScanService.RunDataScan", - "service": { - "fullName": "google.cloud.dataplex.v1.DataScanService", - "shortName": "DataScanService" - }, - "shortName": "RunDataScan" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.dataplex_v1.types.RunDataScanRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.cloud.dataplex_v1.types.RunDataScanResponse", - "shortName": "run_data_scan" - }, - "description": "Sample for RunDataScan", - "file": "dataplex_v1_generated_data_scan_service_run_data_scan_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "dataplex_v1_generated_DataScanService_RunDataScan_sync", - "segments": [ - 
{ - "end": 51, - "start": 27, - "type": "FULL" - }, - { - "end": 51, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 52, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "dataplex_v1_generated_data_scan_service_run_data_scan_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.dataplex_v1.DataScanServiceAsyncClient", - "shortName": "DataScanServiceAsyncClient" - }, - "fullName": "google.cloud.dataplex_v1.DataScanServiceAsyncClient.update_data_scan", - "method": { - "fullName": "google.cloud.dataplex.v1.DataScanService.UpdateDataScan", - "service": { - "fullName": "google.cloud.dataplex.v1.DataScanService", - "shortName": "DataScanService" - }, - "shortName": "UpdateDataScan" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.dataplex_v1.types.UpdateDataScanRequest" - }, - { - "name": "data_scan", - "type": "google.cloud.dataplex_v1.types.DataScan" - }, - { - "name": "update_mask", - "type": "google.protobuf.field_mask_pb2.FieldMask" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.api_core.operation_async.AsyncOperation", - "shortName": "update_data_scan" - }, - "description": "Sample for UpdateDataScan", - "file": "dataplex_v1_generated_data_scan_service_update_data_scan_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "dataplex_v1_generated_DataScanService_UpdateDataScan_async", - "segments": [ - { - "end": 59, - "start": 27, - "type": "FULL" - }, - { - "end": 59, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 49, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 56, - "start": 50, - "type": "REQUEST_EXECUTION" - }, - { - "end": 60, - "start": 57, - "type": "RESPONSE_HANDLING" - } - ], - "title": "dataplex_v1_generated_data_scan_service_update_data_scan_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.dataplex_v1.DataScanServiceClient", - "shortName": "DataScanServiceClient" - }, - "fullName": "google.cloud.dataplex_v1.DataScanServiceClient.update_data_scan", - "method": { - "fullName": "google.cloud.dataplex.v1.DataScanService.UpdateDataScan", - "service": { - "fullName": "google.cloud.dataplex.v1.DataScanService", - "shortName": "DataScanService" - }, - "shortName": "UpdateDataScan" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.dataplex_v1.types.UpdateDataScanRequest" - }, - { - "name": "data_scan", - "type": "google.cloud.dataplex_v1.types.DataScan" - }, - { - "name": "update_mask", - "type": "google.protobuf.field_mask_pb2.FieldMask" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.api_core.operation.Operation", - "shortName": "update_data_scan" - }, - "description": "Sample for UpdateDataScan", - "file": "dataplex_v1_generated_data_scan_service_update_data_scan_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": 
"dataplex_v1_generated_DataScanService_UpdateDataScan_sync", - "segments": [ - { - "end": 59, - "start": 27, - "type": "FULL" - }, - { - "end": 59, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 49, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 56, - "start": 50, - "type": "REQUEST_EXECUTION" - }, - { - "end": 60, - "start": 57, - "type": "RESPONSE_HANDLING" - } - ], - "title": "dataplex_v1_generated_data_scan_service_update_data_scan_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.dataplex_v1.DataTaxonomyServiceAsyncClient", - "shortName": "DataTaxonomyServiceAsyncClient" - }, - "fullName": "google.cloud.dataplex_v1.DataTaxonomyServiceAsyncClient.create_data_attribute_binding", - "method": { - "fullName": "google.cloud.dataplex.v1.DataTaxonomyService.CreateDataAttributeBinding", - "service": { - "fullName": "google.cloud.dataplex.v1.DataTaxonomyService", - "shortName": "DataTaxonomyService" - }, - "shortName": "CreateDataAttributeBinding" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.dataplex_v1.types.CreateDataAttributeBindingRequest" - }, - { - "name": "parent", - "type": "str" - }, - { - "name": "data_attribute_binding", - "type": "google.cloud.dataplex_v1.types.DataAttributeBinding" - }, - { - "name": "data_attribute_binding_id", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.api_core.operation_async.AsyncOperation", - "shortName": "create_data_attribute_binding" - }, - "description": "Sample for CreateDataAttributeBinding", - "file": "dataplex_v1_generated_data_taxonomy_service_create_data_attribute_binding_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "dataplex_v1_generated_DataTaxonomyService_CreateDataAttributeBinding_async", - "segments": [ - { - "end": 60, - "start": 27, - "type": "FULL" - }, - { - "end": 60, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 50, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 57, - "start": 51, - "type": "REQUEST_EXECUTION" - }, - { - "end": 61, - "start": 58, - "type": "RESPONSE_HANDLING" - } - ], - "title": "dataplex_v1_generated_data_taxonomy_service_create_data_attribute_binding_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.dataplex_v1.DataTaxonomyServiceClient", - "shortName": "DataTaxonomyServiceClient" - }, - "fullName": "google.cloud.dataplex_v1.DataTaxonomyServiceClient.create_data_attribute_binding", - "method": { - "fullName": "google.cloud.dataplex.v1.DataTaxonomyService.CreateDataAttributeBinding", - "service": { - "fullName": "google.cloud.dataplex.v1.DataTaxonomyService", - "shortName": "DataTaxonomyService" - }, - "shortName": "CreateDataAttributeBinding" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.dataplex_v1.types.CreateDataAttributeBindingRequest" - }, - { - "name": "parent", - "type": "str" - }, - { - "name": "data_attribute_binding", - "type": "google.cloud.dataplex_v1.types.DataAttributeBinding" - }, - { - "name": "data_attribute_binding_id", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": 
"timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.api_core.operation.Operation", - "shortName": "create_data_attribute_binding" - }, - "description": "Sample for CreateDataAttributeBinding", - "file": "dataplex_v1_generated_data_taxonomy_service_create_data_attribute_binding_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "dataplex_v1_generated_DataTaxonomyService_CreateDataAttributeBinding_sync", - "segments": [ - { - "end": 60, - "start": 27, - "type": "FULL" - }, - { - "end": 60, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 50, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 57, - "start": 51, - "type": "REQUEST_EXECUTION" - }, - { - "end": 61, - "start": 58, - "type": "RESPONSE_HANDLING" - } - ], - "title": "dataplex_v1_generated_data_taxonomy_service_create_data_attribute_binding_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.dataplex_v1.DataTaxonomyServiceAsyncClient", - "shortName": "DataTaxonomyServiceAsyncClient" - }, - "fullName": "google.cloud.dataplex_v1.DataTaxonomyServiceAsyncClient.create_data_attribute", - "method": { - "fullName": "google.cloud.dataplex.v1.DataTaxonomyService.CreateDataAttribute", - "service": { - "fullName": "google.cloud.dataplex.v1.DataTaxonomyService", - "shortName": "DataTaxonomyService" - }, - "shortName": "CreateDataAttribute" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.dataplex_v1.types.CreateDataAttributeRequest" - }, - { - "name": "parent", - "type": "str" - }, - { - "name": "data_attribute", - "type": "google.cloud.dataplex_v1.types.DataAttribute" - }, - { - "name": "data_attribute_id", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.api_core.operation_async.AsyncOperation", - "shortName": "create_data_attribute" - }, - "description": "Sample for CreateDataAttribute", - "file": "dataplex_v1_generated_data_taxonomy_service_create_data_attribute_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "dataplex_v1_generated_DataTaxonomyService_CreateDataAttribute_async", - "segments": [ - { - "end": 56, - "start": 27, - "type": "FULL" - }, - { - "end": 56, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 46, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 53, - "start": 47, - "type": "REQUEST_EXECUTION" - }, - { - "end": 57, - "start": 54, - "type": "RESPONSE_HANDLING" - } - ], - "title": "dataplex_v1_generated_data_taxonomy_service_create_data_attribute_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.dataplex_v1.DataTaxonomyServiceClient", - "shortName": "DataTaxonomyServiceClient" - }, - "fullName": "google.cloud.dataplex_v1.DataTaxonomyServiceClient.create_data_attribute", - "method": { - "fullName": "google.cloud.dataplex.v1.DataTaxonomyService.CreateDataAttribute", - "service": { - "fullName": "google.cloud.dataplex.v1.DataTaxonomyService", - "shortName": "DataTaxonomyService" - }, - "shortName": "CreateDataAttribute" - }, - "parameters": [ - { - "name": "request", - "type": 
"google.cloud.dataplex_v1.types.CreateDataAttributeRequest" - }, - { - "name": "parent", - "type": "str" - }, - { - "name": "data_attribute", - "type": "google.cloud.dataplex_v1.types.DataAttribute" - }, - { - "name": "data_attribute_id", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.api_core.operation.Operation", - "shortName": "create_data_attribute" - }, - "description": "Sample for CreateDataAttribute", - "file": "dataplex_v1_generated_data_taxonomy_service_create_data_attribute_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "dataplex_v1_generated_DataTaxonomyService_CreateDataAttribute_sync", - "segments": [ - { - "end": 56, - "start": 27, - "type": "FULL" - }, - { - "end": 56, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 46, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 53, - "start": 47, - "type": "REQUEST_EXECUTION" - }, - { - "end": 57, - "start": 54, - "type": "RESPONSE_HANDLING" - } - ], - "title": "dataplex_v1_generated_data_taxonomy_service_create_data_attribute_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.dataplex_v1.DataTaxonomyServiceAsyncClient", - "shortName": "DataTaxonomyServiceAsyncClient" - }, - "fullName": "google.cloud.dataplex_v1.DataTaxonomyServiceAsyncClient.create_data_taxonomy", - "method": { - "fullName": "google.cloud.dataplex.v1.DataTaxonomyService.CreateDataTaxonomy", - "service": { - "fullName": "google.cloud.dataplex.v1.DataTaxonomyService", - "shortName": "DataTaxonomyService" - }, - "shortName": "CreateDataTaxonomy" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.dataplex_v1.types.CreateDataTaxonomyRequest" - }, - { - "name": "parent", - "type": "str" - }, - { - "name": "data_taxonomy", - "type": "google.cloud.dataplex_v1.types.DataTaxonomy" - }, - { - "name": "data_taxonomy_id", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.api_core.operation_async.AsyncOperation", - "shortName": "create_data_taxonomy" - }, - "description": "Sample for CreateDataTaxonomy", - "file": "dataplex_v1_generated_data_taxonomy_service_create_data_taxonomy_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "dataplex_v1_generated_DataTaxonomyService_CreateDataTaxonomy_async", - "segments": [ - { - "end": 56, - "start": 27, - "type": "FULL" - }, - { - "end": 56, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 46, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 53, - "start": 47, - "type": "REQUEST_EXECUTION" - }, - { - "end": 57, - "start": 54, - "type": "RESPONSE_HANDLING" - } - ], - "title": "dataplex_v1_generated_data_taxonomy_service_create_data_taxonomy_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.dataplex_v1.DataTaxonomyServiceClient", - "shortName": "DataTaxonomyServiceClient" - }, - "fullName": "google.cloud.dataplex_v1.DataTaxonomyServiceClient.create_data_taxonomy", - "method": { - "fullName": 
"google.cloud.dataplex.v1.DataTaxonomyService.CreateDataTaxonomy", - "service": { - "fullName": "google.cloud.dataplex.v1.DataTaxonomyService", - "shortName": "DataTaxonomyService" - }, - "shortName": "CreateDataTaxonomy" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.dataplex_v1.types.CreateDataTaxonomyRequest" - }, - { - "name": "parent", - "type": "str" - }, - { - "name": "data_taxonomy", - "type": "google.cloud.dataplex_v1.types.DataTaxonomy" - }, - { - "name": "data_taxonomy_id", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.api_core.operation.Operation", - "shortName": "create_data_taxonomy" - }, - "description": "Sample for CreateDataTaxonomy", - "file": "dataplex_v1_generated_data_taxonomy_service_create_data_taxonomy_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "dataplex_v1_generated_DataTaxonomyService_CreateDataTaxonomy_sync", - "segments": [ - { - "end": 56, - "start": 27, - "type": "FULL" - }, - { - "end": 56, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 46, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 53, - "start": 47, - "type": "REQUEST_EXECUTION" - }, - { - "end": 57, - "start": 54, - "type": "RESPONSE_HANDLING" - } - ], - "title": "dataplex_v1_generated_data_taxonomy_service_create_data_taxonomy_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.dataplex_v1.DataTaxonomyServiceAsyncClient", - "shortName": "DataTaxonomyServiceAsyncClient" - }, - "fullName": "google.cloud.dataplex_v1.DataTaxonomyServiceAsyncClient.delete_data_attribute_binding", - "method": { - "fullName": "google.cloud.dataplex.v1.DataTaxonomyService.DeleteDataAttributeBinding", - "service": { - "fullName": "google.cloud.dataplex.v1.DataTaxonomyService", - "shortName": "DataTaxonomyService" - }, - "shortName": "DeleteDataAttributeBinding" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.dataplex_v1.types.DeleteDataAttributeBindingRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.api_core.operation_async.AsyncOperation", - "shortName": "delete_data_attribute_binding" - }, - "description": "Sample for DeleteDataAttributeBinding", - "file": "dataplex_v1_generated_data_taxonomy_service_delete_data_attribute_binding_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "dataplex_v1_generated_DataTaxonomyService_DeleteDataAttributeBinding_async", - "segments": [ - { - "end": 56, - "start": 27, - "type": "FULL" - }, - { - "end": 56, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 46, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 53, - "start": 47, - "type": "REQUEST_EXECUTION" - }, - { - "end": 57, - "start": 54, - "type": "RESPONSE_HANDLING" - } - ], - "title": "dataplex_v1_generated_data_taxonomy_service_delete_data_attribute_binding_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": 
"google.cloud.dataplex_v1.DataTaxonomyServiceClient", - "shortName": "DataTaxonomyServiceClient" - }, - "fullName": "google.cloud.dataplex_v1.DataTaxonomyServiceClient.delete_data_attribute_binding", - "method": { - "fullName": "google.cloud.dataplex.v1.DataTaxonomyService.DeleteDataAttributeBinding", - "service": { - "fullName": "google.cloud.dataplex.v1.DataTaxonomyService", - "shortName": "DataTaxonomyService" - }, - "shortName": "DeleteDataAttributeBinding" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.dataplex_v1.types.DeleteDataAttributeBindingRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.api_core.operation.Operation", - "shortName": "delete_data_attribute_binding" - }, - "description": "Sample for DeleteDataAttributeBinding", - "file": "dataplex_v1_generated_data_taxonomy_service_delete_data_attribute_binding_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "dataplex_v1_generated_DataTaxonomyService_DeleteDataAttributeBinding_sync", - "segments": [ - { - "end": 56, - "start": 27, - "type": "FULL" - }, - { - "end": 56, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 46, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 53, - "start": 47, - "type": "REQUEST_EXECUTION" - }, - { - "end": 57, - "start": 54, - "type": "RESPONSE_HANDLING" - } - ], - "title": "dataplex_v1_generated_data_taxonomy_service_delete_data_attribute_binding_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.dataplex_v1.DataTaxonomyServiceAsyncClient", - "shortName": "DataTaxonomyServiceAsyncClient" - }, - "fullName": "google.cloud.dataplex_v1.DataTaxonomyServiceAsyncClient.delete_data_attribute", - "method": { - "fullName": "google.cloud.dataplex.v1.DataTaxonomyService.DeleteDataAttribute", - "service": { - "fullName": "google.cloud.dataplex.v1.DataTaxonomyService", - "shortName": "DataTaxonomyService" - }, - "shortName": "DeleteDataAttribute" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.dataplex_v1.types.DeleteDataAttributeRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.api_core.operation_async.AsyncOperation", - "shortName": "delete_data_attribute" - }, - "description": "Sample for DeleteDataAttribute", - "file": "dataplex_v1_generated_data_taxonomy_service_delete_data_attribute_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "dataplex_v1_generated_DataTaxonomyService_DeleteDataAttribute_async", - "segments": [ - { - "end": 55, - "start": 27, - "type": "FULL" - }, - { - "end": 55, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 52, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 56, - "start": 53, - "type": "RESPONSE_HANDLING" - } - ], - "title": "dataplex_v1_generated_data_taxonomy_service_delete_data_attribute_async.py" - }, - { - "canonical": true, 
- "clientMethod": { - "client": { - "fullName": "google.cloud.dataplex_v1.DataTaxonomyServiceClient", - "shortName": "DataTaxonomyServiceClient" - }, - "fullName": "google.cloud.dataplex_v1.DataTaxonomyServiceClient.delete_data_attribute", - "method": { - "fullName": "google.cloud.dataplex.v1.DataTaxonomyService.DeleteDataAttribute", - "service": { - "fullName": "google.cloud.dataplex.v1.DataTaxonomyService", - "shortName": "DataTaxonomyService" - }, - "shortName": "DeleteDataAttribute" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.dataplex_v1.types.DeleteDataAttributeRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.api_core.operation.Operation", - "shortName": "delete_data_attribute" - }, - "description": "Sample for DeleteDataAttribute", - "file": "dataplex_v1_generated_data_taxonomy_service_delete_data_attribute_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "dataplex_v1_generated_DataTaxonomyService_DeleteDataAttribute_sync", - "segments": [ - { - "end": 55, - "start": 27, - "type": "FULL" - }, - { - "end": 55, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 52, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 56, - "start": 53, - "type": "RESPONSE_HANDLING" - } - ], - "title": "dataplex_v1_generated_data_taxonomy_service_delete_data_attribute_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.dataplex_v1.DataTaxonomyServiceAsyncClient", - "shortName": "DataTaxonomyServiceAsyncClient" - }, - "fullName": "google.cloud.dataplex_v1.DataTaxonomyServiceAsyncClient.delete_data_taxonomy", - "method": { - "fullName": "google.cloud.dataplex.v1.DataTaxonomyService.DeleteDataTaxonomy", - "service": { - "fullName": "google.cloud.dataplex.v1.DataTaxonomyService", - "shortName": "DataTaxonomyService" - }, - "shortName": "DeleteDataTaxonomy" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.dataplex_v1.types.DeleteDataTaxonomyRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.api_core.operation_async.AsyncOperation", - "shortName": "delete_data_taxonomy" - }, - "description": "Sample for DeleteDataTaxonomy", - "file": "dataplex_v1_generated_data_taxonomy_service_delete_data_taxonomy_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "dataplex_v1_generated_DataTaxonomyService_DeleteDataTaxonomy_async", - "segments": [ - { - "end": 55, - "start": 27, - "type": "FULL" - }, - { - "end": 55, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 52, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 56, - "start": 53, - "type": "RESPONSE_HANDLING" - } - ], - "title": "dataplex_v1_generated_data_taxonomy_service_delete_data_taxonomy_async.py" - }, - { - "canonical": true, - "clientMethod": { - 
"client": { - "fullName": "google.cloud.dataplex_v1.DataTaxonomyServiceClient", - "shortName": "DataTaxonomyServiceClient" - }, - "fullName": "google.cloud.dataplex_v1.DataTaxonomyServiceClient.delete_data_taxonomy", - "method": { - "fullName": "google.cloud.dataplex.v1.DataTaxonomyService.DeleteDataTaxonomy", - "service": { - "fullName": "google.cloud.dataplex.v1.DataTaxonomyService", - "shortName": "DataTaxonomyService" - }, - "shortName": "DeleteDataTaxonomy" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.dataplex_v1.types.DeleteDataTaxonomyRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.api_core.operation.Operation", - "shortName": "delete_data_taxonomy" - }, - "description": "Sample for DeleteDataTaxonomy", - "file": "dataplex_v1_generated_data_taxonomy_service_delete_data_taxonomy_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "dataplex_v1_generated_DataTaxonomyService_DeleteDataTaxonomy_sync", - "segments": [ - { - "end": 55, - "start": 27, - "type": "FULL" - }, - { - "end": 55, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 52, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 56, - "start": 53, - "type": "RESPONSE_HANDLING" - } - ], - "title": "dataplex_v1_generated_data_taxonomy_service_delete_data_taxonomy_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.dataplex_v1.DataTaxonomyServiceAsyncClient", - "shortName": "DataTaxonomyServiceAsyncClient" - }, - "fullName": "google.cloud.dataplex_v1.DataTaxonomyServiceAsyncClient.get_data_attribute_binding", - "method": { - "fullName": "google.cloud.dataplex.v1.DataTaxonomyService.GetDataAttributeBinding", - "service": { - "fullName": "google.cloud.dataplex.v1.DataTaxonomyService", - "shortName": "DataTaxonomyService" - }, - "shortName": "GetDataAttributeBinding" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.dataplex_v1.types.GetDataAttributeBindingRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.cloud.dataplex_v1.types.DataAttributeBinding", - "shortName": "get_data_attribute_binding" - }, - "description": "Sample for GetDataAttributeBinding", - "file": "dataplex_v1_generated_data_taxonomy_service_get_data_attribute_binding_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "dataplex_v1_generated_DataTaxonomyService_GetDataAttributeBinding_async", - "segments": [ - { - "end": 51, - "start": 27, - "type": "FULL" - }, - { - "end": 51, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 52, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "dataplex_v1_generated_data_taxonomy_service_get_data_attribute_binding_async.py" - }, - { - "canonical": true, - 
"clientMethod": { - "client": { - "fullName": "google.cloud.dataplex_v1.DataTaxonomyServiceClient", - "shortName": "DataTaxonomyServiceClient" - }, - "fullName": "google.cloud.dataplex_v1.DataTaxonomyServiceClient.get_data_attribute_binding", - "method": { - "fullName": "google.cloud.dataplex.v1.DataTaxonomyService.GetDataAttributeBinding", - "service": { - "fullName": "google.cloud.dataplex.v1.DataTaxonomyService", - "shortName": "DataTaxonomyService" - }, - "shortName": "GetDataAttributeBinding" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.dataplex_v1.types.GetDataAttributeBindingRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.cloud.dataplex_v1.types.DataAttributeBinding", - "shortName": "get_data_attribute_binding" - }, - "description": "Sample for GetDataAttributeBinding", - "file": "dataplex_v1_generated_data_taxonomy_service_get_data_attribute_binding_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "dataplex_v1_generated_DataTaxonomyService_GetDataAttributeBinding_sync", - "segments": [ - { - "end": 51, - "start": 27, - "type": "FULL" - }, - { - "end": 51, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 52, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "dataplex_v1_generated_data_taxonomy_service_get_data_attribute_binding_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.dataplex_v1.DataTaxonomyServiceAsyncClient", - "shortName": "DataTaxonomyServiceAsyncClient" - }, - "fullName": "google.cloud.dataplex_v1.DataTaxonomyServiceAsyncClient.get_data_attribute", - "method": { - "fullName": "google.cloud.dataplex.v1.DataTaxonomyService.GetDataAttribute", - "service": { - "fullName": "google.cloud.dataplex.v1.DataTaxonomyService", - "shortName": "DataTaxonomyService" - }, - "shortName": "GetDataAttribute" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.dataplex_v1.types.GetDataAttributeRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.cloud.dataplex_v1.types.DataAttribute", - "shortName": "get_data_attribute" - }, - "description": "Sample for GetDataAttribute", - "file": "dataplex_v1_generated_data_taxonomy_service_get_data_attribute_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "dataplex_v1_generated_DataTaxonomyService_GetDataAttribute_async", - "segments": [ - { - "end": 51, - "start": 27, - "type": "FULL" - }, - { - "end": 51, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 52, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "dataplex_v1_generated_data_taxonomy_service_get_data_attribute_async.py" - }, - { - "canonical": 
true, - "clientMethod": { - "client": { - "fullName": "google.cloud.dataplex_v1.DataTaxonomyServiceClient", - "shortName": "DataTaxonomyServiceClient" - }, - "fullName": "google.cloud.dataplex_v1.DataTaxonomyServiceClient.get_data_attribute", - "method": { - "fullName": "google.cloud.dataplex.v1.DataTaxonomyService.GetDataAttribute", - "service": { - "fullName": "google.cloud.dataplex.v1.DataTaxonomyService", - "shortName": "DataTaxonomyService" - }, - "shortName": "GetDataAttribute" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.dataplex_v1.types.GetDataAttributeRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.cloud.dataplex_v1.types.DataAttribute", - "shortName": "get_data_attribute" - }, - "description": "Sample for GetDataAttribute", - "file": "dataplex_v1_generated_data_taxonomy_service_get_data_attribute_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "dataplex_v1_generated_DataTaxonomyService_GetDataAttribute_sync", - "segments": [ - { - "end": 51, - "start": 27, - "type": "FULL" - }, - { - "end": 51, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 52, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "dataplex_v1_generated_data_taxonomy_service_get_data_attribute_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.dataplex_v1.DataTaxonomyServiceAsyncClient", - "shortName": "DataTaxonomyServiceAsyncClient" - }, - "fullName": "google.cloud.dataplex_v1.DataTaxonomyServiceAsyncClient.get_data_taxonomy", - "method": { - "fullName": "google.cloud.dataplex.v1.DataTaxonomyService.GetDataTaxonomy", - "service": { - "fullName": "google.cloud.dataplex.v1.DataTaxonomyService", - "shortName": "DataTaxonomyService" - }, - "shortName": "GetDataTaxonomy" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.dataplex_v1.types.GetDataTaxonomyRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.cloud.dataplex_v1.types.DataTaxonomy", - "shortName": "get_data_taxonomy" - }, - "description": "Sample for GetDataTaxonomy", - "file": "dataplex_v1_generated_data_taxonomy_service_get_data_taxonomy_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "dataplex_v1_generated_DataTaxonomyService_GetDataTaxonomy_async", - "segments": [ - { - "end": 51, - "start": 27, - "type": "FULL" - }, - { - "end": 51, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 52, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "dataplex_v1_generated_data_taxonomy_service_get_data_taxonomy_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": 
"google.cloud.dataplex_v1.DataTaxonomyServiceClient", - "shortName": "DataTaxonomyServiceClient" - }, - "fullName": "google.cloud.dataplex_v1.DataTaxonomyServiceClient.get_data_taxonomy", - "method": { - "fullName": "google.cloud.dataplex.v1.DataTaxonomyService.GetDataTaxonomy", - "service": { - "fullName": "google.cloud.dataplex.v1.DataTaxonomyService", - "shortName": "DataTaxonomyService" - }, - "shortName": "GetDataTaxonomy" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.dataplex_v1.types.GetDataTaxonomyRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.cloud.dataplex_v1.types.DataTaxonomy", - "shortName": "get_data_taxonomy" - }, - "description": "Sample for GetDataTaxonomy", - "file": "dataplex_v1_generated_data_taxonomy_service_get_data_taxonomy_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "dataplex_v1_generated_DataTaxonomyService_GetDataTaxonomy_sync", - "segments": [ - { - "end": 51, - "start": 27, - "type": "FULL" - }, - { - "end": 51, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 52, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "dataplex_v1_generated_data_taxonomy_service_get_data_taxonomy_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.dataplex_v1.DataTaxonomyServiceAsyncClient", - "shortName": "DataTaxonomyServiceAsyncClient" - }, - "fullName": "google.cloud.dataplex_v1.DataTaxonomyServiceAsyncClient.list_data_attribute_bindings", - "method": { - "fullName": "google.cloud.dataplex.v1.DataTaxonomyService.ListDataAttributeBindings", - "service": { - "fullName": "google.cloud.dataplex.v1.DataTaxonomyService", - "shortName": "DataTaxonomyService" - }, - "shortName": "ListDataAttributeBindings" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.dataplex_v1.types.ListDataAttributeBindingsRequest" - }, - { - "name": "parent", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.cloud.dataplex_v1.services.data_taxonomy_service.pagers.ListDataAttributeBindingsAsyncPager", - "shortName": "list_data_attribute_bindings" - }, - "description": "Sample for ListDataAttributeBindings", - "file": "dataplex_v1_generated_data_taxonomy_service_list_data_attribute_bindings_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "dataplex_v1_generated_DataTaxonomyService_ListDataAttributeBindings_async", - "segments": [ - { - "end": 52, - "start": 27, - "type": "FULL" - }, - { - "end": 52, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 53, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "dataplex_v1_generated_data_taxonomy_service_list_data_attribute_bindings_async.py" - }, - { - 
"canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.dataplex_v1.DataTaxonomyServiceClient", - "shortName": "DataTaxonomyServiceClient" - }, - "fullName": "google.cloud.dataplex_v1.DataTaxonomyServiceClient.list_data_attribute_bindings", - "method": { - "fullName": "google.cloud.dataplex.v1.DataTaxonomyService.ListDataAttributeBindings", - "service": { - "fullName": "google.cloud.dataplex.v1.DataTaxonomyService", - "shortName": "DataTaxonomyService" - }, - "shortName": "ListDataAttributeBindings" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.dataplex_v1.types.ListDataAttributeBindingsRequest" - }, - { - "name": "parent", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.cloud.dataplex_v1.services.data_taxonomy_service.pagers.ListDataAttributeBindingsPager", - "shortName": "list_data_attribute_bindings" - }, - "description": "Sample for ListDataAttributeBindings", - "file": "dataplex_v1_generated_data_taxonomy_service_list_data_attribute_bindings_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "dataplex_v1_generated_DataTaxonomyService_ListDataAttributeBindings_sync", - "segments": [ - { - "end": 52, - "start": 27, - "type": "FULL" - }, - { - "end": 52, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 53, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "dataplex_v1_generated_data_taxonomy_service_list_data_attribute_bindings_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.dataplex_v1.DataTaxonomyServiceAsyncClient", - "shortName": "DataTaxonomyServiceAsyncClient" - }, - "fullName": "google.cloud.dataplex_v1.DataTaxonomyServiceAsyncClient.list_data_attributes", - "method": { - "fullName": "google.cloud.dataplex.v1.DataTaxonomyService.ListDataAttributes", - "service": { - "fullName": "google.cloud.dataplex.v1.DataTaxonomyService", - "shortName": "DataTaxonomyService" - }, - "shortName": "ListDataAttributes" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.dataplex_v1.types.ListDataAttributesRequest" - }, - { - "name": "parent", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.cloud.dataplex_v1.services.data_taxonomy_service.pagers.ListDataAttributesAsyncPager", - "shortName": "list_data_attributes" - }, - "description": "Sample for ListDataAttributes", - "file": "dataplex_v1_generated_data_taxonomy_service_list_data_attributes_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "dataplex_v1_generated_DataTaxonomyService_ListDataAttributes_async", - "segments": [ - { - "end": 52, - "start": 27, - "type": "FULL" - }, - { - "end": 52, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 53, - "start": 49, 
- "type": "RESPONSE_HANDLING" - } - ], - "title": "dataplex_v1_generated_data_taxonomy_service_list_data_attributes_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.dataplex_v1.DataTaxonomyServiceClient", - "shortName": "DataTaxonomyServiceClient" - }, - "fullName": "google.cloud.dataplex_v1.DataTaxonomyServiceClient.list_data_attributes", - "method": { - "fullName": "google.cloud.dataplex.v1.DataTaxonomyService.ListDataAttributes", - "service": { - "fullName": "google.cloud.dataplex.v1.DataTaxonomyService", - "shortName": "DataTaxonomyService" - }, - "shortName": "ListDataAttributes" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.dataplex_v1.types.ListDataAttributesRequest" - }, - { - "name": "parent", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.cloud.dataplex_v1.services.data_taxonomy_service.pagers.ListDataAttributesPager", - "shortName": "list_data_attributes" - }, - "description": "Sample for ListDataAttributes", - "file": "dataplex_v1_generated_data_taxonomy_service_list_data_attributes_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "dataplex_v1_generated_DataTaxonomyService_ListDataAttributes_sync", - "segments": [ - { - "end": 52, - "start": 27, - "type": "FULL" - }, - { - "end": 52, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 53, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "dataplex_v1_generated_data_taxonomy_service_list_data_attributes_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.dataplex_v1.DataTaxonomyServiceAsyncClient", - "shortName": "DataTaxonomyServiceAsyncClient" - }, - "fullName": "google.cloud.dataplex_v1.DataTaxonomyServiceAsyncClient.list_data_taxonomies", - "method": { - "fullName": "google.cloud.dataplex.v1.DataTaxonomyService.ListDataTaxonomies", - "service": { - "fullName": "google.cloud.dataplex.v1.DataTaxonomyService", - "shortName": "DataTaxonomyService" - }, - "shortName": "ListDataTaxonomies" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.dataplex_v1.types.ListDataTaxonomiesRequest" - }, - { - "name": "parent", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.cloud.dataplex_v1.services.data_taxonomy_service.pagers.ListDataTaxonomiesAsyncPager", - "shortName": "list_data_taxonomies" - }, - "description": "Sample for ListDataTaxonomies", - "file": "dataplex_v1_generated_data_taxonomy_service_list_data_taxonomies_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "dataplex_v1_generated_DataTaxonomyService_ListDataTaxonomies_async", - "segments": [ - { - "end": 52, - "start": 27, - "type": "FULL" - }, - { - "end": 52, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - 
"type": "REQUEST_EXECUTION" - }, - { - "end": 53, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "dataplex_v1_generated_data_taxonomy_service_list_data_taxonomies_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.dataplex_v1.DataTaxonomyServiceClient", - "shortName": "DataTaxonomyServiceClient" - }, - "fullName": "google.cloud.dataplex_v1.DataTaxonomyServiceClient.list_data_taxonomies", - "method": { - "fullName": "google.cloud.dataplex.v1.DataTaxonomyService.ListDataTaxonomies", - "service": { - "fullName": "google.cloud.dataplex.v1.DataTaxonomyService", - "shortName": "DataTaxonomyService" - }, - "shortName": "ListDataTaxonomies" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.dataplex_v1.types.ListDataTaxonomiesRequest" - }, - { - "name": "parent", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.cloud.dataplex_v1.services.data_taxonomy_service.pagers.ListDataTaxonomiesPager", - "shortName": "list_data_taxonomies" - }, - "description": "Sample for ListDataTaxonomies", - "file": "dataplex_v1_generated_data_taxonomy_service_list_data_taxonomies_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "dataplex_v1_generated_DataTaxonomyService_ListDataTaxonomies_sync", - "segments": [ - { - "end": 52, - "start": 27, - "type": "FULL" - }, - { - "end": 52, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 53, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "dataplex_v1_generated_data_taxonomy_service_list_data_taxonomies_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.dataplex_v1.DataTaxonomyServiceAsyncClient", - "shortName": "DataTaxonomyServiceAsyncClient" - }, - "fullName": "google.cloud.dataplex_v1.DataTaxonomyServiceAsyncClient.update_data_attribute_binding", - "method": { - "fullName": "google.cloud.dataplex.v1.DataTaxonomyService.UpdateDataAttributeBinding", - "service": { - "fullName": "google.cloud.dataplex.v1.DataTaxonomyService", - "shortName": "DataTaxonomyService" - }, - "shortName": "UpdateDataAttributeBinding" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.dataplex_v1.types.UpdateDataAttributeBindingRequest" - }, - { - "name": "data_attribute_binding", - "type": "google.cloud.dataplex_v1.types.DataAttributeBinding" - }, - { - "name": "update_mask", - "type": "google.protobuf.field_mask_pb2.FieldMask" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.api_core.operation_async.AsyncOperation", - "shortName": "update_data_attribute_binding" - }, - "description": "Sample for UpdateDataAttributeBinding", - "file": "dataplex_v1_generated_data_taxonomy_service_update_data_attribute_binding_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "dataplex_v1_generated_DataTaxonomyService_UpdateDataAttributeBinding_async", - "segments": [ - { - "end": 58, - "start": 27, - "type": "FULL" 
- }, - { - "end": 58, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 48, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 55, - "start": 49, - "type": "REQUEST_EXECUTION" - }, - { - "end": 59, - "start": 56, - "type": "RESPONSE_HANDLING" - } - ], - "title": "dataplex_v1_generated_data_taxonomy_service_update_data_attribute_binding_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.dataplex_v1.DataTaxonomyServiceClient", - "shortName": "DataTaxonomyServiceClient" - }, - "fullName": "google.cloud.dataplex_v1.DataTaxonomyServiceClient.update_data_attribute_binding", - "method": { - "fullName": "google.cloud.dataplex.v1.DataTaxonomyService.UpdateDataAttributeBinding", - "service": { - "fullName": "google.cloud.dataplex.v1.DataTaxonomyService", - "shortName": "DataTaxonomyService" - }, - "shortName": "UpdateDataAttributeBinding" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.dataplex_v1.types.UpdateDataAttributeBindingRequest" - }, - { - "name": "data_attribute_binding", - "type": "google.cloud.dataplex_v1.types.DataAttributeBinding" - }, - { - "name": "update_mask", - "type": "google.protobuf.field_mask_pb2.FieldMask" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.api_core.operation.Operation", - "shortName": "update_data_attribute_binding" - }, - "description": "Sample for UpdateDataAttributeBinding", - "file": "dataplex_v1_generated_data_taxonomy_service_update_data_attribute_binding_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "dataplex_v1_generated_DataTaxonomyService_UpdateDataAttributeBinding_sync", - "segments": [ - { - "end": 58, - "start": 27, - "type": "FULL" - }, - { - "end": 58, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 48, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 55, - "start": 49, - "type": "REQUEST_EXECUTION" - }, - { - "end": 59, - "start": 56, - "type": "RESPONSE_HANDLING" - } - ], - "title": "dataplex_v1_generated_data_taxonomy_service_update_data_attribute_binding_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.dataplex_v1.DataTaxonomyServiceAsyncClient", - "shortName": "DataTaxonomyServiceAsyncClient" - }, - "fullName": "google.cloud.dataplex_v1.DataTaxonomyServiceAsyncClient.update_data_attribute", - "method": { - "fullName": "google.cloud.dataplex.v1.DataTaxonomyService.UpdateDataAttribute", - "service": { - "fullName": "google.cloud.dataplex.v1.DataTaxonomyService", - "shortName": "DataTaxonomyService" - }, - "shortName": "UpdateDataAttribute" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.dataplex_v1.types.UpdateDataAttributeRequest" - }, - { - "name": "data_attribute", - "type": "google.cloud.dataplex_v1.types.DataAttribute" - }, - { - "name": "update_mask", - "type": "google.protobuf.field_mask_pb2.FieldMask" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.api_core.operation_async.AsyncOperation", - "shortName": "update_data_attribute" - 
}, - "description": "Sample for UpdateDataAttribute", - "file": "dataplex_v1_generated_data_taxonomy_service_update_data_attribute_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "dataplex_v1_generated_DataTaxonomyService_UpdateDataAttribute_async", - "segments": [ - { - "end": 54, - "start": 27, - "type": "FULL" - }, - { - "end": 54, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 44, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 51, - "start": 45, - "type": "REQUEST_EXECUTION" - }, - { - "end": 55, - "start": 52, - "type": "RESPONSE_HANDLING" - } - ], - "title": "dataplex_v1_generated_data_taxonomy_service_update_data_attribute_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.dataplex_v1.DataTaxonomyServiceClient", - "shortName": "DataTaxonomyServiceClient" - }, - "fullName": "google.cloud.dataplex_v1.DataTaxonomyServiceClient.update_data_attribute", - "method": { - "fullName": "google.cloud.dataplex.v1.DataTaxonomyService.UpdateDataAttribute", - "service": { - "fullName": "google.cloud.dataplex.v1.DataTaxonomyService", - "shortName": "DataTaxonomyService" - }, - "shortName": "UpdateDataAttribute" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.dataplex_v1.types.UpdateDataAttributeRequest" - }, - { - "name": "data_attribute", - "type": "google.cloud.dataplex_v1.types.DataAttribute" - }, - { - "name": "update_mask", - "type": "google.protobuf.field_mask_pb2.FieldMask" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.api_core.operation.Operation", - "shortName": "update_data_attribute" - }, - "description": "Sample for UpdateDataAttribute", - "file": "dataplex_v1_generated_data_taxonomy_service_update_data_attribute_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "dataplex_v1_generated_DataTaxonomyService_UpdateDataAttribute_sync", - "segments": [ - { - "end": 54, - "start": 27, - "type": "FULL" - }, - { - "end": 54, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 44, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 51, - "start": 45, - "type": "REQUEST_EXECUTION" - }, - { - "end": 55, - "start": 52, - "type": "RESPONSE_HANDLING" - } - ], - "title": "dataplex_v1_generated_data_taxonomy_service_update_data_attribute_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.dataplex_v1.DataTaxonomyServiceAsyncClient", - "shortName": "DataTaxonomyServiceAsyncClient" - }, - "fullName": "google.cloud.dataplex_v1.DataTaxonomyServiceAsyncClient.update_data_taxonomy", - "method": { - "fullName": "google.cloud.dataplex.v1.DataTaxonomyService.UpdateDataTaxonomy", - "service": { - "fullName": "google.cloud.dataplex.v1.DataTaxonomyService", - "shortName": "DataTaxonomyService" - }, - "shortName": "UpdateDataTaxonomy" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.dataplex_v1.types.UpdateDataTaxonomyRequest" - }, - { - "name": "data_taxonomy", - "type": "google.cloud.dataplex_v1.types.DataTaxonomy" - }, - { - "name": "update_mask", - "type": "google.protobuf.field_mask_pb2.FieldMask" - }, - { - "name": "retry", - "type": 
"google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.api_core.operation_async.AsyncOperation", - "shortName": "update_data_taxonomy" - }, - "description": "Sample for UpdateDataTaxonomy", - "file": "dataplex_v1_generated_data_taxonomy_service_update_data_taxonomy_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "dataplex_v1_generated_DataTaxonomyService_UpdateDataTaxonomy_async", - "segments": [ - { - "end": 54, - "start": 27, - "type": "FULL" - }, - { - "end": 54, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 44, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 51, - "start": 45, - "type": "REQUEST_EXECUTION" - }, - { - "end": 55, - "start": 52, - "type": "RESPONSE_HANDLING" - } - ], - "title": "dataplex_v1_generated_data_taxonomy_service_update_data_taxonomy_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.dataplex_v1.DataTaxonomyServiceClient", - "shortName": "DataTaxonomyServiceClient" - }, - "fullName": "google.cloud.dataplex_v1.DataTaxonomyServiceClient.update_data_taxonomy", - "method": { - "fullName": "google.cloud.dataplex.v1.DataTaxonomyService.UpdateDataTaxonomy", - "service": { - "fullName": "google.cloud.dataplex.v1.DataTaxonomyService", - "shortName": "DataTaxonomyService" - }, - "shortName": "UpdateDataTaxonomy" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.dataplex_v1.types.UpdateDataTaxonomyRequest" - }, - { - "name": "data_taxonomy", - "type": "google.cloud.dataplex_v1.types.DataTaxonomy" - }, - { - "name": "update_mask", - "type": "google.protobuf.field_mask_pb2.FieldMask" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.api_core.operation.Operation", - "shortName": "update_data_taxonomy" - }, - "description": "Sample for UpdateDataTaxonomy", - "file": "dataplex_v1_generated_data_taxonomy_service_update_data_taxonomy_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "dataplex_v1_generated_DataTaxonomyService_UpdateDataTaxonomy_sync", - "segments": [ - { - "end": 54, - "start": 27, - "type": "FULL" - }, - { - "end": 54, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 44, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 51, - "start": 45, - "type": "REQUEST_EXECUTION" - }, - { - "end": 55, - "start": 52, - "type": "RESPONSE_HANDLING" - } - ], - "title": "dataplex_v1_generated_data_taxonomy_service_update_data_taxonomy_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.dataplex_v1.DataplexServiceAsyncClient", - "shortName": "DataplexServiceAsyncClient" - }, - "fullName": "google.cloud.dataplex_v1.DataplexServiceAsyncClient.cancel_job", - "method": { - "fullName": "google.cloud.dataplex.v1.DataplexService.CancelJob", - "service": { - "fullName": "google.cloud.dataplex.v1.DataplexService", - "shortName": "DataplexService" - }, - "shortName": "CancelJob" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.dataplex_v1.types.CancelJobRequest" - }, - { - "name": "name", - "type": "str" 
- }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "shortName": "cancel_job" - }, - "description": "Sample for CancelJob", - "file": "dataplex_v1_generated_dataplex_service_cancel_job_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "dataplex_v1_generated_DataplexService_CancelJob_async", - "segments": [ - { - "end": 49, - "start": 27, - "type": "FULL" - }, - { - "end": 49, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 50, - "type": "RESPONSE_HANDLING" - } - ], - "title": "dataplex_v1_generated_dataplex_service_cancel_job_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.dataplex_v1.DataplexServiceClient", - "shortName": "DataplexServiceClient" - }, - "fullName": "google.cloud.dataplex_v1.DataplexServiceClient.cancel_job", - "method": { - "fullName": "google.cloud.dataplex.v1.DataplexService.CancelJob", - "service": { - "fullName": "google.cloud.dataplex.v1.DataplexService", - "shortName": "DataplexService" - }, - "shortName": "CancelJob" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.dataplex_v1.types.CancelJobRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "shortName": "cancel_job" - }, - "description": "Sample for CancelJob", - "file": "dataplex_v1_generated_dataplex_service_cancel_job_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "dataplex_v1_generated_DataplexService_CancelJob_sync", - "segments": [ - { - "end": 49, - "start": 27, - "type": "FULL" - }, - { - "end": 49, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 50, - "type": "RESPONSE_HANDLING" - } - ], - "title": "dataplex_v1_generated_dataplex_service_cancel_job_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.dataplex_v1.DataplexServiceAsyncClient", - "shortName": "DataplexServiceAsyncClient" - }, - "fullName": "google.cloud.dataplex_v1.DataplexServiceAsyncClient.create_asset", - "method": { - "fullName": "google.cloud.dataplex.v1.DataplexService.CreateAsset", - "service": { - "fullName": "google.cloud.dataplex.v1.DataplexService", - "shortName": "DataplexService" - }, - "shortName": "CreateAsset" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.dataplex_v1.types.CreateAssetRequest" - }, - { - "name": "parent", - "type": "str" - }, - { - "name": "asset", - "type": "google.cloud.dataplex_v1.types.Asset" - }, - { - "name": "asset_id", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.api_core.operation_async.AsyncOperation", - "shortName": "create_asset" - }, - "description": "Sample for 
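CancelJob is the one method in this stretch with no resultType in its metadata: it returns nothing on success. A minimal sketch with a placeholder job name:

    from google.cloud import dataplex_v1

    client = dataplex_v1.DataplexServiceClient()
    name = (
        "projects/my-project/locations/us-central1"
        "/lakes/my-lake/tasks/my-task/jobs/my-job"  # placeholder
    )

    # Returns None; a failed cancellation surfaces as a
    # google.api_core.exceptions.GoogleAPICallError.
    client.cancel_job(name=name)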
CreateAsset", - "file": "dataplex_v1_generated_dataplex_service_create_asset_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "dataplex_v1_generated_DataplexService_CreateAsset_async", - "segments": [ - { - "end": 60, - "start": 27, - "type": "FULL" - }, - { - "end": 60, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 50, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 57, - "start": 51, - "type": "REQUEST_EXECUTION" - }, - { - "end": 61, - "start": 58, - "type": "RESPONSE_HANDLING" - } - ], - "title": "dataplex_v1_generated_dataplex_service_create_asset_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.dataplex_v1.DataplexServiceClient", - "shortName": "DataplexServiceClient" - }, - "fullName": "google.cloud.dataplex_v1.DataplexServiceClient.create_asset", - "method": { - "fullName": "google.cloud.dataplex.v1.DataplexService.CreateAsset", - "service": { - "fullName": "google.cloud.dataplex.v1.DataplexService", - "shortName": "DataplexService" - }, - "shortName": "CreateAsset" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.dataplex_v1.types.CreateAssetRequest" - }, - { - "name": "parent", - "type": "str" - }, - { - "name": "asset", - "type": "google.cloud.dataplex_v1.types.Asset" - }, - { - "name": "asset_id", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.api_core.operation.Operation", - "shortName": "create_asset" - }, - "description": "Sample for CreateAsset", - "file": "dataplex_v1_generated_dataplex_service_create_asset_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "dataplex_v1_generated_DataplexService_CreateAsset_sync", - "segments": [ - { - "end": 60, - "start": 27, - "type": "FULL" - }, - { - "end": 60, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 50, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 57, - "start": 51, - "type": "REQUEST_EXECUTION" - }, - { - "end": 61, - "start": 58, - "type": "RESPONSE_HANDLING" - } - ], - "title": "dataplex_v1_generated_dataplex_service_create_asset_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.dataplex_v1.DataplexServiceAsyncClient", - "shortName": "DataplexServiceAsyncClient" - }, - "fullName": "google.cloud.dataplex_v1.DataplexServiceAsyncClient.create_environment", - "method": { - "fullName": "google.cloud.dataplex.v1.DataplexService.CreateEnvironment", - "service": { - "fullName": "google.cloud.dataplex.v1.DataplexService", - "shortName": "DataplexService" - }, - "shortName": "CreateEnvironment" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.dataplex_v1.types.CreateEnvironmentRequest" - }, - { - "name": "parent", - "type": "str" - }, - { - "name": "environment", - "type": "google.cloud.dataplex_v1.types.Environment" - }, - { - "name": "environment_id", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.api_core.operation_async.AsyncOperation", - "shortName": 
"create_environment" - }, - "description": "Sample for CreateEnvironment", - "file": "dataplex_v1_generated_dataplex_service_create_environment_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "dataplex_v1_generated_DataplexService_CreateEnvironment_async", - "segments": [ - { - "end": 60, - "start": 27, - "type": "FULL" - }, - { - "end": 60, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 50, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 57, - "start": 51, - "type": "REQUEST_EXECUTION" - }, - { - "end": 61, - "start": 58, - "type": "RESPONSE_HANDLING" - } - ], - "title": "dataplex_v1_generated_dataplex_service_create_environment_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.dataplex_v1.DataplexServiceClient", - "shortName": "DataplexServiceClient" - }, - "fullName": "google.cloud.dataplex_v1.DataplexServiceClient.create_environment", - "method": { - "fullName": "google.cloud.dataplex.v1.DataplexService.CreateEnvironment", - "service": { - "fullName": "google.cloud.dataplex.v1.DataplexService", - "shortName": "DataplexService" - }, - "shortName": "CreateEnvironment" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.dataplex_v1.types.CreateEnvironmentRequest" - }, - { - "name": "parent", - "type": "str" - }, - { - "name": "environment", - "type": "google.cloud.dataplex_v1.types.Environment" - }, - { - "name": "environment_id", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.api_core.operation.Operation", - "shortName": "create_environment" - }, - "description": "Sample for CreateEnvironment", - "file": "dataplex_v1_generated_dataplex_service_create_environment_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "dataplex_v1_generated_DataplexService_CreateEnvironment_sync", - "segments": [ - { - "end": 60, - "start": 27, - "type": "FULL" - }, - { - "end": 60, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 50, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 57, - "start": 51, - "type": "REQUEST_EXECUTION" - }, - { - "end": 61, - "start": 58, - "type": "RESPONSE_HANDLING" - } - ], - "title": "dataplex_v1_generated_dataplex_service_create_environment_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.dataplex_v1.DataplexServiceAsyncClient", - "shortName": "DataplexServiceAsyncClient" - }, - "fullName": "google.cloud.dataplex_v1.DataplexServiceAsyncClient.create_lake", - "method": { - "fullName": "google.cloud.dataplex.v1.DataplexService.CreateLake", - "service": { - "fullName": "google.cloud.dataplex.v1.DataplexService", - "shortName": "DataplexService" - }, - "shortName": "CreateLake" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.dataplex_v1.types.CreateLakeRequest" - }, - { - "name": "parent", - "type": "str" - }, - { - "name": "lake", - "type": "google.cloud.dataplex_v1.types.Lake" - }, - { - "name": "lake_id", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": 
"Sequence[Tuple[str, str]" - } - ], - "resultType": "google.api_core.operation_async.AsyncOperation", - "shortName": "create_lake" - }, - "description": "Sample for CreateLake", - "file": "dataplex_v1_generated_dataplex_service_create_lake_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "dataplex_v1_generated_DataplexService_CreateLake_async", - "segments": [ - { - "end": 56, - "start": 27, - "type": "FULL" - }, - { - "end": 56, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 46, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 53, - "start": 47, - "type": "REQUEST_EXECUTION" - }, - { - "end": 57, - "start": 54, - "type": "RESPONSE_HANDLING" - } - ], - "title": "dataplex_v1_generated_dataplex_service_create_lake_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.dataplex_v1.DataplexServiceClient", - "shortName": "DataplexServiceClient" - }, - "fullName": "google.cloud.dataplex_v1.DataplexServiceClient.create_lake", - "method": { - "fullName": "google.cloud.dataplex.v1.DataplexService.CreateLake", - "service": { - "fullName": "google.cloud.dataplex.v1.DataplexService", - "shortName": "DataplexService" - }, - "shortName": "CreateLake" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.dataplex_v1.types.CreateLakeRequest" - }, - { - "name": "parent", - "type": "str" - }, - { - "name": "lake", - "type": "google.cloud.dataplex_v1.types.Lake" - }, - { - "name": "lake_id", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.api_core.operation.Operation", - "shortName": "create_lake" - }, - "description": "Sample for CreateLake", - "file": "dataplex_v1_generated_dataplex_service_create_lake_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "dataplex_v1_generated_DataplexService_CreateLake_sync", - "segments": [ - { - "end": 56, - "start": 27, - "type": "FULL" - }, - { - "end": 56, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 46, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 53, - "start": 47, - "type": "REQUEST_EXECUTION" - }, - { - "end": 57, - "start": 54, - "type": "RESPONSE_HANDLING" - } - ], - "title": "dataplex_v1_generated_dataplex_service_create_lake_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.dataplex_v1.DataplexServiceAsyncClient", - "shortName": "DataplexServiceAsyncClient" - }, - "fullName": "google.cloud.dataplex_v1.DataplexServiceAsyncClient.create_task", - "method": { - "fullName": "google.cloud.dataplex.v1.DataplexService.CreateTask", - "service": { - "fullName": "google.cloud.dataplex.v1.DataplexService", - "shortName": "DataplexService" - }, - "shortName": "CreateTask" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.dataplex_v1.types.CreateTaskRequest" - }, - { - "name": "parent", - "type": "str" - }, - { - "name": "task", - "type": "google.cloud.dataplex_v1.types.Task" - }, - { - "name": "task_id", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": 
"Sequence[Tuple[str, str]" - } - ], - "resultType": "google.api_core.operation_async.AsyncOperation", - "shortName": "create_task" - }, - "description": "Sample for CreateTask", - "file": "dataplex_v1_generated_dataplex_service_create_task_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "dataplex_v1_generated_DataplexService_CreateTask_async", - "segments": [ - { - "end": 63, - "start": 27, - "type": "FULL" - }, - { - "end": 63, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 53, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 60, - "start": 54, - "type": "REQUEST_EXECUTION" - }, - { - "end": 64, - "start": 61, - "type": "RESPONSE_HANDLING" - } - ], - "title": "dataplex_v1_generated_dataplex_service_create_task_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.dataplex_v1.DataplexServiceClient", - "shortName": "DataplexServiceClient" - }, - "fullName": "google.cloud.dataplex_v1.DataplexServiceClient.create_task", - "method": { - "fullName": "google.cloud.dataplex.v1.DataplexService.CreateTask", - "service": { - "fullName": "google.cloud.dataplex.v1.DataplexService", - "shortName": "DataplexService" - }, - "shortName": "CreateTask" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.dataplex_v1.types.CreateTaskRequest" - }, - { - "name": "parent", - "type": "str" - }, - { - "name": "task", - "type": "google.cloud.dataplex_v1.types.Task" - }, - { - "name": "task_id", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.api_core.operation.Operation", - "shortName": "create_task" - }, - "description": "Sample for CreateTask", - "file": "dataplex_v1_generated_dataplex_service_create_task_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "dataplex_v1_generated_DataplexService_CreateTask_sync", - "segments": [ - { - "end": 63, - "start": 27, - "type": "FULL" - }, - { - "end": 63, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 53, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 60, - "start": 54, - "type": "REQUEST_EXECUTION" - }, - { - "end": 64, - "start": 61, - "type": "RESPONSE_HANDLING" - } - ], - "title": "dataplex_v1_generated_dataplex_service_create_task_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.dataplex_v1.DataplexServiceAsyncClient", - "shortName": "DataplexServiceAsyncClient" - }, - "fullName": "google.cloud.dataplex_v1.DataplexServiceAsyncClient.create_zone", - "method": { - "fullName": "google.cloud.dataplex.v1.DataplexService.CreateZone", - "service": { - "fullName": "google.cloud.dataplex.v1.DataplexService", - "shortName": "DataplexService" - }, - "shortName": "CreateZone" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.dataplex_v1.types.CreateZoneRequest" - }, - { - "name": "parent", - "type": "str" - }, - { - "name": "zone", - "type": "google.cloud.dataplex_v1.types.Zone" - }, - { - "name": "zone_id", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": 
"Sequence[Tuple[str, str]" - } - ], - "resultType": "google.api_core.operation_async.AsyncOperation", - "shortName": "create_zone" - }, - "description": "Sample for CreateZone", - "file": "dataplex_v1_generated_dataplex_service_create_zone_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "dataplex_v1_generated_DataplexService_CreateZone_async", - "segments": [ - { - "end": 61, - "start": 27, - "type": "FULL" - }, - { - "end": 61, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 51, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 58, - "start": 52, - "type": "REQUEST_EXECUTION" - }, - { - "end": 62, - "start": 59, - "type": "RESPONSE_HANDLING" - } - ], - "title": "dataplex_v1_generated_dataplex_service_create_zone_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.dataplex_v1.DataplexServiceClient", - "shortName": "DataplexServiceClient" - }, - "fullName": "google.cloud.dataplex_v1.DataplexServiceClient.create_zone", - "method": { - "fullName": "google.cloud.dataplex.v1.DataplexService.CreateZone", - "service": { - "fullName": "google.cloud.dataplex.v1.DataplexService", - "shortName": "DataplexService" - }, - "shortName": "CreateZone" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.dataplex_v1.types.CreateZoneRequest" - }, - { - "name": "parent", - "type": "str" - }, - { - "name": "zone", - "type": "google.cloud.dataplex_v1.types.Zone" - }, - { - "name": "zone_id", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.api_core.operation.Operation", - "shortName": "create_zone" - }, - "description": "Sample for CreateZone", - "file": "dataplex_v1_generated_dataplex_service_create_zone_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "dataplex_v1_generated_DataplexService_CreateZone_sync", - "segments": [ - { - "end": 61, - "start": 27, - "type": "FULL" - }, - { - "end": 61, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 51, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 58, - "start": 52, - "type": "REQUEST_EXECUTION" - }, - { - "end": 62, - "start": 59, - "type": "RESPONSE_HANDLING" - } - ], - "title": "dataplex_v1_generated_dataplex_service_create_zone_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.dataplex_v1.DataplexServiceAsyncClient", - "shortName": "DataplexServiceAsyncClient" - }, - "fullName": "google.cloud.dataplex_v1.DataplexServiceAsyncClient.delete_asset", - "method": { - "fullName": "google.cloud.dataplex.v1.DataplexService.DeleteAsset", - "service": { - "fullName": "google.cloud.dataplex.v1.DataplexService", - "shortName": "DataplexService" - }, - "shortName": "DeleteAsset" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.dataplex_v1.types.DeleteAssetRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.api_core.operation_async.AsyncOperation", - "shortName": "delete_asset" - }, 
- "description": "Sample for DeleteAsset", - "file": "dataplex_v1_generated_dataplex_service_delete_asset_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "dataplex_v1_generated_DataplexService_DeleteAsset_async", - "segments": [ - { - "end": 55, - "start": 27, - "type": "FULL" - }, - { - "end": 55, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 52, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 56, - "start": 53, - "type": "RESPONSE_HANDLING" - } - ], - "title": "dataplex_v1_generated_dataplex_service_delete_asset_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.dataplex_v1.DataplexServiceClient", - "shortName": "DataplexServiceClient" - }, - "fullName": "google.cloud.dataplex_v1.DataplexServiceClient.delete_asset", - "method": { - "fullName": "google.cloud.dataplex.v1.DataplexService.DeleteAsset", - "service": { - "fullName": "google.cloud.dataplex.v1.DataplexService", - "shortName": "DataplexService" - }, - "shortName": "DeleteAsset" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.dataplex_v1.types.DeleteAssetRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.api_core.operation.Operation", - "shortName": "delete_asset" - }, - "description": "Sample for DeleteAsset", - "file": "dataplex_v1_generated_dataplex_service_delete_asset_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "dataplex_v1_generated_DataplexService_DeleteAsset_sync", - "segments": [ - { - "end": 55, - "start": 27, - "type": "FULL" - }, - { - "end": 55, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 52, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 56, - "start": 53, - "type": "RESPONSE_HANDLING" - } - ], - "title": "dataplex_v1_generated_dataplex_service_delete_asset_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.dataplex_v1.DataplexServiceAsyncClient", - "shortName": "DataplexServiceAsyncClient" - }, - "fullName": "google.cloud.dataplex_v1.DataplexServiceAsyncClient.delete_environment", - "method": { - "fullName": "google.cloud.dataplex.v1.DataplexService.DeleteEnvironment", - "service": { - "fullName": "google.cloud.dataplex.v1.DataplexService", - "shortName": "DataplexService" - }, - "shortName": "DeleteEnvironment" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.dataplex_v1.types.DeleteEnvironmentRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.api_core.operation_async.AsyncOperation", - "shortName": "delete_environment" - }, - "description": "Sample for DeleteEnvironment", - "file": "dataplex_v1_generated_dataplex_service_delete_environment_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": 
"dataplex_v1_generated_DataplexService_DeleteEnvironment_async", - "segments": [ - { - "end": 55, - "start": 27, - "type": "FULL" - }, - { - "end": 55, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 52, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 56, - "start": 53, - "type": "RESPONSE_HANDLING" - } - ], - "title": "dataplex_v1_generated_dataplex_service_delete_environment_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.dataplex_v1.DataplexServiceClient", - "shortName": "DataplexServiceClient" - }, - "fullName": "google.cloud.dataplex_v1.DataplexServiceClient.delete_environment", - "method": { - "fullName": "google.cloud.dataplex.v1.DataplexService.DeleteEnvironment", - "service": { - "fullName": "google.cloud.dataplex.v1.DataplexService", - "shortName": "DataplexService" - }, - "shortName": "DeleteEnvironment" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.dataplex_v1.types.DeleteEnvironmentRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.api_core.operation.Operation", - "shortName": "delete_environment" - }, - "description": "Sample for DeleteEnvironment", - "file": "dataplex_v1_generated_dataplex_service_delete_environment_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "dataplex_v1_generated_DataplexService_DeleteEnvironment_sync", - "segments": [ - { - "end": 55, - "start": 27, - "type": "FULL" - }, - { - "end": 55, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 52, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 56, - "start": 53, - "type": "RESPONSE_HANDLING" - } - ], - "title": "dataplex_v1_generated_dataplex_service_delete_environment_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.dataplex_v1.DataplexServiceAsyncClient", - "shortName": "DataplexServiceAsyncClient" - }, - "fullName": "google.cloud.dataplex_v1.DataplexServiceAsyncClient.delete_lake", - "method": { - "fullName": "google.cloud.dataplex.v1.DataplexService.DeleteLake", - "service": { - "fullName": "google.cloud.dataplex.v1.DataplexService", - "shortName": "DataplexService" - }, - "shortName": "DeleteLake" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.dataplex_v1.types.DeleteLakeRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.api_core.operation_async.AsyncOperation", - "shortName": "delete_lake" - }, - "description": "Sample for DeleteLake", - "file": "dataplex_v1_generated_dataplex_service_delete_lake_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "dataplex_v1_generated_DataplexService_DeleteLake_async", - "segments": [ - { - "end": 55, - "start": 27, - "type": "FULL" - }, - { - "end": 55, - "start": 27, - "type": "SHORT" - }, - { 
- "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 52, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 56, - "start": 53, - "type": "RESPONSE_HANDLING" - } - ], - "title": "dataplex_v1_generated_dataplex_service_delete_lake_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.dataplex_v1.DataplexServiceClient", - "shortName": "DataplexServiceClient" - }, - "fullName": "google.cloud.dataplex_v1.DataplexServiceClient.delete_lake", - "method": { - "fullName": "google.cloud.dataplex.v1.DataplexService.DeleteLake", - "service": { - "fullName": "google.cloud.dataplex.v1.DataplexService", - "shortName": "DataplexService" - }, - "shortName": "DeleteLake" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.dataplex_v1.types.DeleteLakeRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.api_core.operation.Operation", - "shortName": "delete_lake" - }, - "description": "Sample for DeleteLake", - "file": "dataplex_v1_generated_dataplex_service_delete_lake_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "dataplex_v1_generated_DataplexService_DeleteLake_sync", - "segments": [ - { - "end": 55, - "start": 27, - "type": "FULL" - }, - { - "end": 55, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 52, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 56, - "start": 53, - "type": "RESPONSE_HANDLING" - } - ], - "title": "dataplex_v1_generated_dataplex_service_delete_lake_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.dataplex_v1.DataplexServiceAsyncClient", - "shortName": "DataplexServiceAsyncClient" - }, - "fullName": "google.cloud.dataplex_v1.DataplexServiceAsyncClient.delete_task", - "method": { - "fullName": "google.cloud.dataplex.v1.DataplexService.DeleteTask", - "service": { - "fullName": "google.cloud.dataplex.v1.DataplexService", - "shortName": "DataplexService" - }, - "shortName": "DeleteTask" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.dataplex_v1.types.DeleteTaskRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.api_core.operation_async.AsyncOperation", - "shortName": "delete_task" - }, - "description": "Sample for DeleteTask", - "file": "dataplex_v1_generated_dataplex_service_delete_task_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "dataplex_v1_generated_DataplexService_DeleteTask_async", - "segments": [ - { - "end": 55, - "start": 27, - "type": "FULL" - }, - { - "end": 55, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 52, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 56, - "start": 53, - "type": 
"RESPONSE_HANDLING" - } - ], - "title": "dataplex_v1_generated_dataplex_service_delete_task_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.dataplex_v1.DataplexServiceClient", - "shortName": "DataplexServiceClient" - }, - "fullName": "google.cloud.dataplex_v1.DataplexServiceClient.delete_task", - "method": { - "fullName": "google.cloud.dataplex.v1.DataplexService.DeleteTask", - "service": { - "fullName": "google.cloud.dataplex.v1.DataplexService", - "shortName": "DataplexService" - }, - "shortName": "DeleteTask" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.dataplex_v1.types.DeleteTaskRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.api_core.operation.Operation", - "shortName": "delete_task" - }, - "description": "Sample for DeleteTask", - "file": "dataplex_v1_generated_dataplex_service_delete_task_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "dataplex_v1_generated_DataplexService_DeleteTask_sync", - "segments": [ - { - "end": 55, - "start": 27, - "type": "FULL" - }, - { - "end": 55, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 52, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 56, - "start": 53, - "type": "RESPONSE_HANDLING" - } - ], - "title": "dataplex_v1_generated_dataplex_service_delete_task_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.dataplex_v1.DataplexServiceAsyncClient", - "shortName": "DataplexServiceAsyncClient" - }, - "fullName": "google.cloud.dataplex_v1.DataplexServiceAsyncClient.delete_zone", - "method": { - "fullName": "google.cloud.dataplex.v1.DataplexService.DeleteZone", - "service": { - "fullName": "google.cloud.dataplex.v1.DataplexService", - "shortName": "DataplexService" - }, - "shortName": "DeleteZone" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.dataplex_v1.types.DeleteZoneRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.api_core.operation_async.AsyncOperation", - "shortName": "delete_zone" - }, - "description": "Sample for DeleteZone", - "file": "dataplex_v1_generated_dataplex_service_delete_zone_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "dataplex_v1_generated_DataplexService_DeleteZone_async", - "segments": [ - { - "end": 55, - "start": 27, - "type": "FULL" - }, - { - "end": 55, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 52, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 56, - "start": 53, - "type": "RESPONSE_HANDLING" - } - ], - "title": "dataplex_v1_generated_dataplex_service_delete_zone_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.dataplex_v1.DataplexServiceClient", - "shortName": 
"DataplexServiceClient" - }, - "fullName": "google.cloud.dataplex_v1.DataplexServiceClient.delete_zone", - "method": { - "fullName": "google.cloud.dataplex.v1.DataplexService.DeleteZone", - "service": { - "fullName": "google.cloud.dataplex.v1.DataplexService", - "shortName": "DataplexService" - }, - "shortName": "DeleteZone" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.dataplex_v1.types.DeleteZoneRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.api_core.operation.Operation", - "shortName": "delete_zone" - }, - "description": "Sample for DeleteZone", - "file": "dataplex_v1_generated_dataplex_service_delete_zone_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "dataplex_v1_generated_DataplexService_DeleteZone_sync", - "segments": [ - { - "end": 55, - "start": 27, - "type": "FULL" - }, - { - "end": 55, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 52, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 56, - "start": 53, - "type": "RESPONSE_HANDLING" - } - ], - "title": "dataplex_v1_generated_dataplex_service_delete_zone_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.dataplex_v1.DataplexServiceAsyncClient", - "shortName": "DataplexServiceAsyncClient" - }, - "fullName": "google.cloud.dataplex_v1.DataplexServiceAsyncClient.get_asset", - "method": { - "fullName": "google.cloud.dataplex.v1.DataplexService.GetAsset", - "service": { - "fullName": "google.cloud.dataplex.v1.DataplexService", - "shortName": "DataplexService" - }, - "shortName": "GetAsset" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.dataplex_v1.types.GetAssetRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.cloud.dataplex_v1.types.Asset", - "shortName": "get_asset" - }, - "description": "Sample for GetAsset", - "file": "dataplex_v1_generated_dataplex_service_get_asset_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "dataplex_v1_generated_DataplexService_GetAsset_async", - "segments": [ - { - "end": 51, - "start": 27, - "type": "FULL" - }, - { - "end": 51, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 52, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "dataplex_v1_generated_dataplex_service_get_asset_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.dataplex_v1.DataplexServiceClient", - "shortName": "DataplexServiceClient" - }, - "fullName": "google.cloud.dataplex_v1.DataplexServiceClient.get_asset", - "method": { - "fullName": "google.cloud.dataplex.v1.DataplexService.GetAsset", - "service": { - "fullName": "google.cloud.dataplex.v1.DataplexService", - "shortName": "DataplexService" - 
}, - "shortName": "GetAsset" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.dataplex_v1.types.GetAssetRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.cloud.dataplex_v1.types.Asset", - "shortName": "get_asset" - }, - "description": "Sample for GetAsset", - "file": "dataplex_v1_generated_dataplex_service_get_asset_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "dataplex_v1_generated_DataplexService_GetAsset_sync", - "segments": [ - { - "end": 51, - "start": 27, - "type": "FULL" - }, - { - "end": 51, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 52, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "dataplex_v1_generated_dataplex_service_get_asset_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.dataplex_v1.DataplexServiceAsyncClient", - "shortName": "DataplexServiceAsyncClient" - }, - "fullName": "google.cloud.dataplex_v1.DataplexServiceAsyncClient.get_environment", - "method": { - "fullName": "google.cloud.dataplex.v1.DataplexService.GetEnvironment", - "service": { - "fullName": "google.cloud.dataplex.v1.DataplexService", - "shortName": "DataplexService" - }, - "shortName": "GetEnvironment" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.dataplex_v1.types.GetEnvironmentRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.cloud.dataplex_v1.types.Environment", - "shortName": "get_environment" - }, - "description": "Sample for GetEnvironment", - "file": "dataplex_v1_generated_dataplex_service_get_environment_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "dataplex_v1_generated_DataplexService_GetEnvironment_async", - "segments": [ - { - "end": 51, - "start": 27, - "type": "FULL" - }, - { - "end": 51, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 52, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "dataplex_v1_generated_dataplex_service_get_environment_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.dataplex_v1.DataplexServiceClient", - "shortName": "DataplexServiceClient" - }, - "fullName": "google.cloud.dataplex_v1.DataplexServiceClient.get_environment", - "method": { - "fullName": "google.cloud.dataplex.v1.DataplexService.GetEnvironment", - "service": { - "fullName": "google.cloud.dataplex.v1.DataplexService", - "shortName": "DataplexService" - }, - "shortName": "GetEnvironment" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.dataplex_v1.types.GetEnvironmentRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "retry", - "type": 
"google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.cloud.dataplex_v1.types.Environment", - "shortName": "get_environment" - }, - "description": "Sample for GetEnvironment", - "file": "dataplex_v1_generated_dataplex_service_get_environment_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "dataplex_v1_generated_DataplexService_GetEnvironment_sync", - "segments": [ - { - "end": 51, - "start": 27, - "type": "FULL" - }, - { - "end": 51, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 52, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "dataplex_v1_generated_dataplex_service_get_environment_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.dataplex_v1.DataplexServiceAsyncClient", - "shortName": "DataplexServiceAsyncClient" - }, - "fullName": "google.cloud.dataplex_v1.DataplexServiceAsyncClient.get_job", - "method": { - "fullName": "google.cloud.dataplex.v1.DataplexService.GetJob", - "service": { - "fullName": "google.cloud.dataplex.v1.DataplexService", - "shortName": "DataplexService" - }, - "shortName": "GetJob" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.dataplex_v1.types.GetJobRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.cloud.dataplex_v1.types.Job", - "shortName": "get_job" - }, - "description": "Sample for GetJob", - "file": "dataplex_v1_generated_dataplex_service_get_job_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "dataplex_v1_generated_DataplexService_GetJob_async", - "segments": [ - { - "end": 51, - "start": 27, - "type": "FULL" - }, - { - "end": 51, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 52, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "dataplex_v1_generated_dataplex_service_get_job_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.dataplex_v1.DataplexServiceClient", - "shortName": "DataplexServiceClient" - }, - "fullName": "google.cloud.dataplex_v1.DataplexServiceClient.get_job", - "method": { - "fullName": "google.cloud.dataplex.v1.DataplexService.GetJob", - "service": { - "fullName": "google.cloud.dataplex.v1.DataplexService", - "shortName": "DataplexService" - }, - "shortName": "GetJob" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.dataplex_v1.types.GetJobRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.cloud.dataplex_v1.types.Job", - "shortName": "get_job" - }, - "description": "Sample for GetJob", - "file": 
"dataplex_v1_generated_dataplex_service_get_job_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "dataplex_v1_generated_DataplexService_GetJob_sync", - "segments": [ - { - "end": 51, - "start": 27, - "type": "FULL" - }, - { - "end": 51, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 52, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "dataplex_v1_generated_dataplex_service_get_job_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.dataplex_v1.DataplexServiceAsyncClient", - "shortName": "DataplexServiceAsyncClient" - }, - "fullName": "google.cloud.dataplex_v1.DataplexServiceAsyncClient.get_lake", - "method": { - "fullName": "google.cloud.dataplex.v1.DataplexService.GetLake", - "service": { - "fullName": "google.cloud.dataplex.v1.DataplexService", - "shortName": "DataplexService" - }, - "shortName": "GetLake" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.dataplex_v1.types.GetLakeRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.cloud.dataplex_v1.types.Lake", - "shortName": "get_lake" - }, - "description": "Sample for GetLake", - "file": "dataplex_v1_generated_dataplex_service_get_lake_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "dataplex_v1_generated_DataplexService_GetLake_async", - "segments": [ - { - "end": 51, - "start": 27, - "type": "FULL" - }, - { - "end": 51, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 52, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "dataplex_v1_generated_dataplex_service_get_lake_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.dataplex_v1.DataplexServiceClient", - "shortName": "DataplexServiceClient" - }, - "fullName": "google.cloud.dataplex_v1.DataplexServiceClient.get_lake", - "method": { - "fullName": "google.cloud.dataplex.v1.DataplexService.GetLake", - "service": { - "fullName": "google.cloud.dataplex.v1.DataplexService", - "shortName": "DataplexService" - }, - "shortName": "GetLake" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.dataplex_v1.types.GetLakeRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.cloud.dataplex_v1.types.Lake", - "shortName": "get_lake" - }, - "description": "Sample for GetLake", - "file": "dataplex_v1_generated_dataplex_service_get_lake_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "dataplex_v1_generated_DataplexService_GetLake_sync", - "segments": [ - { - "end": 51, - "start": 27, - "type": "FULL" - }, - { - "end": 51, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - 
"start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 52, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "dataplex_v1_generated_dataplex_service_get_lake_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.dataplex_v1.DataplexServiceAsyncClient", - "shortName": "DataplexServiceAsyncClient" - }, - "fullName": "google.cloud.dataplex_v1.DataplexServiceAsyncClient.get_task", - "method": { - "fullName": "google.cloud.dataplex.v1.DataplexService.GetTask", - "service": { - "fullName": "google.cloud.dataplex.v1.DataplexService", - "shortName": "DataplexService" - }, - "shortName": "GetTask" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.dataplex_v1.types.GetTaskRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.cloud.dataplex_v1.types.Task", - "shortName": "get_task" - }, - "description": "Sample for GetTask", - "file": "dataplex_v1_generated_dataplex_service_get_task_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "dataplex_v1_generated_DataplexService_GetTask_async", - "segments": [ - { - "end": 51, - "start": 27, - "type": "FULL" - }, - { - "end": 51, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 52, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "dataplex_v1_generated_dataplex_service_get_task_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.dataplex_v1.DataplexServiceClient", - "shortName": "DataplexServiceClient" - }, - "fullName": "google.cloud.dataplex_v1.DataplexServiceClient.get_task", - "method": { - "fullName": "google.cloud.dataplex.v1.DataplexService.GetTask", - "service": { - "fullName": "google.cloud.dataplex.v1.DataplexService", - "shortName": "DataplexService" - }, - "shortName": "GetTask" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.dataplex_v1.types.GetTaskRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.cloud.dataplex_v1.types.Task", - "shortName": "get_task" - }, - "description": "Sample for GetTask", - "file": "dataplex_v1_generated_dataplex_service_get_task_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "dataplex_v1_generated_DataplexService_GetTask_sync", - "segments": [ - { - "end": 51, - "start": 27, - "type": "FULL" - }, - { - "end": 51, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 52, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": 
"dataplex_v1_generated_dataplex_service_get_task_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.dataplex_v1.DataplexServiceAsyncClient", - "shortName": "DataplexServiceAsyncClient" - }, - "fullName": "google.cloud.dataplex_v1.DataplexServiceAsyncClient.get_zone", - "method": { - "fullName": "google.cloud.dataplex.v1.DataplexService.GetZone", - "service": { - "fullName": "google.cloud.dataplex.v1.DataplexService", - "shortName": "DataplexService" - }, - "shortName": "GetZone" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.dataplex_v1.types.GetZoneRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.cloud.dataplex_v1.types.Zone", - "shortName": "get_zone" - }, - "description": "Sample for GetZone", - "file": "dataplex_v1_generated_dataplex_service_get_zone_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "dataplex_v1_generated_DataplexService_GetZone_async", - "segments": [ - { - "end": 51, - "start": 27, - "type": "FULL" - }, - { - "end": 51, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 52, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "dataplex_v1_generated_dataplex_service_get_zone_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.dataplex_v1.DataplexServiceClient", - "shortName": "DataplexServiceClient" - }, - "fullName": "google.cloud.dataplex_v1.DataplexServiceClient.get_zone", - "method": { - "fullName": "google.cloud.dataplex.v1.DataplexService.GetZone", - "service": { - "fullName": "google.cloud.dataplex.v1.DataplexService", - "shortName": "DataplexService" - }, - "shortName": "GetZone" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.dataplex_v1.types.GetZoneRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.cloud.dataplex_v1.types.Zone", - "shortName": "get_zone" - }, - "description": "Sample for GetZone", - "file": "dataplex_v1_generated_dataplex_service_get_zone_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "dataplex_v1_generated_DataplexService_GetZone_sync", - "segments": [ - { - "end": 51, - "start": 27, - "type": "FULL" - }, - { - "end": 51, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 52, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "dataplex_v1_generated_dataplex_service_get_zone_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.dataplex_v1.DataplexServiceAsyncClient", - "shortName": "DataplexServiceAsyncClient" - }, - "fullName": 
"google.cloud.dataplex_v1.DataplexServiceAsyncClient.list_asset_actions", - "method": { - "fullName": "google.cloud.dataplex.v1.DataplexService.ListAssetActions", - "service": { - "fullName": "google.cloud.dataplex.v1.DataplexService", - "shortName": "DataplexService" - }, - "shortName": "ListAssetActions" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.dataplex_v1.types.ListAssetActionsRequest" - }, - { - "name": "parent", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.cloud.dataplex_v1.services.dataplex_service.pagers.ListAssetActionsAsyncPager", - "shortName": "list_asset_actions" - }, - "description": "Sample for ListAssetActions", - "file": "dataplex_v1_generated_dataplex_service_list_asset_actions_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "dataplex_v1_generated_DataplexService_ListAssetActions_async", - "segments": [ - { - "end": 52, - "start": 27, - "type": "FULL" - }, - { - "end": 52, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 53, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "dataplex_v1_generated_dataplex_service_list_asset_actions_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.dataplex_v1.DataplexServiceClient", - "shortName": "DataplexServiceClient" - }, - "fullName": "google.cloud.dataplex_v1.DataplexServiceClient.list_asset_actions", - "method": { - "fullName": "google.cloud.dataplex.v1.DataplexService.ListAssetActions", - "service": { - "fullName": "google.cloud.dataplex.v1.DataplexService", - "shortName": "DataplexService" - }, - "shortName": "ListAssetActions" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.dataplex_v1.types.ListAssetActionsRequest" - }, - { - "name": "parent", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.cloud.dataplex_v1.services.dataplex_service.pagers.ListAssetActionsPager", - "shortName": "list_asset_actions" - }, - "description": "Sample for ListAssetActions", - "file": "dataplex_v1_generated_dataplex_service_list_asset_actions_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "dataplex_v1_generated_DataplexService_ListAssetActions_sync", - "segments": [ - { - "end": 52, - "start": 27, - "type": "FULL" - }, - { - "end": 52, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 53, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "dataplex_v1_generated_dataplex_service_list_asset_actions_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.dataplex_v1.DataplexServiceAsyncClient", - "shortName": "DataplexServiceAsyncClient" - }, - "fullName": 
"google.cloud.dataplex_v1.DataplexServiceAsyncClient.list_assets", - "method": { - "fullName": "google.cloud.dataplex.v1.DataplexService.ListAssets", - "service": { - "fullName": "google.cloud.dataplex.v1.DataplexService", - "shortName": "DataplexService" - }, - "shortName": "ListAssets" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.dataplex_v1.types.ListAssetsRequest" - }, - { - "name": "parent", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.cloud.dataplex_v1.services.dataplex_service.pagers.ListAssetsAsyncPager", - "shortName": "list_assets" - }, - "description": "Sample for ListAssets", - "file": "dataplex_v1_generated_dataplex_service_list_assets_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "dataplex_v1_generated_DataplexService_ListAssets_async", - "segments": [ - { - "end": 52, - "start": 27, - "type": "FULL" - }, - { - "end": 52, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 53, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "dataplex_v1_generated_dataplex_service_list_assets_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.dataplex_v1.DataplexServiceClient", - "shortName": "DataplexServiceClient" - }, - "fullName": "google.cloud.dataplex_v1.DataplexServiceClient.list_assets", - "method": { - "fullName": "google.cloud.dataplex.v1.DataplexService.ListAssets", - "service": { - "fullName": "google.cloud.dataplex.v1.DataplexService", - "shortName": "DataplexService" - }, - "shortName": "ListAssets" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.dataplex_v1.types.ListAssetsRequest" - }, - { - "name": "parent", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.cloud.dataplex_v1.services.dataplex_service.pagers.ListAssetsPager", - "shortName": "list_assets" - }, - "description": "Sample for ListAssets", - "file": "dataplex_v1_generated_dataplex_service_list_assets_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "dataplex_v1_generated_DataplexService_ListAssets_sync", - "segments": [ - { - "end": 52, - "start": 27, - "type": "FULL" - }, - { - "end": 52, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 53, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "dataplex_v1_generated_dataplex_service_list_assets_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.dataplex_v1.DataplexServiceAsyncClient", - "shortName": "DataplexServiceAsyncClient" - }, - "fullName": "google.cloud.dataplex_v1.DataplexServiceAsyncClient.list_environments", - "method": { - "fullName": "google.cloud.dataplex.v1.DataplexService.ListEnvironments", - "service": { - "fullName": 
"google.cloud.dataplex.v1.DataplexService", - "shortName": "DataplexService" - }, - "shortName": "ListEnvironments" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.dataplex_v1.types.ListEnvironmentsRequest" - }, - { - "name": "parent", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.cloud.dataplex_v1.services.dataplex_service.pagers.ListEnvironmentsAsyncPager", - "shortName": "list_environments" - }, - "description": "Sample for ListEnvironments", - "file": "dataplex_v1_generated_dataplex_service_list_environments_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "dataplex_v1_generated_DataplexService_ListEnvironments_async", - "segments": [ - { - "end": 52, - "start": 27, - "type": "FULL" - }, - { - "end": 52, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 53, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "dataplex_v1_generated_dataplex_service_list_environments_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.dataplex_v1.DataplexServiceClient", - "shortName": "DataplexServiceClient" - }, - "fullName": "google.cloud.dataplex_v1.DataplexServiceClient.list_environments", - "method": { - "fullName": "google.cloud.dataplex.v1.DataplexService.ListEnvironments", - "service": { - "fullName": "google.cloud.dataplex.v1.DataplexService", - "shortName": "DataplexService" - }, - "shortName": "ListEnvironments" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.dataplex_v1.types.ListEnvironmentsRequest" - }, - { - "name": "parent", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.cloud.dataplex_v1.services.dataplex_service.pagers.ListEnvironmentsPager", - "shortName": "list_environments" - }, - "description": "Sample for ListEnvironments", - "file": "dataplex_v1_generated_dataplex_service_list_environments_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "dataplex_v1_generated_DataplexService_ListEnvironments_sync", - "segments": [ - { - "end": 52, - "start": 27, - "type": "FULL" - }, - { - "end": 52, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 53, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "dataplex_v1_generated_dataplex_service_list_environments_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.dataplex_v1.DataplexServiceAsyncClient", - "shortName": "DataplexServiceAsyncClient" - }, - "fullName": "google.cloud.dataplex_v1.DataplexServiceAsyncClient.list_jobs", - "method": { - "fullName": "google.cloud.dataplex.v1.DataplexService.ListJobs", - "service": { - "fullName": "google.cloud.dataplex.v1.DataplexService", - "shortName": "DataplexService" - }, - 
"shortName": "ListJobs" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.dataplex_v1.types.ListJobsRequest" - }, - { - "name": "parent", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.cloud.dataplex_v1.services.dataplex_service.pagers.ListJobsAsyncPager", - "shortName": "list_jobs" - }, - "description": "Sample for ListJobs", - "file": "dataplex_v1_generated_dataplex_service_list_jobs_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "dataplex_v1_generated_DataplexService_ListJobs_async", - "segments": [ - { - "end": 52, - "start": 27, - "type": "FULL" - }, - { - "end": 52, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 53, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "dataplex_v1_generated_dataplex_service_list_jobs_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.dataplex_v1.DataplexServiceClient", - "shortName": "DataplexServiceClient" - }, - "fullName": "google.cloud.dataplex_v1.DataplexServiceClient.list_jobs", - "method": { - "fullName": "google.cloud.dataplex.v1.DataplexService.ListJobs", - "service": { - "fullName": "google.cloud.dataplex.v1.DataplexService", - "shortName": "DataplexService" - }, - "shortName": "ListJobs" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.dataplex_v1.types.ListJobsRequest" - }, - { - "name": "parent", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.cloud.dataplex_v1.services.dataplex_service.pagers.ListJobsPager", - "shortName": "list_jobs" - }, - "description": "Sample for ListJobs", - "file": "dataplex_v1_generated_dataplex_service_list_jobs_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "dataplex_v1_generated_DataplexService_ListJobs_sync", - "segments": [ - { - "end": 52, - "start": 27, - "type": "FULL" - }, - { - "end": 52, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 53, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "dataplex_v1_generated_dataplex_service_list_jobs_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.dataplex_v1.DataplexServiceAsyncClient", - "shortName": "DataplexServiceAsyncClient" - }, - "fullName": "google.cloud.dataplex_v1.DataplexServiceAsyncClient.list_lake_actions", - "method": { - "fullName": "google.cloud.dataplex.v1.DataplexService.ListLakeActions", - "service": { - "fullName": "google.cloud.dataplex.v1.DataplexService", - "shortName": "DataplexService" - }, - "shortName": "ListLakeActions" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.dataplex_v1.types.ListLakeActionsRequest" - }, - { - "name": "parent", - "type": "str" - }, - { - "name": 
"retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.cloud.dataplex_v1.services.dataplex_service.pagers.ListLakeActionsAsyncPager", - "shortName": "list_lake_actions" - }, - "description": "Sample for ListLakeActions", - "file": "dataplex_v1_generated_dataplex_service_list_lake_actions_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "dataplex_v1_generated_DataplexService_ListLakeActions_async", - "segments": [ - { - "end": 52, - "start": 27, - "type": "FULL" - }, - { - "end": 52, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 53, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "dataplex_v1_generated_dataplex_service_list_lake_actions_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.dataplex_v1.DataplexServiceClient", - "shortName": "DataplexServiceClient" - }, - "fullName": "google.cloud.dataplex_v1.DataplexServiceClient.list_lake_actions", - "method": { - "fullName": "google.cloud.dataplex.v1.DataplexService.ListLakeActions", - "service": { - "fullName": "google.cloud.dataplex.v1.DataplexService", - "shortName": "DataplexService" - }, - "shortName": "ListLakeActions" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.dataplex_v1.types.ListLakeActionsRequest" - }, - { - "name": "parent", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.cloud.dataplex_v1.services.dataplex_service.pagers.ListLakeActionsPager", - "shortName": "list_lake_actions" - }, - "description": "Sample for ListLakeActions", - "file": "dataplex_v1_generated_dataplex_service_list_lake_actions_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "dataplex_v1_generated_DataplexService_ListLakeActions_sync", - "segments": [ - { - "end": 52, - "start": 27, - "type": "FULL" - }, - { - "end": 52, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 53, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "dataplex_v1_generated_dataplex_service_list_lake_actions_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.dataplex_v1.DataplexServiceAsyncClient", - "shortName": "DataplexServiceAsyncClient" - }, - "fullName": "google.cloud.dataplex_v1.DataplexServiceAsyncClient.list_lakes", - "method": { - "fullName": "google.cloud.dataplex.v1.DataplexService.ListLakes", - "service": { - "fullName": "google.cloud.dataplex.v1.DataplexService", - "shortName": "DataplexService" - }, - "shortName": "ListLakes" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.dataplex_v1.types.ListLakesRequest" - }, - { - "name": "parent", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - 
"name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.cloud.dataplex_v1.services.dataplex_service.pagers.ListLakesAsyncPager", - "shortName": "list_lakes" - }, - "description": "Sample for ListLakes", - "file": "dataplex_v1_generated_dataplex_service_list_lakes_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "dataplex_v1_generated_DataplexService_ListLakes_async", - "segments": [ - { - "end": 52, - "start": 27, - "type": "FULL" - }, - { - "end": 52, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 53, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "dataplex_v1_generated_dataplex_service_list_lakes_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.dataplex_v1.DataplexServiceClient", - "shortName": "DataplexServiceClient" - }, - "fullName": "google.cloud.dataplex_v1.DataplexServiceClient.list_lakes", - "method": { - "fullName": "google.cloud.dataplex.v1.DataplexService.ListLakes", - "service": { - "fullName": "google.cloud.dataplex.v1.DataplexService", - "shortName": "DataplexService" - }, - "shortName": "ListLakes" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.dataplex_v1.types.ListLakesRequest" - }, - { - "name": "parent", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.cloud.dataplex_v1.services.dataplex_service.pagers.ListLakesPager", - "shortName": "list_lakes" - }, - "description": "Sample for ListLakes", - "file": "dataplex_v1_generated_dataplex_service_list_lakes_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "dataplex_v1_generated_DataplexService_ListLakes_sync", - "segments": [ - { - "end": 52, - "start": 27, - "type": "FULL" - }, - { - "end": 52, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 53, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "dataplex_v1_generated_dataplex_service_list_lakes_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.dataplex_v1.DataplexServiceAsyncClient", - "shortName": "DataplexServiceAsyncClient" - }, - "fullName": "google.cloud.dataplex_v1.DataplexServiceAsyncClient.list_sessions", - "method": { - "fullName": "google.cloud.dataplex.v1.DataplexService.ListSessions", - "service": { - "fullName": "google.cloud.dataplex.v1.DataplexService", - "shortName": "DataplexService" - }, - "shortName": "ListSessions" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.dataplex_v1.types.ListSessionsRequest" - }, - { - "name": "parent", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.cloud.dataplex_v1.services.dataplex_service.pagers.ListSessionsAsyncPager", - "shortName": "list_sessions" - 
}, - "description": "Sample for ListSessions", - "file": "dataplex_v1_generated_dataplex_service_list_sessions_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "dataplex_v1_generated_DataplexService_ListSessions_async", - "segments": [ - { - "end": 52, - "start": 27, - "type": "FULL" - }, - { - "end": 52, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 53, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "dataplex_v1_generated_dataplex_service_list_sessions_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.dataplex_v1.DataplexServiceClient", - "shortName": "DataplexServiceClient" - }, - "fullName": "google.cloud.dataplex_v1.DataplexServiceClient.list_sessions", - "method": { - "fullName": "google.cloud.dataplex.v1.DataplexService.ListSessions", - "service": { - "fullName": "google.cloud.dataplex.v1.DataplexService", - "shortName": "DataplexService" - }, - "shortName": "ListSessions" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.dataplex_v1.types.ListSessionsRequest" - }, - { - "name": "parent", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.cloud.dataplex_v1.services.dataplex_service.pagers.ListSessionsPager", - "shortName": "list_sessions" - }, - "description": "Sample for ListSessions", - "file": "dataplex_v1_generated_dataplex_service_list_sessions_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "dataplex_v1_generated_DataplexService_ListSessions_sync", - "segments": [ - { - "end": 52, - "start": 27, - "type": "FULL" - }, - { - "end": 52, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 53, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "dataplex_v1_generated_dataplex_service_list_sessions_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.dataplex_v1.DataplexServiceAsyncClient", - "shortName": "DataplexServiceAsyncClient" - }, - "fullName": "google.cloud.dataplex_v1.DataplexServiceAsyncClient.list_tasks", - "method": { - "fullName": "google.cloud.dataplex.v1.DataplexService.ListTasks", - "service": { - "fullName": "google.cloud.dataplex.v1.DataplexService", - "shortName": "DataplexService" - }, - "shortName": "ListTasks" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.dataplex_v1.types.ListTasksRequest" - }, - { - "name": "parent", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.cloud.dataplex_v1.services.dataplex_service.pagers.ListTasksAsyncPager", - "shortName": "list_tasks" - }, - "description": "Sample for ListTasks", - "file": "dataplex_v1_generated_dataplex_service_list_tasks_async.py", - "language": "PYTHON", - "origin": 
"API_DEFINITION", - "regionTag": "dataplex_v1_generated_DataplexService_ListTasks_async", - "segments": [ - { - "end": 52, - "start": 27, - "type": "FULL" - }, - { - "end": 52, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 53, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "dataplex_v1_generated_dataplex_service_list_tasks_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.dataplex_v1.DataplexServiceClient", - "shortName": "DataplexServiceClient" - }, - "fullName": "google.cloud.dataplex_v1.DataplexServiceClient.list_tasks", - "method": { - "fullName": "google.cloud.dataplex.v1.DataplexService.ListTasks", - "service": { - "fullName": "google.cloud.dataplex.v1.DataplexService", - "shortName": "DataplexService" - }, - "shortName": "ListTasks" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.dataplex_v1.types.ListTasksRequest" - }, - { - "name": "parent", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.cloud.dataplex_v1.services.dataplex_service.pagers.ListTasksPager", - "shortName": "list_tasks" - }, - "description": "Sample for ListTasks", - "file": "dataplex_v1_generated_dataplex_service_list_tasks_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "dataplex_v1_generated_DataplexService_ListTasks_sync", - "segments": [ - { - "end": 52, - "start": 27, - "type": "FULL" - }, - { - "end": 52, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 53, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "dataplex_v1_generated_dataplex_service_list_tasks_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.dataplex_v1.DataplexServiceAsyncClient", - "shortName": "DataplexServiceAsyncClient" - }, - "fullName": "google.cloud.dataplex_v1.DataplexServiceAsyncClient.list_zone_actions", - "method": { - "fullName": "google.cloud.dataplex.v1.DataplexService.ListZoneActions", - "service": { - "fullName": "google.cloud.dataplex.v1.DataplexService", - "shortName": "DataplexService" - }, - "shortName": "ListZoneActions" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.dataplex_v1.types.ListZoneActionsRequest" - }, - { - "name": "parent", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.cloud.dataplex_v1.services.dataplex_service.pagers.ListZoneActionsAsyncPager", - "shortName": "list_zone_actions" - }, - "description": "Sample for ListZoneActions", - "file": "dataplex_v1_generated_dataplex_service_list_zone_actions_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "dataplex_v1_generated_DataplexService_ListZoneActions_async", - "segments": [ - { - "end": 52, - "start": 27, - "type": 
"FULL" - }, - { - "end": 52, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 53, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "dataplex_v1_generated_dataplex_service_list_zone_actions_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.dataplex_v1.DataplexServiceClient", - "shortName": "DataplexServiceClient" - }, - "fullName": "google.cloud.dataplex_v1.DataplexServiceClient.list_zone_actions", - "method": { - "fullName": "google.cloud.dataplex.v1.DataplexService.ListZoneActions", - "service": { - "fullName": "google.cloud.dataplex.v1.DataplexService", - "shortName": "DataplexService" - }, - "shortName": "ListZoneActions" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.dataplex_v1.types.ListZoneActionsRequest" - }, - { - "name": "parent", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.cloud.dataplex_v1.services.dataplex_service.pagers.ListZoneActionsPager", - "shortName": "list_zone_actions" - }, - "description": "Sample for ListZoneActions", - "file": "dataplex_v1_generated_dataplex_service_list_zone_actions_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "dataplex_v1_generated_DataplexService_ListZoneActions_sync", - "segments": [ - { - "end": 52, - "start": 27, - "type": "FULL" - }, - { - "end": 52, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 53, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "dataplex_v1_generated_dataplex_service_list_zone_actions_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.dataplex_v1.DataplexServiceAsyncClient", - "shortName": "DataplexServiceAsyncClient" - }, - "fullName": "google.cloud.dataplex_v1.DataplexServiceAsyncClient.list_zones", - "method": { - "fullName": "google.cloud.dataplex.v1.DataplexService.ListZones", - "service": { - "fullName": "google.cloud.dataplex.v1.DataplexService", - "shortName": "DataplexService" - }, - "shortName": "ListZones" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.dataplex_v1.types.ListZonesRequest" - }, - { - "name": "parent", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.cloud.dataplex_v1.services.dataplex_service.pagers.ListZonesAsyncPager", - "shortName": "list_zones" - }, - "description": "Sample for ListZones", - "file": "dataplex_v1_generated_dataplex_service_list_zones_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "dataplex_v1_generated_DataplexService_ListZones_async", - "segments": [ - { - "end": 52, - "start": 27, - "type": "FULL" - }, - { - "end": 52, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - 
{ - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 53, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "dataplex_v1_generated_dataplex_service_list_zones_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.dataplex_v1.DataplexServiceClient", - "shortName": "DataplexServiceClient" - }, - "fullName": "google.cloud.dataplex_v1.DataplexServiceClient.list_zones", - "method": { - "fullName": "google.cloud.dataplex.v1.DataplexService.ListZones", - "service": { - "fullName": "google.cloud.dataplex.v1.DataplexService", - "shortName": "DataplexService" - }, - "shortName": "ListZones" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.dataplex_v1.types.ListZonesRequest" - }, - { - "name": "parent", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.cloud.dataplex_v1.services.dataplex_service.pagers.ListZonesPager", - "shortName": "list_zones" - }, - "description": "Sample for ListZones", - "file": "dataplex_v1_generated_dataplex_service_list_zones_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "dataplex_v1_generated_DataplexService_ListZones_sync", - "segments": [ - { - "end": 52, - "start": 27, - "type": "FULL" - }, - { - "end": 52, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 53, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "dataplex_v1_generated_dataplex_service_list_zones_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.dataplex_v1.DataplexServiceAsyncClient", - "shortName": "DataplexServiceAsyncClient" - }, - "fullName": "google.cloud.dataplex_v1.DataplexServiceAsyncClient.run_task", - "method": { - "fullName": "google.cloud.dataplex.v1.DataplexService.RunTask", - "service": { - "fullName": "google.cloud.dataplex.v1.DataplexService", - "shortName": "DataplexService" - }, - "shortName": "RunTask" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.dataplex_v1.types.RunTaskRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.cloud.dataplex_v1.types.RunTaskResponse", - "shortName": "run_task" - }, - "description": "Sample for RunTask", - "file": "dataplex_v1_generated_dataplex_service_run_task_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "dataplex_v1_generated_DataplexService_RunTask_async", - "segments": [ - { - "end": 51, - "start": 27, - "type": "FULL" - }, - { - "end": 51, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 52, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": 
"dataplex_v1_generated_dataplex_service_run_task_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.dataplex_v1.DataplexServiceClient", - "shortName": "DataplexServiceClient" - }, - "fullName": "google.cloud.dataplex_v1.DataplexServiceClient.run_task", - "method": { - "fullName": "google.cloud.dataplex.v1.DataplexService.RunTask", - "service": { - "fullName": "google.cloud.dataplex.v1.DataplexService", - "shortName": "DataplexService" - }, - "shortName": "RunTask" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.dataplex_v1.types.RunTaskRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.cloud.dataplex_v1.types.RunTaskResponse", - "shortName": "run_task" - }, - "description": "Sample for RunTask", - "file": "dataplex_v1_generated_dataplex_service_run_task_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "dataplex_v1_generated_DataplexService_RunTask_sync", - "segments": [ - { - "end": 51, - "start": 27, - "type": "FULL" - }, - { - "end": 51, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 52, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "dataplex_v1_generated_dataplex_service_run_task_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.dataplex_v1.DataplexServiceAsyncClient", - "shortName": "DataplexServiceAsyncClient" - }, - "fullName": "google.cloud.dataplex_v1.DataplexServiceAsyncClient.update_asset", - "method": { - "fullName": "google.cloud.dataplex.v1.DataplexService.UpdateAsset", - "service": { - "fullName": "google.cloud.dataplex.v1.DataplexService", - "shortName": "DataplexService" - }, - "shortName": "UpdateAsset" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.dataplex_v1.types.UpdateAssetRequest" - }, - { - "name": "asset", - "type": "google.cloud.dataplex_v1.types.Asset" - }, - { - "name": "update_mask", - "type": "google.protobuf.field_mask_pb2.FieldMask" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.api_core.operation_async.AsyncOperation", - "shortName": "update_asset" - }, - "description": "Sample for UpdateAsset", - "file": "dataplex_v1_generated_dataplex_service_update_asset_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "dataplex_v1_generated_DataplexService_UpdateAsset_async", - "segments": [ - { - "end": 58, - "start": 27, - "type": "FULL" - }, - { - "end": 58, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 48, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 55, - "start": 49, - "type": "REQUEST_EXECUTION" - }, - { - "end": 59, - "start": 56, - "type": "RESPONSE_HANDLING" - } - ], - "title": "dataplex_v1_generated_dataplex_service_update_asset_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": 
"google.cloud.dataplex_v1.DataplexServiceClient", - "shortName": "DataplexServiceClient" - }, - "fullName": "google.cloud.dataplex_v1.DataplexServiceClient.update_asset", - "method": { - "fullName": "google.cloud.dataplex.v1.DataplexService.UpdateAsset", - "service": { - "fullName": "google.cloud.dataplex.v1.DataplexService", - "shortName": "DataplexService" - }, - "shortName": "UpdateAsset" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.dataplex_v1.types.UpdateAssetRequest" - }, - { - "name": "asset", - "type": "google.cloud.dataplex_v1.types.Asset" - }, - { - "name": "update_mask", - "type": "google.protobuf.field_mask_pb2.FieldMask" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.api_core.operation.Operation", - "shortName": "update_asset" - }, - "description": "Sample for UpdateAsset", - "file": "dataplex_v1_generated_dataplex_service_update_asset_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "dataplex_v1_generated_DataplexService_UpdateAsset_sync", - "segments": [ - { - "end": 58, - "start": 27, - "type": "FULL" - }, - { - "end": 58, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 48, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 55, - "start": 49, - "type": "REQUEST_EXECUTION" - }, - { - "end": 59, - "start": 56, - "type": "RESPONSE_HANDLING" - } - ], - "title": "dataplex_v1_generated_dataplex_service_update_asset_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.dataplex_v1.DataplexServiceAsyncClient", - "shortName": "DataplexServiceAsyncClient" - }, - "fullName": "google.cloud.dataplex_v1.DataplexServiceAsyncClient.update_environment", - "method": { - "fullName": "google.cloud.dataplex.v1.DataplexService.UpdateEnvironment", - "service": { - "fullName": "google.cloud.dataplex.v1.DataplexService", - "shortName": "DataplexService" - }, - "shortName": "UpdateEnvironment" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.dataplex_v1.types.UpdateEnvironmentRequest" - }, - { - "name": "environment", - "type": "google.cloud.dataplex_v1.types.Environment" - }, - { - "name": "update_mask", - "type": "google.protobuf.field_mask_pb2.FieldMask" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.api_core.operation_async.AsyncOperation", - "shortName": "update_environment" - }, - "description": "Sample for UpdateEnvironment", - "file": "dataplex_v1_generated_dataplex_service_update_environment_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "dataplex_v1_generated_DataplexService_UpdateEnvironment_async", - "segments": [ - { - "end": 58, - "start": 27, - "type": "FULL" - }, - { - "end": 58, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 48, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 55, - "start": 49, - "type": "REQUEST_EXECUTION" - }, - { - "end": 59, - "start": 56, - "type": "RESPONSE_HANDLING" - } - ], - "title": "dataplex_v1_generated_dataplex_service_update_environment_async.py" - }, - { - 
"canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.dataplex_v1.DataplexServiceClient", - "shortName": "DataplexServiceClient" - }, - "fullName": "google.cloud.dataplex_v1.DataplexServiceClient.update_environment", - "method": { - "fullName": "google.cloud.dataplex.v1.DataplexService.UpdateEnvironment", - "service": { - "fullName": "google.cloud.dataplex.v1.DataplexService", - "shortName": "DataplexService" - }, - "shortName": "UpdateEnvironment" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.dataplex_v1.types.UpdateEnvironmentRequest" - }, - { - "name": "environment", - "type": "google.cloud.dataplex_v1.types.Environment" - }, - { - "name": "update_mask", - "type": "google.protobuf.field_mask_pb2.FieldMask" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.api_core.operation.Operation", - "shortName": "update_environment" - }, - "description": "Sample for UpdateEnvironment", - "file": "dataplex_v1_generated_dataplex_service_update_environment_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "dataplex_v1_generated_DataplexService_UpdateEnvironment_sync", - "segments": [ - { - "end": 58, - "start": 27, - "type": "FULL" - }, - { - "end": 58, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 48, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 55, - "start": 49, - "type": "REQUEST_EXECUTION" - }, - { - "end": 59, - "start": 56, - "type": "RESPONSE_HANDLING" - } - ], - "title": "dataplex_v1_generated_dataplex_service_update_environment_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.dataplex_v1.DataplexServiceAsyncClient", - "shortName": "DataplexServiceAsyncClient" - }, - "fullName": "google.cloud.dataplex_v1.DataplexServiceAsyncClient.update_lake", - "method": { - "fullName": "google.cloud.dataplex.v1.DataplexService.UpdateLake", - "service": { - "fullName": "google.cloud.dataplex.v1.DataplexService", - "shortName": "DataplexService" - }, - "shortName": "UpdateLake" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.dataplex_v1.types.UpdateLakeRequest" - }, - { - "name": "lake", - "type": "google.cloud.dataplex_v1.types.Lake" - }, - { - "name": "update_mask", - "type": "google.protobuf.field_mask_pb2.FieldMask" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.api_core.operation_async.AsyncOperation", - "shortName": "update_lake" - }, - "description": "Sample for UpdateLake", - "file": "dataplex_v1_generated_dataplex_service_update_lake_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "dataplex_v1_generated_DataplexService_UpdateLake_async", - "segments": [ - { - "end": 54, - "start": 27, - "type": "FULL" - }, - { - "end": 54, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 44, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 51, - "start": 45, - "type": "REQUEST_EXECUTION" - }, - { - "end": 55, - "start": 52, - "type": "RESPONSE_HANDLING" - } - ], - "title": 
"dataplex_v1_generated_dataplex_service_update_lake_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.dataplex_v1.DataplexServiceClient", - "shortName": "DataplexServiceClient" - }, - "fullName": "google.cloud.dataplex_v1.DataplexServiceClient.update_lake", - "method": { - "fullName": "google.cloud.dataplex.v1.DataplexService.UpdateLake", - "service": { - "fullName": "google.cloud.dataplex.v1.DataplexService", - "shortName": "DataplexService" - }, - "shortName": "UpdateLake" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.dataplex_v1.types.UpdateLakeRequest" - }, - { - "name": "lake", - "type": "google.cloud.dataplex_v1.types.Lake" - }, - { - "name": "update_mask", - "type": "google.protobuf.field_mask_pb2.FieldMask" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.api_core.operation.Operation", - "shortName": "update_lake" - }, - "description": "Sample for UpdateLake", - "file": "dataplex_v1_generated_dataplex_service_update_lake_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "dataplex_v1_generated_DataplexService_UpdateLake_sync", - "segments": [ - { - "end": 54, - "start": 27, - "type": "FULL" - }, - { - "end": 54, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 44, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 51, - "start": 45, - "type": "REQUEST_EXECUTION" - }, - { - "end": 55, - "start": 52, - "type": "RESPONSE_HANDLING" - } - ], - "title": "dataplex_v1_generated_dataplex_service_update_lake_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.dataplex_v1.DataplexServiceAsyncClient", - "shortName": "DataplexServiceAsyncClient" - }, - "fullName": "google.cloud.dataplex_v1.DataplexServiceAsyncClient.update_task", - "method": { - "fullName": "google.cloud.dataplex.v1.DataplexService.UpdateTask", - "service": { - "fullName": "google.cloud.dataplex.v1.DataplexService", - "shortName": "DataplexService" - }, - "shortName": "UpdateTask" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.dataplex_v1.types.UpdateTaskRequest" - }, - { - "name": "task", - "type": "google.cloud.dataplex_v1.types.Task" - }, - { - "name": "update_mask", - "type": "google.protobuf.field_mask_pb2.FieldMask" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.api_core.operation_async.AsyncOperation", - "shortName": "update_task" - }, - "description": "Sample for UpdateTask", - "file": "dataplex_v1_generated_dataplex_service_update_task_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "dataplex_v1_generated_DataplexService_UpdateTask_async", - "segments": [ - { - "end": 61, - "start": 27, - "type": "FULL" - }, - { - "end": 61, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 51, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 58, - "start": 52, - "type": "REQUEST_EXECUTION" - }, - { - "end": 62, - "start": 59, - "type": "RESPONSE_HANDLING" - } - ], - "title": 
"dataplex_v1_generated_dataplex_service_update_task_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.dataplex_v1.DataplexServiceClient", - "shortName": "DataplexServiceClient" - }, - "fullName": "google.cloud.dataplex_v1.DataplexServiceClient.update_task", - "method": { - "fullName": "google.cloud.dataplex.v1.DataplexService.UpdateTask", - "service": { - "fullName": "google.cloud.dataplex.v1.DataplexService", - "shortName": "DataplexService" - }, - "shortName": "UpdateTask" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.dataplex_v1.types.UpdateTaskRequest" - }, - { - "name": "task", - "type": "google.cloud.dataplex_v1.types.Task" - }, - { - "name": "update_mask", - "type": "google.protobuf.field_mask_pb2.FieldMask" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.api_core.operation.Operation", - "shortName": "update_task" - }, - "description": "Sample for UpdateTask", - "file": "dataplex_v1_generated_dataplex_service_update_task_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "dataplex_v1_generated_DataplexService_UpdateTask_sync", - "segments": [ - { - "end": 61, - "start": 27, - "type": "FULL" - }, - { - "end": 61, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 51, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 58, - "start": 52, - "type": "REQUEST_EXECUTION" - }, - { - "end": 62, - "start": 59, - "type": "RESPONSE_HANDLING" - } - ], - "title": "dataplex_v1_generated_dataplex_service_update_task_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.dataplex_v1.DataplexServiceAsyncClient", - "shortName": "DataplexServiceAsyncClient" - }, - "fullName": "google.cloud.dataplex_v1.DataplexServiceAsyncClient.update_zone", - "method": { - "fullName": "google.cloud.dataplex.v1.DataplexService.UpdateZone", - "service": { - "fullName": "google.cloud.dataplex.v1.DataplexService", - "shortName": "DataplexService" - }, - "shortName": "UpdateZone" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.dataplex_v1.types.UpdateZoneRequest" - }, - { - "name": "zone", - "type": "google.cloud.dataplex_v1.types.Zone" - }, - { - "name": "update_mask", - "type": "google.protobuf.field_mask_pb2.FieldMask" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.api_core.operation_async.AsyncOperation", - "shortName": "update_zone" - }, - "description": "Sample for UpdateZone", - "file": "dataplex_v1_generated_dataplex_service_update_zone_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "dataplex_v1_generated_DataplexService_UpdateZone_async", - "segments": [ - { - "end": 59, - "start": 27, - "type": "FULL" - }, - { - "end": 59, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 49, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 56, - "start": 50, - "type": "REQUEST_EXECUTION" - }, - { - "end": 60, - "start": 57, - "type": "RESPONSE_HANDLING" - } - ], - "title": 
"dataplex_v1_generated_dataplex_service_update_zone_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.dataplex_v1.DataplexServiceClient", - "shortName": "DataplexServiceClient" - }, - "fullName": "google.cloud.dataplex_v1.DataplexServiceClient.update_zone", - "method": { - "fullName": "google.cloud.dataplex.v1.DataplexService.UpdateZone", - "service": { - "fullName": "google.cloud.dataplex.v1.DataplexService", - "shortName": "DataplexService" - }, - "shortName": "UpdateZone" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.dataplex_v1.types.UpdateZoneRequest" - }, - { - "name": "zone", - "type": "google.cloud.dataplex_v1.types.Zone" - }, - { - "name": "update_mask", - "type": "google.protobuf.field_mask_pb2.FieldMask" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.api_core.operation.Operation", - "shortName": "update_zone" - }, - "description": "Sample for UpdateZone", - "file": "dataplex_v1_generated_dataplex_service_update_zone_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "dataplex_v1_generated_DataplexService_UpdateZone_sync", - "segments": [ - { - "end": 59, - "start": 27, - "type": "FULL" - }, - { - "end": 59, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 49, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 56, - "start": 50, - "type": "REQUEST_EXECUTION" - }, - { - "end": 60, - "start": 57, - "type": "RESPONSE_HANDLING" - } - ], - "title": "dataplex_v1_generated_dataplex_service_update_zone_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.dataplex_v1.MetadataServiceAsyncClient", - "shortName": "MetadataServiceAsyncClient" - }, - "fullName": "google.cloud.dataplex_v1.MetadataServiceAsyncClient.create_entity", - "method": { - "fullName": "google.cloud.dataplex.v1.MetadataService.CreateEntity", - "service": { - "fullName": "google.cloud.dataplex.v1.MetadataService", - "shortName": "MetadataService" - }, - "shortName": "CreateEntity" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.dataplex_v1.types.CreateEntityRequest" - }, - { - "name": "parent", - "type": "str" - }, - { - "name": "entity", - "type": "google.cloud.dataplex_v1.types.Entity" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.cloud.dataplex_v1.types.Entity", - "shortName": "create_entity" - }, - "description": "Sample for CreateEntity", - "file": "dataplex_v1_generated_metadata_service_create_entity_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "dataplex_v1_generated_MetadataService_CreateEntity_async", - "segments": [ - { - "end": 61, - "start": 27, - "type": "FULL" - }, - { - "end": 61, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 55, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 58, - "start": 56, - "type": "REQUEST_EXECUTION" - }, - { - "end": 62, - "start": 59, - "type": "RESPONSE_HANDLING" - } - ], - "title": 
"dataplex_v1_generated_metadata_service_create_entity_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.dataplex_v1.MetadataServiceClient", - "shortName": "MetadataServiceClient" - }, - "fullName": "google.cloud.dataplex_v1.MetadataServiceClient.create_entity", - "method": { - "fullName": "google.cloud.dataplex.v1.MetadataService.CreateEntity", - "service": { - "fullName": "google.cloud.dataplex.v1.MetadataService", - "shortName": "MetadataService" - }, - "shortName": "CreateEntity" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.dataplex_v1.types.CreateEntityRequest" - }, - { - "name": "parent", - "type": "str" - }, - { - "name": "entity", - "type": "google.cloud.dataplex_v1.types.Entity" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.cloud.dataplex_v1.types.Entity", - "shortName": "create_entity" - }, - "description": "Sample for CreateEntity", - "file": "dataplex_v1_generated_metadata_service_create_entity_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "dataplex_v1_generated_MetadataService_CreateEntity_sync", - "segments": [ - { - "end": 61, - "start": 27, - "type": "FULL" - }, - { - "end": 61, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 55, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 58, - "start": 56, - "type": "REQUEST_EXECUTION" - }, - { - "end": 62, - "start": 59, - "type": "RESPONSE_HANDLING" - } - ], - "title": "dataplex_v1_generated_metadata_service_create_entity_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.dataplex_v1.MetadataServiceAsyncClient", - "shortName": "MetadataServiceAsyncClient" - }, - "fullName": "google.cloud.dataplex_v1.MetadataServiceAsyncClient.create_partition", - "method": { - "fullName": "google.cloud.dataplex.v1.MetadataService.CreatePartition", - "service": { - "fullName": "google.cloud.dataplex.v1.MetadataService", - "shortName": "MetadataService" - }, - "shortName": "CreatePartition" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.dataplex_v1.types.CreatePartitionRequest" - }, - { - "name": "parent", - "type": "str" - }, - { - "name": "partition", - "type": "google.cloud.dataplex_v1.types.Partition" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.cloud.dataplex_v1.types.Partition", - "shortName": "create_partition" - }, - "description": "Sample for CreatePartition", - "file": "dataplex_v1_generated_metadata_service_create_partition_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "dataplex_v1_generated_MetadataService_CreatePartition_async", - "segments": [ - { - "end": 56, - "start": 27, - "type": "FULL" - }, - { - "end": 56, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 50, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 53, - "start": 51, - "type": "REQUEST_EXECUTION" - }, - { - "end": 57, - "start": 54, - "type": "RESPONSE_HANDLING" - } - ], - "title": 
"dataplex_v1_generated_metadata_service_create_partition_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.dataplex_v1.MetadataServiceClient", - "shortName": "MetadataServiceClient" - }, - "fullName": "google.cloud.dataplex_v1.MetadataServiceClient.create_partition", - "method": { - "fullName": "google.cloud.dataplex.v1.MetadataService.CreatePartition", - "service": { - "fullName": "google.cloud.dataplex.v1.MetadataService", - "shortName": "MetadataService" - }, - "shortName": "CreatePartition" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.dataplex_v1.types.CreatePartitionRequest" - }, - { - "name": "parent", - "type": "str" - }, - { - "name": "partition", - "type": "google.cloud.dataplex_v1.types.Partition" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.cloud.dataplex_v1.types.Partition", - "shortName": "create_partition" - }, - "description": "Sample for CreatePartition", - "file": "dataplex_v1_generated_metadata_service_create_partition_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "dataplex_v1_generated_MetadataService_CreatePartition_sync", - "segments": [ - { - "end": 56, - "start": 27, - "type": "FULL" - }, - { - "end": 56, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 50, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 53, - "start": 51, - "type": "REQUEST_EXECUTION" - }, - { - "end": 57, - "start": 54, - "type": "RESPONSE_HANDLING" - } - ], - "title": "dataplex_v1_generated_metadata_service_create_partition_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.dataplex_v1.MetadataServiceAsyncClient", - "shortName": "MetadataServiceAsyncClient" - }, - "fullName": "google.cloud.dataplex_v1.MetadataServiceAsyncClient.delete_entity", - "method": { - "fullName": "google.cloud.dataplex.v1.MetadataService.DeleteEntity", - "service": { - "fullName": "google.cloud.dataplex.v1.MetadataService", - "shortName": "MetadataService" - }, - "shortName": "DeleteEntity" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.dataplex_v1.types.DeleteEntityRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "shortName": "delete_entity" - }, - "description": "Sample for DeleteEntity", - "file": "dataplex_v1_generated_metadata_service_delete_entity_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "dataplex_v1_generated_MetadataService_DeleteEntity_async", - "segments": [ - { - "end": 50, - "start": 27, - "type": "FULL" - }, - { - "end": 50, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 46, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "start": 47, - "type": "REQUEST_EXECUTION" - }, - { - "end": 51, - "type": "RESPONSE_HANDLING" - } - ], - "title": "dataplex_v1_generated_metadata_service_delete_entity_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.dataplex_v1.MetadataServiceClient", - 
"shortName": "MetadataServiceClient" - }, - "fullName": "google.cloud.dataplex_v1.MetadataServiceClient.delete_entity", - "method": { - "fullName": "google.cloud.dataplex.v1.MetadataService.DeleteEntity", - "service": { - "fullName": "google.cloud.dataplex.v1.MetadataService", - "shortName": "MetadataService" - }, - "shortName": "DeleteEntity" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.dataplex_v1.types.DeleteEntityRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "shortName": "delete_entity" - }, - "description": "Sample for DeleteEntity", - "file": "dataplex_v1_generated_metadata_service_delete_entity_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "dataplex_v1_generated_MetadataService_DeleteEntity_sync", - "segments": [ - { - "end": 50, - "start": 27, - "type": "FULL" - }, - { - "end": 50, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 46, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "start": 47, - "type": "REQUEST_EXECUTION" - }, - { - "end": 51, - "type": "RESPONSE_HANDLING" - } - ], - "title": "dataplex_v1_generated_metadata_service_delete_entity_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.dataplex_v1.MetadataServiceAsyncClient", - "shortName": "MetadataServiceAsyncClient" - }, - "fullName": "google.cloud.dataplex_v1.MetadataServiceAsyncClient.delete_partition", - "method": { - "fullName": "google.cloud.dataplex.v1.MetadataService.DeletePartition", - "service": { - "fullName": "google.cloud.dataplex.v1.MetadataService", - "shortName": "MetadataService" - }, - "shortName": "DeletePartition" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.dataplex_v1.types.DeletePartitionRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "shortName": "delete_partition" - }, - "description": "Sample for DeletePartition", - "file": "dataplex_v1_generated_metadata_service_delete_partition_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "dataplex_v1_generated_MetadataService_DeletePartition_async", - "segments": [ - { - "end": 49, - "start": 27, - "type": "FULL" - }, - { - "end": 49, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 50, - "type": "RESPONSE_HANDLING" - } - ], - "title": "dataplex_v1_generated_metadata_service_delete_partition_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.dataplex_v1.MetadataServiceClient", - "shortName": "MetadataServiceClient" - }, - "fullName": "google.cloud.dataplex_v1.MetadataServiceClient.delete_partition", - "method": { - "fullName": "google.cloud.dataplex.v1.MetadataService.DeletePartition", - "service": { - "fullName": "google.cloud.dataplex.v1.MetadataService", - "shortName": "MetadataService" - }, - "shortName": "DeletePartition" - }, - "parameters": [ 
- { - "name": "request", - "type": "google.cloud.dataplex_v1.types.DeletePartitionRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "shortName": "delete_partition" - }, - "description": "Sample for DeletePartition", - "file": "dataplex_v1_generated_metadata_service_delete_partition_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "dataplex_v1_generated_MetadataService_DeletePartition_sync", - "segments": [ - { - "end": 49, - "start": 27, - "type": "FULL" - }, - { - "end": 49, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 50, - "type": "RESPONSE_HANDLING" - } - ], - "title": "dataplex_v1_generated_metadata_service_delete_partition_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.dataplex_v1.MetadataServiceAsyncClient", - "shortName": "MetadataServiceAsyncClient" - }, - "fullName": "google.cloud.dataplex_v1.MetadataServiceAsyncClient.get_entity", - "method": { - "fullName": "google.cloud.dataplex.v1.MetadataService.GetEntity", - "service": { - "fullName": "google.cloud.dataplex.v1.MetadataService", - "shortName": "MetadataService" - }, - "shortName": "GetEntity" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.dataplex_v1.types.GetEntityRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.cloud.dataplex_v1.types.Entity", - "shortName": "get_entity" - }, - "description": "Sample for GetEntity", - "file": "dataplex_v1_generated_metadata_service_get_entity_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "dataplex_v1_generated_MetadataService_GetEntity_async", - "segments": [ - { - "end": 51, - "start": 27, - "type": "FULL" - }, - { - "end": 51, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 52, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "dataplex_v1_generated_metadata_service_get_entity_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.dataplex_v1.MetadataServiceClient", - "shortName": "MetadataServiceClient" - }, - "fullName": "google.cloud.dataplex_v1.MetadataServiceClient.get_entity", - "method": { - "fullName": "google.cloud.dataplex.v1.MetadataService.GetEntity", - "service": { - "fullName": "google.cloud.dataplex.v1.MetadataService", - "shortName": "MetadataService" - }, - "shortName": "GetEntity" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.dataplex_v1.types.GetEntityRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": 
"google.cloud.dataplex_v1.types.Entity", - "shortName": "get_entity" - }, - "description": "Sample for GetEntity", - "file": "dataplex_v1_generated_metadata_service_get_entity_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "dataplex_v1_generated_MetadataService_GetEntity_sync", - "segments": [ - { - "end": 51, - "start": 27, - "type": "FULL" - }, - { - "end": 51, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 52, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "dataplex_v1_generated_metadata_service_get_entity_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.dataplex_v1.MetadataServiceAsyncClient", - "shortName": "MetadataServiceAsyncClient" - }, - "fullName": "google.cloud.dataplex_v1.MetadataServiceAsyncClient.get_partition", - "method": { - "fullName": "google.cloud.dataplex.v1.MetadataService.GetPartition", - "service": { - "fullName": "google.cloud.dataplex.v1.MetadataService", - "shortName": "MetadataService" - }, - "shortName": "GetPartition" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.dataplex_v1.types.GetPartitionRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.cloud.dataplex_v1.types.Partition", - "shortName": "get_partition" - }, - "description": "Sample for GetPartition", - "file": "dataplex_v1_generated_metadata_service_get_partition_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "dataplex_v1_generated_MetadataService_GetPartition_async", - "segments": [ - { - "end": 51, - "start": 27, - "type": "FULL" - }, - { - "end": 51, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 52, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "dataplex_v1_generated_metadata_service_get_partition_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.dataplex_v1.MetadataServiceClient", - "shortName": "MetadataServiceClient" - }, - "fullName": "google.cloud.dataplex_v1.MetadataServiceClient.get_partition", - "method": { - "fullName": "google.cloud.dataplex.v1.MetadataService.GetPartition", - "service": { - "fullName": "google.cloud.dataplex.v1.MetadataService", - "shortName": "MetadataService" - }, - "shortName": "GetPartition" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.dataplex_v1.types.GetPartitionRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.cloud.dataplex_v1.types.Partition", - "shortName": "get_partition" - }, - "description": "Sample for GetPartition", - "file": "dataplex_v1_generated_metadata_service_get_partition_sync.py", - "language": "PYTHON", - "origin": 
"API_DEFINITION", - "regionTag": "dataplex_v1_generated_MetadataService_GetPartition_sync", - "segments": [ - { - "end": 51, - "start": 27, - "type": "FULL" - }, - { - "end": 51, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 52, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "dataplex_v1_generated_metadata_service_get_partition_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.dataplex_v1.MetadataServiceAsyncClient", - "shortName": "MetadataServiceAsyncClient" - }, - "fullName": "google.cloud.dataplex_v1.MetadataServiceAsyncClient.list_entities", - "method": { - "fullName": "google.cloud.dataplex.v1.MetadataService.ListEntities", - "service": { - "fullName": "google.cloud.dataplex.v1.MetadataService", - "shortName": "MetadataService" - }, - "shortName": "ListEntities" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.dataplex_v1.types.ListEntitiesRequest" - }, - { - "name": "parent", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.cloud.dataplex_v1.services.metadata_service.pagers.ListEntitiesAsyncPager", - "shortName": "list_entities" - }, - "description": "Sample for ListEntities", - "file": "dataplex_v1_generated_metadata_service_list_entities_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "dataplex_v1_generated_MetadataService_ListEntities_async", - "segments": [ - { - "end": 53, - "start": 27, - "type": "FULL" - }, - { - "end": 53, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 46, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 49, - "start": 47, - "type": "REQUEST_EXECUTION" - }, - { - "end": 54, - "start": 50, - "type": "RESPONSE_HANDLING" - } - ], - "title": "dataplex_v1_generated_metadata_service_list_entities_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.dataplex_v1.MetadataServiceClient", - "shortName": "MetadataServiceClient" - }, - "fullName": "google.cloud.dataplex_v1.MetadataServiceClient.list_entities", - "method": { - "fullName": "google.cloud.dataplex.v1.MetadataService.ListEntities", - "service": { - "fullName": "google.cloud.dataplex.v1.MetadataService", - "shortName": "MetadataService" - }, - "shortName": "ListEntities" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.dataplex_v1.types.ListEntitiesRequest" - }, - { - "name": "parent", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.cloud.dataplex_v1.services.metadata_service.pagers.ListEntitiesPager", - "shortName": "list_entities" - }, - "description": "Sample for ListEntities", - "file": "dataplex_v1_generated_metadata_service_list_entities_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "dataplex_v1_generated_MetadataService_ListEntities_sync", - "segments": [ - { - "end": 53, - "start": 27, - "type": 
"FULL" - }, - { - "end": 53, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 46, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 49, - "start": 47, - "type": "REQUEST_EXECUTION" - }, - { - "end": 54, - "start": 50, - "type": "RESPONSE_HANDLING" - } - ], - "title": "dataplex_v1_generated_metadata_service_list_entities_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.dataplex_v1.MetadataServiceAsyncClient", - "shortName": "MetadataServiceAsyncClient" - }, - "fullName": "google.cloud.dataplex_v1.MetadataServiceAsyncClient.list_partitions", - "method": { - "fullName": "google.cloud.dataplex.v1.MetadataService.ListPartitions", - "service": { - "fullName": "google.cloud.dataplex.v1.MetadataService", - "shortName": "MetadataService" - }, - "shortName": "ListPartitions" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.dataplex_v1.types.ListPartitionsRequest" - }, - { - "name": "parent", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.cloud.dataplex_v1.services.metadata_service.pagers.ListPartitionsAsyncPager", - "shortName": "list_partitions" - }, - "description": "Sample for ListPartitions", - "file": "dataplex_v1_generated_metadata_service_list_partitions_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "dataplex_v1_generated_MetadataService_ListPartitions_async", - "segments": [ - { - "end": 52, - "start": 27, - "type": "FULL" - }, - { - "end": 52, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 53, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "dataplex_v1_generated_metadata_service_list_partitions_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.dataplex_v1.MetadataServiceClient", - "shortName": "MetadataServiceClient" - }, - "fullName": "google.cloud.dataplex_v1.MetadataServiceClient.list_partitions", - "method": { - "fullName": "google.cloud.dataplex.v1.MetadataService.ListPartitions", - "service": { - "fullName": "google.cloud.dataplex.v1.MetadataService", - "shortName": "MetadataService" - }, - "shortName": "ListPartitions" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.dataplex_v1.types.ListPartitionsRequest" - }, - { - "name": "parent", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.cloud.dataplex_v1.services.metadata_service.pagers.ListPartitionsPager", - "shortName": "list_partitions" - }, - "description": "Sample for ListPartitions", - "file": "dataplex_v1_generated_metadata_service_list_partitions_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "dataplex_v1_generated_MetadataService_ListPartitions_sync", - "segments": [ - { - "end": 52, - "start": 27, - "type": "FULL" - }, - { - "end": 52, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": 
"CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 53, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "dataplex_v1_generated_metadata_service_list_partitions_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.dataplex_v1.MetadataServiceAsyncClient", - "shortName": "MetadataServiceAsyncClient" - }, - "fullName": "google.cloud.dataplex_v1.MetadataServiceAsyncClient.update_entity", - "method": { - "fullName": "google.cloud.dataplex.v1.MetadataService.UpdateEntity", - "service": { - "fullName": "google.cloud.dataplex.v1.MetadataService", - "shortName": "MetadataService" - }, - "shortName": "UpdateEntity" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.dataplex_v1.types.UpdateEntityRequest" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.cloud.dataplex_v1.types.Entity", - "shortName": "update_entity" - }, - "description": "Sample for UpdateEntity", - "file": "dataplex_v1_generated_metadata_service_update_entity_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "dataplex_v1_generated_MetadataService_UpdateEntity_async", - "segments": [ - { - "end": 60, - "start": 27, - "type": "FULL" - }, - { - "end": 60, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 54, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 57, - "start": 55, - "type": "REQUEST_EXECUTION" - }, - { - "end": 61, - "start": 58, - "type": "RESPONSE_HANDLING" - } - ], - "title": "dataplex_v1_generated_metadata_service_update_entity_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.dataplex_v1.MetadataServiceClient", - "shortName": "MetadataServiceClient" - }, - "fullName": "google.cloud.dataplex_v1.MetadataServiceClient.update_entity", - "method": { - "fullName": "google.cloud.dataplex.v1.MetadataService.UpdateEntity", - "service": { - "fullName": "google.cloud.dataplex.v1.MetadataService", - "shortName": "MetadataService" - }, - "shortName": "UpdateEntity" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.dataplex_v1.types.UpdateEntityRequest" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.cloud.dataplex_v1.types.Entity", - "shortName": "update_entity" - }, - "description": "Sample for UpdateEntity", - "file": "dataplex_v1_generated_metadata_service_update_entity_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "dataplex_v1_generated_MetadataService_UpdateEntity_sync", - "segments": [ - { - "end": 60, - "start": 27, - "type": "FULL" - }, - { - "end": 60, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 54, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 57, - "start": 55, - "type": "REQUEST_EXECUTION" - }, - { - "end": 61, - "start": 58, - "type": "RESPONSE_HANDLING" - } - ], - "title": "dataplex_v1_generated_metadata_service_update_entity_sync.py" - } - 
] -} diff --git a/owl-bot-staging/google-cloud-dataplex/v1/scripts/fixup_dataplex_v1_keywords.py b/owl-bot-staging/google-cloud-dataplex/v1/scripts/fixup_dataplex_v1_keywords.py deleted file mode 100644 index 6681941351bf..000000000000 --- a/owl-bot-staging/google-cloud-dataplex/v1/scripts/fixup_dataplex_v1_keywords.py +++ /dev/null @@ -1,275 +0,0 @@ -#! /usr/bin/env python3 -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -import argparse -import os -import libcst as cst -import pathlib -import sys -from typing import (Any, Callable, Dict, List, Sequence, Tuple) - - -def partition( - predicate: Callable[[Any], bool], - iterator: Sequence[Any] -) -> Tuple[List[Any], List[Any]]: - """A stable, out-of-place partition.""" - results = ([], []) - - for i in iterator: - results[int(predicate(i))].append(i) - - # Returns trueList, falseList - return results[1], results[0] - - -class dataplexCallTransformer(cst.CSTTransformer): - CTRL_PARAMS: Tuple[str] = ('retry', 'timeout', 'metadata') - METHOD_TO_PARAMS: Dict[str, Tuple[str]] = { - 'cancel_job': ('name', ), - 'cancel_metadata_job': ('name', ), - 'create_aspect_type': ('parent', 'aspect_type_id', 'aspect_type', 'validate_only', ), - 'create_asset': ('parent', 'asset_id', 'asset', 'validate_only', ), - 'create_content': ('parent', 'content', 'validate_only', ), - 'create_data_attribute': ('parent', 'data_attribute_id', 'data_attribute', 'validate_only', ), - 'create_data_attribute_binding': ('parent', 'data_attribute_binding_id', 'data_attribute_binding', 'validate_only', ), - 'create_data_scan': ('parent', 'data_scan', 'data_scan_id', 'validate_only', ), - 'create_data_taxonomy': ('parent', 'data_taxonomy_id', 'data_taxonomy', 'validate_only', ), - 'create_entity': ('parent', 'entity', 'validate_only', ), - 'create_entry': ('parent', 'entry_id', 'entry', ), - 'create_entry_group': ('parent', 'entry_group_id', 'entry_group', 'validate_only', ), - 'create_entry_type': ('parent', 'entry_type_id', 'entry_type', 'validate_only', ), - 'create_environment': ('parent', 'environment_id', 'environment', 'validate_only', ), - 'create_lake': ('parent', 'lake_id', 'lake', 'validate_only', ), - 'create_metadata_job': ('parent', 'metadata_job', 'metadata_job_id', 'validate_only', ), - 'create_partition': ('parent', 'partition', 'validate_only', ), - 'create_task': ('parent', 'task_id', 'task', 'validate_only', ), - 'create_zone': ('parent', 'zone_id', 'zone', 'validate_only', ), - 'delete_aspect_type': ('name', 'etag', ), - 'delete_asset': ('name', ), - 'delete_content': ('name', ), - 'delete_data_attribute': ('name', 'etag', ), - 'delete_data_attribute_binding': ('name', 'etag', ), - 'delete_data_scan': ('name', ), - 'delete_data_taxonomy': ('name', 'etag', ), - 'delete_entity': ('name', 'etag', ), - 'delete_entry': ('name', ), - 'delete_entry_group': ('name', 'etag', ), - 'delete_entry_type': ('name', 'etag', ), - 'delete_environment': ('name', ), - 'delete_lake': ('name', ), - 'delete_partition': ('name', 
'etag', ), - 'delete_task': ('name', ), - 'delete_zone': ('name', ), - 'generate_data_quality_rules': ('name', ), - 'get_aspect_type': ('name', ), - 'get_asset': ('name', ), - 'get_content': ('name', 'view', ), - 'get_data_attribute': ('name', ), - 'get_data_attribute_binding': ('name', ), - 'get_data_scan': ('name', 'view', ), - 'get_data_scan_job': ('name', 'view', ), - 'get_data_taxonomy': ('name', ), - 'get_entity': ('name', 'view', ), - 'get_entry': ('name', 'view', 'aspect_types', 'paths', ), - 'get_entry_group': ('name', ), - 'get_entry_type': ('name', ), - 'get_environment': ('name', ), - 'get_iam_policy': ('resource', 'options', ), - 'get_job': ('name', ), - 'get_lake': ('name', ), - 'get_metadata_job': ('name', ), - 'get_partition': ('name', ), - 'get_task': ('name', ), - 'get_zone': ('name', ), - 'list_aspect_types': ('parent', 'page_size', 'page_token', 'filter', 'order_by', ), - 'list_asset_actions': ('parent', 'page_size', 'page_token', ), - 'list_assets': ('parent', 'page_size', 'page_token', 'filter', 'order_by', ), - 'list_content': ('parent', 'page_size', 'page_token', 'filter', ), - 'list_data_attribute_bindings': ('parent', 'page_size', 'page_token', 'filter', 'order_by', ), - 'list_data_attributes': ('parent', 'page_size', 'page_token', 'filter', 'order_by', ), - 'list_data_scan_jobs': ('parent', 'page_size', 'page_token', 'filter', ), - 'list_data_scans': ('parent', 'page_size', 'page_token', 'filter', 'order_by', ), - 'list_data_taxonomies': ('parent', 'page_size', 'page_token', 'filter', 'order_by', ), - 'list_entities': ('parent', 'view', 'page_size', 'page_token', 'filter', ), - 'list_entries': ('parent', 'page_size', 'page_token', 'filter', ), - 'list_entry_groups': ('parent', 'page_size', 'page_token', 'filter', 'order_by', ), - 'list_entry_types': ('parent', 'page_size', 'page_token', 'filter', 'order_by', ), - 'list_environments': ('parent', 'page_size', 'page_token', 'filter', 'order_by', ), - 'list_jobs': ('parent', 'page_size', 'page_token', ), - 'list_lake_actions': ('parent', 'page_size', 'page_token', ), - 'list_lakes': ('parent', 'page_size', 'page_token', 'filter', 'order_by', ), - 'list_metadata_jobs': ('parent', 'page_size', 'page_token', 'filter', 'order_by', ), - 'list_partitions': ('parent', 'page_size', 'page_token', 'filter', ), - 'list_sessions': ('parent', 'page_size', 'page_token', 'filter', ), - 'list_tasks': ('parent', 'page_size', 'page_token', 'filter', 'order_by', ), - 'list_zone_actions': ('parent', 'page_size', 'page_token', ), - 'list_zones': ('parent', 'page_size', 'page_token', 'filter', 'order_by', ), - 'lookup_entry': ('name', 'entry', 'view', 'aspect_types', 'paths', ), - 'run_data_scan': ('name', ), - 'run_task': ('name', 'labels', 'args', ), - 'search_entries': ('name', 'query', 'page_size', 'page_token', 'order_by', 'scope', ), - 'set_iam_policy': ('resource', 'policy', 'update_mask', ), - 'test_iam_permissions': ('resource', 'permissions', ), - 'update_aspect_type': ('aspect_type', 'update_mask', 'validate_only', ), - 'update_asset': ('update_mask', 'asset', 'validate_only', ), - 'update_content': ('update_mask', 'content', 'validate_only', ), - 'update_data_attribute': ('update_mask', 'data_attribute', 'validate_only', ), - 'update_data_attribute_binding': ('update_mask', 'data_attribute_binding', 'validate_only', ), - 'update_data_scan': ('data_scan', 'update_mask', 'validate_only', ), - 'update_data_taxonomy': ('update_mask', 'data_taxonomy', 'validate_only', ), - 'update_entity': ('entity', 'validate_only', ), - 
'update_entry': ('entry', 'update_mask', 'allow_missing', 'delete_missing_aspects', 'aspect_keys', ), - 'update_entry_group': ('entry_group', 'update_mask', 'validate_only', ), - 'update_entry_type': ('entry_type', 'update_mask', 'validate_only', ), - 'update_environment': ('update_mask', 'environment', 'validate_only', ), - 'update_lake': ('update_mask', 'lake', 'validate_only', ), - 'update_task': ('update_mask', 'task', 'validate_only', ), - 'update_zone': ('update_mask', 'zone', 'validate_only', ), - } - - def leave_Call(self, original: cst.Call, updated: cst.Call) -> cst.CSTNode: - try: - key = original.func.attr.value - kword_params = self.METHOD_TO_PARAMS[key] - except (AttributeError, KeyError): - # Either not a method from the API or too convoluted to be sure. - return updated - - # If the existing code is valid, keyword args come after positional args. - # Therefore, all positional args must map to the first parameters. - args, kwargs = partition(lambda a: not bool(a.keyword), updated.args) - if any(k.keyword.value == "request" for k in kwargs): - # We've already fixed this file, don't fix it again. - return updated - - kwargs, ctrl_kwargs = partition( - lambda a: a.keyword.value not in self.CTRL_PARAMS, - kwargs - ) - - args, ctrl_args = args[:len(kword_params)], args[len(kword_params):] - ctrl_kwargs.extend(cst.Arg(value=a.value, keyword=cst.Name(value=ctrl)) - for a, ctrl in zip(ctrl_args, self.CTRL_PARAMS)) - - request_arg = cst.Arg( - value=cst.Dict([ - cst.DictElement( - cst.SimpleString("'{}'".format(name)), -cst.Element(value=arg.value) - ) - # Note: the args + kwargs looks silly, but keep in mind that - # the control parameters had to be stripped out, and that - # those could have been passed positionally or by keyword. - for name, arg in zip(kword_params, args + kwargs)]), - keyword=cst.Name("request") - ) - - return updated.with_changes( - args=[request_arg] + ctrl_kwargs - ) - - -def fix_files( - in_dir: pathlib.Path, - out_dir: pathlib.Path, - *, - transformer=dataplexCallTransformer(), -): - """Duplicate the input dir to the output dir, fixing file method calls. - - Preconditions: - * in_dir is a real directory - * out_dir is a real, empty directory - """ - pyfile_gen = ( - pathlib.Path(os.path.join(root, f)) - for root, _, files in os.walk(in_dir) - for f in files if os.path.splitext(f)[1] == ".py" - ) - - for fpath in pyfile_gen: - with open(fpath, 'r') as f: - src = f.read() - - # Parse the code and insert method call fixes. - tree = cst.parse_module(src) - updated = tree.visit(transformer) - - # Create the path and directory structure for the new file. - updated_path = out_dir.joinpath(fpath.relative_to(in_dir)) - updated_path.parent.mkdir(parents=True, exist_ok=True) - - # Generate the updated source file at the corresponding path. - with open(updated_path, 'w') as f: - f.write(updated.code) - - -if __name__ == '__main__': - parser = argparse.ArgumentParser( - description="""Fix up source that uses the dataplex client library. - -The existing sources are NOT overwritten but are copied to output_dir with changes made. - -Note: This tool operates at a best-effort level at converting positional - parameters in client method calls to keyword based parameters. - Cases where it WILL FAIL include - A) * or ** expansion in a method call. - B) Calls via function or method alias (includes free function calls) - C) Indirect or dispatched calls (e.g. the method is looked up dynamically) - - These all constitute false negatives. 
The tool will also detect false - positives when an API method shares a name with another method. -""") - parser.add_argument( - '-d', - '--input-directory', - required=True, - dest='input_dir', - help='the input directory to walk for python files to fix up', - ) - parser.add_argument( - '-o', - '--output-directory', - required=True, - dest='output_dir', - help='the directory to output files fixed via un-flattening', - ) - args = parser.parse_args() - input_dir = pathlib.Path(args.input_dir) - output_dir = pathlib.Path(args.output_dir) - if not input_dir.is_dir(): - print( - f"input directory '{input_dir}' does not exist or is not a directory", - file=sys.stderr, - ) - sys.exit(-1) - - if not output_dir.is_dir(): - print( - f"output directory '{output_dir}' does not exist or is not a directory", - file=sys.stderr, - ) - sys.exit(-1) - - if os.listdir(output_dir): - print( - f"output directory '{output_dir}' is not empty", - file=sys.stderr, - ) - sys.exit(-1) - - fix_files(input_dir, output_dir) diff --git a/owl-bot-staging/google-cloud-dataplex/v1/setup.py b/owl-bot-staging/google-cloud-dataplex/v1/setup.py deleted file mode 100644 index d9494d49423d..000000000000 --- a/owl-bot-staging/google-cloud-dataplex/v1/setup.py +++ /dev/null @@ -1,99 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# -import io -import os -import re - -import setuptools # type: ignore - -package_root = os.path.abspath(os.path.dirname(__file__)) - -name = 'google-cloud-dataplex' - - -description = "Google Cloud Dataplex API client library" - -version = None - -with open(os.path.join(package_root, 'google/cloud/dataplex/gapic_version.py')) as fp: - version_candidates = re.findall(r"(?<=\")\d+.\d+.\d+(?=\")", fp.read()) - assert (len(version_candidates) == 1) - version = version_candidates[0] - -if version[0] == "0": - release_status = "Development Status :: 4 - Beta" -else: - release_status = "Development Status :: 5 - Production/Stable" - -dependencies = [ - "google-api-core[grpc] >= 1.34.1, <3.0.0dev,!=2.0.*,!=2.1.*,!=2.2.*,!=2.3.*,!=2.4.*,!=2.5.*,!=2.6.*,!=2.7.*,!=2.8.*,!=2.9.*,!=2.10.*", - # Exclude incompatible versions of `google-auth` - # See https://github.com/googleapis/google-cloud-python/issues/12364 - "google-auth >= 2.14.1, <3.0.0dev,!=2.24.0,!=2.25.0", - "proto-plus >= 1.22.3, <2.0.0dev", - "proto-plus >= 1.25.0, <2.0.0dev; python_version >= '3.13'", - "protobuf>=3.20.2,<6.0.0dev,!=4.21.0,!=4.21.1,!=4.21.2,!=4.21.3,!=4.21.4,!=4.21.5", - "grpc-google-iam-v1 >= 0.12.4, <1.0.0dev", -] -extras = { -} -url = "https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-dataplex" - -package_root = os.path.abspath(os.path.dirname(__file__)) - -readme_filename = os.path.join(package_root, "README.rst") -with io.open(readme_filename, encoding="utf-8") as readme_file: - readme = readme_file.read() - -packages = [ - package - for package in setuptools.find_namespace_packages() - if package.startswith("google") -] - -setuptools.setup( - name=name, - version=version, - description=description, - long_description=readme, - author="Google LLC", - author_email="googleapis-packages@google.com", - license="Apache 2.0", - url=url, - classifiers=[ - release_status, - "Intended Audience :: Developers", - "License :: OSI Approved :: Apache Software License", - "Programming Language :: Python", - "Programming Language :: Python :: 3", - "Programming Language :: Python :: 3.7", - "Programming Language :: Python :: 3.8", - "Programming Language :: Python :: 3.9", - "Programming Language :: Python :: 3.10", - "Programming Language :: Python :: 3.11", - "Programming Language :: Python :: 3.12", - "Programming Language :: Python :: 3.13", - "Operating System :: OS Independent", - "Topic :: Internet", - ], - platforms="Posix; MacOS X; Windows", - packages=packages, - python_requires=">=3.7", - install_requires=dependencies, - extras_require=extras, - include_package_data=True, - zip_safe=False, -) diff --git a/owl-bot-staging/google-cloud-dataplex/v1/testing/constraints-3.10.txt b/owl-bot-staging/google-cloud-dataplex/v1/testing/constraints-3.10.txt deleted file mode 100644 index ad3f0fa58e2d..000000000000 --- a/owl-bot-staging/google-cloud-dataplex/v1/testing/constraints-3.10.txt +++ /dev/null @@ -1,7 +0,0 @@ -# -*- coding: utf-8 -*- -# This constraints file is required for unit tests. -# List all library dependencies and extras in this file. -google-api-core -proto-plus -protobuf -grpc-google-iam-v1 diff --git a/owl-bot-staging/google-cloud-dataplex/v1/testing/constraints-3.11.txt b/owl-bot-staging/google-cloud-dataplex/v1/testing/constraints-3.11.txt deleted file mode 100644 index ad3f0fa58e2d..000000000000 --- a/owl-bot-staging/google-cloud-dataplex/v1/testing/constraints-3.11.txt +++ /dev/null @@ -1,7 +0,0 @@ -# -*- coding: utf-8 -*- -# This constraints file is required for unit tests. 
-# List all library dependencies and extras in this file. -google-api-core -proto-plus -protobuf -grpc-google-iam-v1 diff --git a/owl-bot-staging/google-cloud-dataplex/v1/testing/constraints-3.12.txt b/owl-bot-staging/google-cloud-dataplex/v1/testing/constraints-3.12.txt deleted file mode 100644 index ad3f0fa58e2d..000000000000 --- a/owl-bot-staging/google-cloud-dataplex/v1/testing/constraints-3.12.txt +++ /dev/null @@ -1,7 +0,0 @@ -# -*- coding: utf-8 -*- -# This constraints file is required for unit tests. -# List all library dependencies and extras in this file. -google-api-core -proto-plus -protobuf -grpc-google-iam-v1 diff --git a/owl-bot-staging/google-cloud-dataplex/v1/testing/constraints-3.13.txt b/owl-bot-staging/google-cloud-dataplex/v1/testing/constraints-3.13.txt deleted file mode 100644 index ad3f0fa58e2d..000000000000 --- a/owl-bot-staging/google-cloud-dataplex/v1/testing/constraints-3.13.txt +++ /dev/null @@ -1,7 +0,0 @@ -# -*- coding: utf-8 -*- -# This constraints file is required for unit tests. -# List all library dependencies and extras in this file. -google-api-core -proto-plus -protobuf -grpc-google-iam-v1 diff --git a/owl-bot-staging/google-cloud-dataplex/v1/testing/constraints-3.7.txt b/owl-bot-staging/google-cloud-dataplex/v1/testing/constraints-3.7.txt deleted file mode 100644 index a81fb6bcd05c..000000000000 --- a/owl-bot-staging/google-cloud-dataplex/v1/testing/constraints-3.7.txt +++ /dev/null @@ -1,11 +0,0 @@ -# This constraints file is used to check that lower bounds -# are correct in setup.py -# List all library dependencies and extras in this file. -# Pin the version to the lower bound. -# e.g., if setup.py has "google-cloud-foo >= 1.14.0, < 2.0.0dev", -# Then this file should have google-cloud-foo==1.14.0 -google-api-core==1.34.1 -google-auth==2.14.1 -proto-plus==1.22.3 -protobuf==3.20.2 -grpc-google-iam-v1==0.12.4 diff --git a/owl-bot-staging/google-cloud-dataplex/v1/testing/constraints-3.8.txt b/owl-bot-staging/google-cloud-dataplex/v1/testing/constraints-3.8.txt deleted file mode 100644 index ad3f0fa58e2d..000000000000 --- a/owl-bot-staging/google-cloud-dataplex/v1/testing/constraints-3.8.txt +++ /dev/null @@ -1,7 +0,0 @@ -# -*- coding: utf-8 -*- -# This constraints file is required for unit tests. -# List all library dependencies and extras in this file. -google-api-core -proto-plus -protobuf -grpc-google-iam-v1 diff --git a/owl-bot-staging/google-cloud-dataplex/v1/testing/constraints-3.9.txt b/owl-bot-staging/google-cloud-dataplex/v1/testing/constraints-3.9.txt deleted file mode 100644 index ad3f0fa58e2d..000000000000 --- a/owl-bot-staging/google-cloud-dataplex/v1/testing/constraints-3.9.txt +++ /dev/null @@ -1,7 +0,0 @@ -# -*- coding: utf-8 -*- -# This constraints file is required for unit tests. -# List all library dependencies and extras in this file. -google-api-core -proto-plus -protobuf -grpc-google-iam-v1 diff --git a/owl-bot-staging/google-cloud-dataplex/v1/tests/__init__.py b/owl-bot-staging/google-cloud-dataplex/v1/tests/__init__.py deleted file mode 100644 index 7b3de3117f38..000000000000 --- a/owl-bot-staging/google-cloud-dataplex/v1/tests/__init__.py +++ /dev/null @@ -1,16 +0,0 @@ - -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# diff --git a/owl-bot-staging/google-cloud-dataplex/v1/tests/unit/__init__.py b/owl-bot-staging/google-cloud-dataplex/v1/tests/unit/__init__.py deleted file mode 100644 index 7b3de3117f38..000000000000 --- a/owl-bot-staging/google-cloud-dataplex/v1/tests/unit/__init__.py +++ /dev/null @@ -1,16 +0,0 @@ - -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# diff --git a/owl-bot-staging/google-cloud-dataplex/v1/tests/unit/gapic/__init__.py b/owl-bot-staging/google-cloud-dataplex/v1/tests/unit/gapic/__init__.py deleted file mode 100644 index 7b3de3117f38..000000000000 --- a/owl-bot-staging/google-cloud-dataplex/v1/tests/unit/gapic/__init__.py +++ /dev/null @@ -1,16 +0,0 @@ - -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# diff --git a/owl-bot-staging/google-cloud-dataplex/v1/tests/unit/gapic/dataplex_v1/__init__.py b/owl-bot-staging/google-cloud-dataplex/v1/tests/unit/gapic/dataplex_v1/__init__.py deleted file mode 100644 index 7b3de3117f38..000000000000 --- a/owl-bot-staging/google-cloud-dataplex/v1/tests/unit/gapic/dataplex_v1/__init__.py +++ /dev/null @@ -1,16 +0,0 @@ - -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# diff --git a/owl-bot-staging/google-cloud-dataplex/v1/tests/unit/gapic/dataplex_v1/test_catalog_service.py b/owl-bot-staging/google-cloud-dataplex/v1/tests/unit/gapic/dataplex_v1/test_catalog_service.py deleted file mode 100644 index 801c1fbe291a..000000000000 --- a/owl-bot-staging/google-cloud-dataplex/v1/tests/unit/gapic/dataplex_v1/test_catalog_service.py +++ /dev/null @@ -1,13193 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -import os -# try/except added for compatibility with python < 3.8 -try: - from unittest import mock - from unittest.mock import AsyncMock # pragma: NO COVER -except ImportError: # pragma: NO COVER - import mock - -import grpc -from grpc.experimental import aio -import math -import pytest -from google.api_core import api_core_version -from proto.marshal.rules.dates import DurationRule, TimestampRule -from proto.marshal.rules import wrappers - -try: - from google.auth.aio import credentials as ga_credentials_async - HAS_GOOGLE_AUTH_AIO = True -except ImportError: # pragma: NO COVER - HAS_GOOGLE_AUTH_AIO = False - -from google.api_core import client_options -from google.api_core import exceptions as core_exceptions -from google.api_core import future -from google.api_core import gapic_v1 -from google.api_core import grpc_helpers -from google.api_core import grpc_helpers_async -from google.api_core import operation -from google.api_core import operation_async # type: ignore -from google.api_core import operations_v1 -from google.api_core import path_template -from google.api_core import retry as retries -from google.auth import credentials as ga_credentials -from google.auth.exceptions import MutualTLSChannelError -from google.cloud.dataplex_v1.services.catalog_service import CatalogServiceAsyncClient -from google.cloud.dataplex_v1.services.catalog_service import CatalogServiceClient -from google.cloud.dataplex_v1.services.catalog_service import pagers -from google.cloud.dataplex_v1.services.catalog_service import transports -from google.cloud.dataplex_v1.types import catalog -from google.cloud.dataplex_v1.types import service -from google.cloud.location import locations_pb2 -from google.iam.v1 import iam_policy_pb2 # type: ignore -from google.iam.v1 import options_pb2 # type: ignore -from google.iam.v1 import policy_pb2 # type: ignore -from google.longrunning import operations_pb2 # type: ignore -from google.oauth2 import service_account -from google.protobuf import empty_pb2 # type: ignore -from google.protobuf import field_mask_pb2 # type: ignore -from google.protobuf import struct_pb2 # type: ignore -from google.protobuf import timestamp_pb2 # type: ignore -import google.auth - - -async def mock_async_gen(data, chunk_size=1): - for i in range(0, len(data)): # pragma: NO COVER - chunk = data[i : i + chunk_size] - yield chunk.encode("utf-8") - -def client_cert_source_callback(): - return b"cert bytes", b"key bytes" - -# TODO: use async auth anon credentials by default once the minimum version of 
google-auth is upgraded. -# See related issue: https://github.com/googleapis/gapic-generator-python/issues/2107. -def async_anonymous_credentials(): - if HAS_GOOGLE_AUTH_AIO: - return ga_credentials_async.AnonymousCredentials() - return ga_credentials.AnonymousCredentials() - -# If default endpoint is localhost, then default mtls endpoint will be the same. -# This method modifies the default endpoint so the client can produce a different -# mtls endpoint for endpoint testing purposes. -def modify_default_endpoint(client): - return "foo.googleapis.com" if ("localhost" in client.DEFAULT_ENDPOINT) else client.DEFAULT_ENDPOINT - -# If default endpoint template is localhost, then default mtls endpoint will be the same. -# This method modifies the default endpoint template so the client can produce a different -# mtls endpoint for endpoint testing purposes. -def modify_default_endpoint_template(client): - return "test.{UNIVERSE_DOMAIN}" if ("localhost" in client._DEFAULT_ENDPOINT_TEMPLATE) else client._DEFAULT_ENDPOINT_TEMPLATE - - -def test__get_default_mtls_endpoint(): - api_endpoint = "example.googleapis.com" - api_mtls_endpoint = "example.mtls.googleapis.com" - sandbox_endpoint = "example.sandbox.googleapis.com" - sandbox_mtls_endpoint = "example.mtls.sandbox.googleapis.com" - non_googleapi = "api.example.com" - - assert CatalogServiceClient._get_default_mtls_endpoint(None) is None - assert CatalogServiceClient._get_default_mtls_endpoint(api_endpoint) == api_mtls_endpoint - assert CatalogServiceClient._get_default_mtls_endpoint(api_mtls_endpoint) == api_mtls_endpoint - assert CatalogServiceClient._get_default_mtls_endpoint(sandbox_endpoint) == sandbox_mtls_endpoint - assert CatalogServiceClient._get_default_mtls_endpoint(sandbox_mtls_endpoint) == sandbox_mtls_endpoint - assert CatalogServiceClient._get_default_mtls_endpoint(non_googleapi) == non_googleapi - -def test__read_environment_variables(): - assert CatalogServiceClient._read_environment_variables() == (False, "auto", None) - - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): - assert CatalogServiceClient._read_environment_variables() == (True, "auto", None) - - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "false"}): - assert CatalogServiceClient._read_environment_variables() == (False, "auto", None) - - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"}): - with pytest.raises(ValueError) as excinfo: - CatalogServiceClient._read_environment_variables() - assert str(excinfo.value) == "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" - - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): - assert CatalogServiceClient._read_environment_variables() == (False, "never", None) - - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): - assert CatalogServiceClient._read_environment_variables() == (False, "always", None) - - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"}): - assert CatalogServiceClient._read_environment_variables() == (False, "auto", None) - - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): - with pytest.raises(MutualTLSChannelError) as excinfo: - CatalogServiceClient._read_environment_variables() - assert str(excinfo.value) == "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" - - with mock.patch.dict(os.environ, {"GOOGLE_CLOUD_UNIVERSE_DOMAIN": 
"foo.com"}): - assert CatalogServiceClient._read_environment_variables() == (False, "auto", "foo.com") - -def test__get_client_cert_source(): - mock_provided_cert_source = mock.Mock() - mock_default_cert_source = mock.Mock() - - assert CatalogServiceClient._get_client_cert_source(None, False) is None - assert CatalogServiceClient._get_client_cert_source(mock_provided_cert_source, False) is None - assert CatalogServiceClient._get_client_cert_source(mock_provided_cert_source, True) == mock_provided_cert_source - - with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=True): - with mock.patch('google.auth.transport.mtls.default_client_cert_source', return_value=mock_default_cert_source): - assert CatalogServiceClient._get_client_cert_source(None, True) is mock_default_cert_source - assert CatalogServiceClient._get_client_cert_source(mock_provided_cert_source, "true") is mock_provided_cert_source - -@mock.patch.object(CatalogServiceClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(CatalogServiceClient)) -@mock.patch.object(CatalogServiceAsyncClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(CatalogServiceAsyncClient)) -def test__get_api_endpoint(): - api_override = "foo.com" - mock_client_cert_source = mock.Mock() - default_universe = CatalogServiceClient._DEFAULT_UNIVERSE - default_endpoint = CatalogServiceClient._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=default_universe) - mock_universe = "bar.com" - mock_endpoint = CatalogServiceClient._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=mock_universe) - - assert CatalogServiceClient._get_api_endpoint(api_override, mock_client_cert_source, default_universe, "always") == api_override - assert CatalogServiceClient._get_api_endpoint(None, mock_client_cert_source, default_universe, "auto") == CatalogServiceClient.DEFAULT_MTLS_ENDPOINT - assert CatalogServiceClient._get_api_endpoint(None, None, default_universe, "auto") == default_endpoint - assert CatalogServiceClient._get_api_endpoint(None, None, default_universe, "always") == CatalogServiceClient.DEFAULT_MTLS_ENDPOINT - assert CatalogServiceClient._get_api_endpoint(None, mock_client_cert_source, default_universe, "always") == CatalogServiceClient.DEFAULT_MTLS_ENDPOINT - assert CatalogServiceClient._get_api_endpoint(None, None, mock_universe, "never") == mock_endpoint - assert CatalogServiceClient._get_api_endpoint(None, None, default_universe, "never") == default_endpoint - - with pytest.raises(MutualTLSChannelError) as excinfo: - CatalogServiceClient._get_api_endpoint(None, mock_client_cert_source, mock_universe, "auto") - assert str(excinfo.value) == "mTLS is not supported in any universe other than googleapis.com." - - -def test__get_universe_domain(): - client_universe_domain = "foo.com" - universe_domain_env = "bar.com" - - assert CatalogServiceClient._get_universe_domain(client_universe_domain, universe_domain_env) == client_universe_domain - assert CatalogServiceClient._get_universe_domain(None, universe_domain_env) == universe_domain_env - assert CatalogServiceClient._get_universe_domain(None, None) == CatalogServiceClient._DEFAULT_UNIVERSE - - with pytest.raises(ValueError) as excinfo: - CatalogServiceClient._get_universe_domain("", None) - assert str(excinfo.value) == "Universe Domain cannot be an empty string." 
- - -@pytest.mark.parametrize("client_class,transport_name", [ - (CatalogServiceClient, "grpc"), - (CatalogServiceAsyncClient, "grpc_asyncio"), -]) -def test_catalog_service_client_from_service_account_info(client_class, transport_name): - creds = ga_credentials.AnonymousCredentials() - with mock.patch.object(service_account.Credentials, 'from_service_account_info') as factory: - factory.return_value = creds - info = {"valid": True} - client = client_class.from_service_account_info(info, transport=transport_name) - assert client.transport._credentials == creds - assert isinstance(client, client_class) - - assert client.transport._host == ( - 'dataplex.googleapis.com:443' - ) - - -@pytest.mark.parametrize("transport_class,transport_name", [ - (transports.CatalogServiceGrpcTransport, "grpc"), - (transports.CatalogServiceGrpcAsyncIOTransport, "grpc_asyncio"), -]) -def test_catalog_service_client_service_account_always_use_jwt(transport_class, transport_name): - with mock.patch.object(service_account.Credentials, 'with_always_use_jwt_access', create=True) as use_jwt: - creds = service_account.Credentials(None, None, None) - transport = transport_class(credentials=creds, always_use_jwt_access=True) - use_jwt.assert_called_once_with(True) - - with mock.patch.object(service_account.Credentials, 'with_always_use_jwt_access', create=True) as use_jwt: - creds = service_account.Credentials(None, None, None) - transport = transport_class(credentials=creds, always_use_jwt_access=False) - use_jwt.assert_not_called() - - -@pytest.mark.parametrize("client_class,transport_name", [ - (CatalogServiceClient, "grpc"), - (CatalogServiceAsyncClient, "grpc_asyncio"), -]) -def test_catalog_service_client_from_service_account_file(client_class, transport_name): - creds = ga_credentials.AnonymousCredentials() - with mock.patch.object(service_account.Credentials, 'from_service_account_file') as factory: - factory.return_value = creds - client = client_class.from_service_account_file("dummy/file/path.json", transport=transport_name) - assert client.transport._credentials == creds - assert isinstance(client, client_class) - - client = client_class.from_service_account_json("dummy/file/path.json", transport=transport_name) - assert client.transport._credentials == creds - assert isinstance(client, client_class) - - assert client.transport._host == ( - 'dataplex.googleapis.com:443' - ) - - -def test_catalog_service_client_get_transport_class(): - transport = CatalogServiceClient.get_transport_class() - available_transports = [ - transports.CatalogServiceGrpcTransport, - ] - assert transport in available_transports - - transport = CatalogServiceClient.get_transport_class("grpc") - assert transport == transports.CatalogServiceGrpcTransport - - -@pytest.mark.parametrize("client_class,transport_class,transport_name", [ - (CatalogServiceClient, transports.CatalogServiceGrpcTransport, "grpc"), - (CatalogServiceAsyncClient, transports.CatalogServiceGrpcAsyncIOTransport, "grpc_asyncio"), -]) -@mock.patch.object(CatalogServiceClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(CatalogServiceClient)) -@mock.patch.object(CatalogServiceAsyncClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(CatalogServiceAsyncClient)) -def test_catalog_service_client_client_options(client_class, transport_class, transport_name): - # Check that if channel is provided we won't create a new one. 
- with mock.patch.object(CatalogServiceClient, 'get_transport_class') as gtc: - transport = transport_class( - credentials=ga_credentials.AnonymousCredentials() - ) - client = client_class(transport=transport) - gtc.assert_not_called() - - # Check that if channel is provided via str we will create a new one. - with mock.patch.object(CatalogServiceClient, 'get_transport_class') as gtc: - client = client_class(transport=transport_name) - gtc.assert_called() - - # Check the case api_endpoint is provided. - options = client_options.ClientOptions(api_endpoint="squid.clam.whelk") - with mock.patch.object(transport_class, '__init__') as patched: - patched.return_value = None - client = client_class(transport=transport_name, client_options=options) - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host="squid.clam.whelk", - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - - # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is - # "never". - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): - with mock.patch.object(transport_class, '__init__') as patched: - patched.return_value = None - client = client_class(transport=transport_name) - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE), - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - - # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is - # "always". - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): - with mock.patch.object(transport_class, '__init__') as patched: - patched.return_value = None - client = client_class(transport=transport_name) - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host=client.DEFAULT_MTLS_ENDPOINT, - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - - # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has - # unsupported value. - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): - with pytest.raises(MutualTLSChannelError) as excinfo: - client = client_class(transport=transport_name) - assert str(excinfo.value) == "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" - - # Check the case GOOGLE_API_USE_CLIENT_CERTIFICATE has unsupported value. 
- with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"}): - with pytest.raises(ValueError) as excinfo: - client = client_class(transport=transport_name) - assert str(excinfo.value) == "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" - - # Check the case quota_project_id is provided - options = client_options.ClientOptions(quota_project_id="octopus") - with mock.patch.object(transport_class, '__init__') as patched: - patched.return_value = None - client = client_class(client_options=options, transport=transport_name) - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE), - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id="octopus", - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - # Check the case api_endpoint is provided - options = client_options.ClientOptions(api_audience="https://language.googleapis.com") - with mock.patch.object(transport_class, '__init__') as patched: - patched.return_value = None - client = client_class(client_options=options, transport=transport_name) - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE), - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience="https://language.googleapis.com" - ) - -@pytest.mark.parametrize("client_class,transport_class,transport_name,use_client_cert_env", [ - (CatalogServiceClient, transports.CatalogServiceGrpcTransport, "grpc", "true"), - (CatalogServiceAsyncClient, transports.CatalogServiceGrpcAsyncIOTransport, "grpc_asyncio", "true"), - (CatalogServiceClient, transports.CatalogServiceGrpcTransport, "grpc", "false"), - (CatalogServiceAsyncClient, transports.CatalogServiceGrpcAsyncIOTransport, "grpc_asyncio", "false"), -]) -@mock.patch.object(CatalogServiceClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(CatalogServiceClient)) -@mock.patch.object(CatalogServiceAsyncClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(CatalogServiceAsyncClient)) -@mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"}) -def test_catalog_service_client_mtls_env_auto(client_class, transport_class, transport_name, use_client_cert_env): - # This tests the endpoint autoswitch behavior. Endpoint is autoswitched to the default - # mtls endpoint, if GOOGLE_API_USE_CLIENT_CERTIFICATE is "true" and client cert exists. - - # Check the case client_cert_source is provided. Whether client cert is used depends on - # GOOGLE_API_USE_CLIENT_CERTIFICATE value. 
- with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}): - options = client_options.ClientOptions(client_cert_source=client_cert_source_callback) - with mock.patch.object(transport_class, '__init__') as patched: - patched.return_value = None - client = client_class(client_options=options, transport=transport_name) - - if use_client_cert_env == "false": - expected_client_cert_source = None - expected_host = client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE) - else: - expected_client_cert_source = client_cert_source_callback - expected_host = client.DEFAULT_MTLS_ENDPOINT - - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host=expected_host, - scopes=None, - client_cert_source_for_mtls=expected_client_cert_source, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - - # Check the case ADC client cert is provided. Whether client cert is used depends on - # GOOGLE_API_USE_CLIENT_CERTIFICATE value. - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}): - with mock.patch.object(transport_class, '__init__') as patched: - with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=True): - with mock.patch('google.auth.transport.mtls.default_client_cert_source', return_value=client_cert_source_callback): - if use_client_cert_env == "false": - expected_host = client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE) - expected_client_cert_source = None - else: - expected_host = client.DEFAULT_MTLS_ENDPOINT - expected_client_cert_source = client_cert_source_callback - - patched.return_value = None - client = client_class(transport=transport_name) - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host=expected_host, - scopes=None, - client_cert_source_for_mtls=expected_client_cert_source, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - - # Check the case client_cert_source and ADC client cert are not provided. - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}): - with mock.patch.object(transport_class, '__init__') as patched: - with mock.patch("google.auth.transport.mtls.has_default_client_cert_source", return_value=False): - patched.return_value = None - client = client_class(transport=transport_name) - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE), - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - - -@pytest.mark.parametrize("client_class", [ - CatalogServiceClient, CatalogServiceAsyncClient -]) -@mock.patch.object(CatalogServiceClient, "DEFAULT_ENDPOINT", modify_default_endpoint(CatalogServiceClient)) -@mock.patch.object(CatalogServiceAsyncClient, "DEFAULT_ENDPOINT", modify_default_endpoint(CatalogServiceAsyncClient)) -def test_catalog_service_client_get_mtls_endpoint_and_cert_source(client_class): - mock_client_cert_source = mock.Mock() - - # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "true". 
- with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): - mock_api_endpoint = "foo" - options = client_options.ClientOptions(client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint) - api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source(options) - assert api_endpoint == mock_api_endpoint - assert cert_source == mock_client_cert_source - - # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "false". - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "false"}): - mock_client_cert_source = mock.Mock() - mock_api_endpoint = "foo" - options = client_options.ClientOptions(client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint) - api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source(options) - assert api_endpoint == mock_api_endpoint - assert cert_source is None - - # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "never". - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): - api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() - assert api_endpoint == client_class.DEFAULT_ENDPOINT - assert cert_source is None - - # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "always". - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): - api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() - assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT - assert cert_source is None - - # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert doesn't exist. - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): - with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=False): - api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() - assert api_endpoint == client_class.DEFAULT_ENDPOINT - assert cert_source is None - - # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert exists. - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): - with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=True): - with mock.patch('google.auth.transport.mtls.default_client_cert_source', return_value=mock_client_cert_source): - api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() - assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT - assert cert_source == mock_client_cert_source - - # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has - # unsupported value. - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): - with pytest.raises(MutualTLSChannelError) as excinfo: - client_class.get_mtls_endpoint_and_cert_source() - - assert str(excinfo.value) == "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" - - # Check the case GOOGLE_API_USE_CLIENT_CERTIFICATE has unsupported value. 
- with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"}): - with pytest.raises(ValueError) as excinfo: - client_class.get_mtls_endpoint_and_cert_source() - - assert str(excinfo.value) == "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" - -@pytest.mark.parametrize("client_class", [ - CatalogServiceClient, CatalogServiceAsyncClient -]) -@mock.patch.object(CatalogServiceClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(CatalogServiceClient)) -@mock.patch.object(CatalogServiceAsyncClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(CatalogServiceAsyncClient)) -def test_catalog_service_client_client_api_endpoint(client_class): - mock_client_cert_source = client_cert_source_callback - api_override = "foo.com" - default_universe = CatalogServiceClient._DEFAULT_UNIVERSE - default_endpoint = CatalogServiceClient._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=default_universe) - mock_universe = "bar.com" - mock_endpoint = CatalogServiceClient._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=mock_universe) - - # If ClientOptions.api_endpoint is set and GOOGLE_API_USE_CLIENT_CERTIFICATE="true", - # use ClientOptions.api_endpoint as the api endpoint regardless. - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): - with mock.patch("google.auth.transport.requests.AuthorizedSession.configure_mtls_channel"): - options = client_options.ClientOptions(client_cert_source=mock_client_cert_source, api_endpoint=api_override) - client = client_class(client_options=options, credentials=ga_credentials.AnonymousCredentials()) - assert client.api_endpoint == api_override - - # If ClientOptions.api_endpoint is not set and GOOGLE_API_USE_MTLS_ENDPOINT="never", - # use the _DEFAULT_ENDPOINT_TEMPLATE populated with GDU as the api endpoint. - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): - client = client_class(credentials=ga_credentials.AnonymousCredentials()) - assert client.api_endpoint == default_endpoint - - # If ClientOptions.api_endpoint is not set and GOOGLE_API_USE_MTLS_ENDPOINT="always", - # use the DEFAULT_MTLS_ENDPOINT as the api endpoint. - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): - client = client_class(credentials=ga_credentials.AnonymousCredentials()) - assert client.api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT - - # If ClientOptions.api_endpoint is not set, GOOGLE_API_USE_MTLS_ENDPOINT="auto" (default), - # GOOGLE_API_USE_CLIENT_CERTIFICATE="false" (default), default cert source doesn't exist, - # and ClientOptions.universe_domain="bar.com", - # use the _DEFAULT_ENDPOINT_TEMPLATE populated with universe domain as the api endpoint. - options = client_options.ClientOptions() - universe_exists = hasattr(options, "universe_domain") - if universe_exists: - options = client_options.ClientOptions(universe_domain=mock_universe) - client = client_class(client_options=options, credentials=ga_credentials.AnonymousCredentials()) - else: - client = client_class(client_options=options, credentials=ga_credentials.AnonymousCredentials()) - assert client.api_endpoint == (mock_endpoint if universe_exists else default_endpoint) - assert client.universe_domain == (mock_universe if universe_exists else default_universe) - - # If ClientOptions does not have a universe domain attribute and GOOGLE_API_USE_MTLS_ENDPOINT="never", - # use the _DEFAULT_ENDPOINT_TEMPLATE populated with GDU as the api endpoint. 
- options = client_options.ClientOptions() - if hasattr(options, "universe_domain"): - delattr(options, "universe_domain") - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): - client = client_class(client_options=options, credentials=ga_credentials.AnonymousCredentials()) - assert client.api_endpoint == default_endpoint - - -@pytest.mark.parametrize("client_class,transport_class,transport_name", [ - (CatalogServiceClient, transports.CatalogServiceGrpcTransport, "grpc"), - (CatalogServiceAsyncClient, transports.CatalogServiceGrpcAsyncIOTransport, "grpc_asyncio"), -]) -def test_catalog_service_client_client_options_scopes(client_class, transport_class, transport_name): - # Check the case scopes are provided. - options = client_options.ClientOptions( - scopes=["1", "2"], - ) - with mock.patch.object(transport_class, '__init__') as patched: - patched.return_value = None - client = client_class(client_options=options, transport=transport_name) - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE), - scopes=["1", "2"], - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - -@pytest.mark.parametrize("client_class,transport_class,transport_name,grpc_helpers", [ - (CatalogServiceClient, transports.CatalogServiceGrpcTransport, "grpc", grpc_helpers), - (CatalogServiceAsyncClient, transports.CatalogServiceGrpcAsyncIOTransport, "grpc_asyncio", grpc_helpers_async), -]) -def test_catalog_service_client_client_options_credentials_file(client_class, transport_class, transport_name, grpc_helpers): - # Check the case credentials file is provided. - options = client_options.ClientOptions( - credentials_file="credentials.json" - ) - - with mock.patch.object(transport_class, '__init__') as patched: - patched.return_value = None - client = client_class(client_options=options, transport=transport_name) - patched.assert_called_once_with( - credentials=None, - credentials_file="credentials.json", - host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE), - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - -def test_catalog_service_client_client_options_from_dict(): - with mock.patch('google.cloud.dataplex_v1.services.catalog_service.transports.CatalogServiceGrpcTransport.__init__') as grpc_transport: - grpc_transport.return_value = None - client = CatalogServiceClient( - client_options={'api_endpoint': 'squid.clam.whelk'} - ) - grpc_transport.assert_called_once_with( - credentials=None, - credentials_file=None, - host="squid.clam.whelk", - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - - -@pytest.mark.parametrize("client_class,transport_class,transport_name,grpc_helpers", [ - (CatalogServiceClient, transports.CatalogServiceGrpcTransport, "grpc", grpc_helpers), - (CatalogServiceAsyncClient, transports.CatalogServiceGrpcAsyncIOTransport, "grpc_asyncio", grpc_helpers_async), -]) -def test_catalog_service_client_create_channel_credentials_file(client_class, transport_class, transport_name, grpc_helpers): - # Check the case credentials file is provided. 
- options = client_options.ClientOptions( - credentials_file="credentials.json" - ) - - with mock.patch.object(transport_class, '__init__') as patched: - patched.return_value = None - client = client_class(client_options=options, transport=transport_name) - patched.assert_called_once_with( - credentials=None, - credentials_file="credentials.json", - host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE), - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - - # test that the credentials from file are saved and used as the credentials. - with mock.patch.object( - google.auth, "load_credentials_from_file", autospec=True - ) as load_creds, mock.patch.object( - google.auth, "default", autospec=True - ) as adc, mock.patch.object( - grpc_helpers, "create_channel" - ) as create_channel: - creds = ga_credentials.AnonymousCredentials() - file_creds = ga_credentials.AnonymousCredentials() - load_creds.return_value = (file_creds, None) - adc.return_value = (creds, None) - client = client_class(client_options=options, transport=transport_name) - create_channel.assert_called_with( - "dataplex.googleapis.com:443", - credentials=file_creds, - credentials_file=None, - quota_project_id=None, - default_scopes=( - 'https://www.googleapis.com/auth/cloud-platform', -), - scopes=None, - default_host="dataplex.googleapis.com", - ssl_credentials=None, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ], - ) - - -@pytest.mark.parametrize("request_type", [ - catalog.CreateEntryTypeRequest, - dict, -]) -def test_create_entry_type(request_type, transport: str = 'grpc'): - client = CatalogServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_entry_type), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = operations_pb2.Operation(name='operations/spam') - response = client.create_entry_type(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - request = catalog.CreateEntryTypeRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, future.Future) - - -def test_create_entry_type_non_empty_request_with_auto_populated_field(): - # This test is a coverage failsafe to make sure that UUID4 fields are - # automatically populated, according to AIP-4235, with non-empty requests. - client = CatalogServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. - request = catalog.CreateEntryTypeRequest( - parent='parent_value', - entry_type_id='entry_type_id_value', - ) - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.create_entry_type), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client.create_entry_type(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == catalog.CreateEntryTypeRequest( - parent='parent_value', - entry_type_id='entry_type_id_value', - ) - -def test_create_entry_type_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = CatalogServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.create_entry_type in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.create_entry_type] = mock_rpc - request = {} - client.create_entry_type(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - # Operation methods call wrapper_fn to build a cached - # client._transport.operations_client instance on first rpc call. - # Subsequent calls should use the cached wrapper - wrapper_fn.reset_mock() - - client.create_entry_type(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_create_entry_type_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: - client = CatalogServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._client._transport.create_entry_type in client._client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.AsyncMock() - mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[client._client._transport.create_entry_type] = mock_rpc - - request = {} - await client.create_entry_type(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - # Operation methods call wrapper_fn to build a cached - # client._transport.operations_client instance on first rpc call. 
- # Subsequent calls should use the cached wrapper - wrapper_fn.reset_mock() - - await client.create_entry_type(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_create_entry_type_async(transport: str = 'grpc_asyncio', request_type=catalog.CreateEntryTypeRequest): - client = CatalogServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_entry_type), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name='operations/spam') - ) - response = await client.create_entry_type(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - request = catalog.CreateEntryTypeRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, future.Future) - - -@pytest.mark.asyncio -async def test_create_entry_type_async_from_dict(): - await test_create_entry_type_async(request_type=dict) - -def test_create_entry_type_field_headers(): - client = CatalogServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = catalog.CreateEntryTypeRequest() - - request.parent = 'parent_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_entry_type), - '__call__') as call: - call.return_value = operations_pb2.Operation(name='operations/op') - client.create_entry_type(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'parent=parent_value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_create_entry_type_field_headers_async(): - client = CatalogServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = catalog.CreateEntryTypeRequest() - - request.parent = 'parent_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_entry_type), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(operations_pb2.Operation(name='operations/op')) - await client.create_entry_type(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. 
- _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'parent=parent_value', - ) in kw['metadata'] - - -def test_create_entry_type_flattened(): - client = CatalogServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_entry_type), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = operations_pb2.Operation(name='operations/op') - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.create_entry_type( - parent='parent_value', - entry_type=catalog.EntryType(name='name_value'), - entry_type_id='entry_type_id_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - arg = args[0].parent - mock_val = 'parent_value' - assert arg == mock_val - arg = args[0].entry_type - mock_val = catalog.EntryType(name='name_value') - assert arg == mock_val - arg = args[0].entry_type_id - mock_val = 'entry_type_id_value' - assert arg == mock_val - - -def test_create_entry_type_flattened_error(): - client = CatalogServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.create_entry_type( - catalog.CreateEntryTypeRequest(), - parent='parent_value', - entry_type=catalog.EntryType(name='name_value'), - entry_type_id='entry_type_id_value', - ) - -@pytest.mark.asyncio -async def test_create_entry_type_flattened_async(): - client = CatalogServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_entry_type), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = operations_pb2.Operation(name='operations/op') - - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name='operations/spam') - ) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.create_entry_type( - parent='parent_value', - entry_type=catalog.EntryType(name='name_value'), - entry_type_id='entry_type_id_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].parent - mock_val = 'parent_value' - assert arg == mock_val - arg = args[0].entry_type - mock_val = catalog.EntryType(name='name_value') - assert arg == mock_val - arg = args[0].entry_type_id - mock_val = 'entry_type_id_value' - assert arg == mock_val - -@pytest.mark.asyncio -async def test_create_entry_type_flattened_error_async(): - client = CatalogServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. 
- with pytest.raises(ValueError): - await client.create_entry_type( - catalog.CreateEntryTypeRequest(), - parent='parent_value', - entry_type=catalog.EntryType(name='name_value'), - entry_type_id='entry_type_id_value', - ) - - -@pytest.mark.parametrize("request_type", [ - catalog.UpdateEntryTypeRequest, - dict, -]) -def test_update_entry_type(request_type, transport: str = 'grpc'): - client = CatalogServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_entry_type), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = operations_pb2.Operation(name='operations/spam') - response = client.update_entry_type(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - request = catalog.UpdateEntryTypeRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, future.Future) - - -def test_update_entry_type_non_empty_request_with_auto_populated_field(): - # This test is a coverage failsafe to make sure that UUID4 fields are - # automatically populated, according to AIP-4235, with non-empty requests. - client = CatalogServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. - request = catalog.UpdateEntryTypeRequest( - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_entry_type), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client.update_entry_type(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == catalog.UpdateEntryTypeRequest( - ) - -def test_update_entry_type_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = CatalogServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.update_entry_type in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.update_entry_type] = mock_rpc - request = {} - client.update_entry_type(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - # Operation methods call wrapper_fn to build a cached - # client._transport.operations_client instance on first rpc call. 
- # Subsequent calls should use the cached wrapper - wrapper_fn.reset_mock() - - client.update_entry_type(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_update_entry_type_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: - client = CatalogServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._client._transport.update_entry_type in client._client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.AsyncMock() - mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[client._client._transport.update_entry_type] = mock_rpc - - request = {} - await client.update_entry_type(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - # Operation methods call wrapper_fn to build a cached - # client._transport.operations_client instance on first rpc call. - # Subsequent calls should use the cached wrapper - wrapper_fn.reset_mock() - - await client.update_entry_type(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_update_entry_type_async(transport: str = 'grpc_asyncio', request_type=catalog.UpdateEntryTypeRequest): - client = CatalogServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_entry_type), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name='operations/spam') - ) - response = await client.update_entry_type(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - request = catalog.UpdateEntryTypeRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, future.Future) - - -@pytest.mark.asyncio -async def test_update_entry_type_async_from_dict(): - await test_update_entry_type_async(request_type=dict) - -def test_update_entry_type_field_headers(): - client = CatalogServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = catalog.UpdateEntryTypeRequest() - - request.entry_type.name = 'name_value' - - # Mock the actual call within the gRPC stub, and fake the request. 
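- # (type(client.transport.update_entry_type) is the channel's unary-unary
- # multicallable class, so patching its __call__ intercepts the RPC one layer
- # below the client surface while the request-construction logic above it
- # still runs for real.)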
- with mock.patch.object( - type(client.transport.update_entry_type), - '__call__') as call: - call.return_value = operations_pb2.Operation(name='operations/op') - client.update_entry_type(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'entry_type.name=name_value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_update_entry_type_field_headers_async(): - client = CatalogServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = catalog.UpdateEntryTypeRequest() - - request.entry_type.name = 'name_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_entry_type), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(operations_pb2.Operation(name='operations/op')) - await client.update_entry_type(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'entry_type.name=name_value', - ) in kw['metadata'] - - -def test_update_entry_type_flattened(): - client = CatalogServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_entry_type), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = operations_pb2.Operation(name='operations/op') - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.update_entry_type( - entry_type=catalog.EntryType(name='name_value'), - update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - arg = args[0].entry_type - mock_val = catalog.EntryType(name='name_value') - assert arg == mock_val - arg = args[0].update_mask - mock_val = field_mask_pb2.FieldMask(paths=['paths_value']) - assert arg == mock_val - - -def test_update_entry_type_flattened_error(): - client = CatalogServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.update_entry_type( - catalog.UpdateEntryTypeRequest(), - entry_type=catalog.EntryType(name='name_value'), - update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), - ) - -@pytest.mark.asyncio -async def test_update_entry_type_flattened_async(): - client = CatalogServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_entry_type), - '__call__') as call: - # Designate an appropriate return value for the call. 
- call.return_value = operations_pb2.Operation(name='operations/op') - - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name='operations/spam') - ) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.update_entry_type( - entry_type=catalog.EntryType(name='name_value'), - update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].entry_type - mock_val = catalog.EntryType(name='name_value') - assert arg == mock_val - arg = args[0].update_mask - mock_val = field_mask_pb2.FieldMask(paths=['paths_value']) - assert arg == mock_val - -@pytest.mark.asyncio -async def test_update_entry_type_flattened_error_async(): - client = CatalogServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - await client.update_entry_type( - catalog.UpdateEntryTypeRequest(), - entry_type=catalog.EntryType(name='name_value'), - update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), - ) - - -@pytest.mark.parametrize("request_type", [ - catalog.DeleteEntryTypeRequest, - dict, -]) -def test_delete_entry_type(request_type, transport: str = 'grpc'): - client = CatalogServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_entry_type), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = operations_pb2.Operation(name='operations/spam') - response = client.delete_entry_type(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - request = catalog.DeleteEntryTypeRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, future.Future) - - -def test_delete_entry_type_non_empty_request_with_auto_populated_field(): - # This test is a coverage failsafe to make sure that UUID4 fields are - # automatically populated, according to AIP-4235, with non-empty requests. - client = CatalogServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. - request = catalog.DeleteEntryTypeRequest( - name='name_value', - etag='etag_value', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_entry_type), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. 
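- # (Pinning `name` to a real str matters because a bare MagicMock attribute
- # would otherwise leak into any string comparison performed on the fake
- # operation before the assertions below run.)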
- client.delete_entry_type(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == catalog.DeleteEntryTypeRequest( - name='name_value', - etag='etag_value', - ) - -def test_delete_entry_type_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = CatalogServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.delete_entry_type in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.delete_entry_type] = mock_rpc - request = {} - client.delete_entry_type(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - # Operation methods call wrapper_fn to build a cached - # client._transport.operations_client instance on first rpc call. - # Subsequent calls should use the cached wrapper - wrapper_fn.reset_mock() - - client.delete_entry_type(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_delete_entry_type_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: - client = CatalogServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._client._transport.delete_entry_type in client._client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.AsyncMock() - mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[client._client._transport.delete_entry_type] = mock_rpc - - request = {} - await client.delete_entry_type(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - # Operation methods call wrapper_fn to build a cached - # client._transport.operations_client instance on first rpc call. - # Subsequent calls should use the cached wrapper - wrapper_fn.reset_mock() - - await client.delete_entry_type(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_delete_entry_type_async(transport: str = 'grpc_asyncio', request_type=catalog.DeleteEntryTypeRequest): - client = CatalogServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.delete_entry_type), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name='operations/spam') - ) - response = await client.delete_entry_type(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - request = catalog.DeleteEntryTypeRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, future.Future) - - -@pytest.mark.asyncio -async def test_delete_entry_type_async_from_dict(): - await test_delete_entry_type_async(request_type=dict) - -def test_delete_entry_type_field_headers(): - client = CatalogServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = catalog.DeleteEntryTypeRequest() - - request.name = 'name_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_entry_type), - '__call__') as call: - call.return_value = operations_pb2.Operation(name='operations/op') - client.delete_entry_type(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'name=name_value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_delete_entry_type_field_headers_async(): - client = CatalogServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = catalog.DeleteEntryTypeRequest() - - request.name = 'name_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_entry_type), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(operations_pb2.Operation(name='operations/op')) - await client.delete_entry_type(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'name=name_value', - ) in kw['metadata'] - - -def test_delete_entry_type_flattened(): - client = CatalogServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_entry_type), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = operations_pb2.Operation(name='operations/op') - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.delete_entry_type( - name='name_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. 
- assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = 'name_value' - assert arg == mock_val - - -def test_delete_entry_type_flattened_error(): - client = CatalogServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.delete_entry_type( - catalog.DeleteEntryTypeRequest(), - name='name_value', - ) - -@pytest.mark.asyncio -async def test_delete_entry_type_flattened_async(): - client = CatalogServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_entry_type), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = operations_pb2.Operation(name='operations/op') - - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name='operations/spam') - ) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.delete_entry_type( - name='name_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = 'name_value' - assert arg == mock_val - -@pytest.mark.asyncio -async def test_delete_entry_type_flattened_error_async(): - client = CatalogServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - await client.delete_entry_type( - catalog.DeleteEntryTypeRequest(), - name='name_value', - ) - - -@pytest.mark.parametrize("request_type", [ - catalog.ListEntryTypesRequest, - dict, -]) -def test_list_entry_types(request_type, transport: str = 'grpc'): - client = CatalogServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_entry_types), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = catalog.ListEntryTypesResponse( - next_page_token='next_page_token_value', - unreachable_locations=['unreachable_locations_value'], - ) - response = client.list_entry_types(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - request = catalog.ListEntryTypesRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, pagers.ListEntryTypesPager) - assert response.next_page_token == 'next_page_token_value' - assert response.unreachable_locations == ['unreachable_locations_value'] - - -def test_list_entry_types_non_empty_request_with_auto_populated_field(): - # This test is a coverage failsafe to make sure that UUID4 fields are - # automatically populated, according to AIP-4235, with non-empty requests. 
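- # (Under AIP-4235, a UUID4-annotated request field, e.g. a request_id, is
- # filled in by the client when left unset; the string fields set explicitly
- # below must reach the stub unchanged, which the equality assertion checks.)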
- client = CatalogServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. - request = catalog.ListEntryTypesRequest( - parent='parent_value', - page_token='page_token_value', - filter='filter_value', - order_by='order_by_value', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_entry_types), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client.list_entry_types(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == catalog.ListEntryTypesRequest( - parent='parent_value', - page_token='page_token_value', - filter='filter_value', - order_by='order_by_value', - ) - -def test_list_entry_types_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = CatalogServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.list_entry_types in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.list_entry_types] = mock_rpc - request = {} - client.list_entry_types(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - client.list_entry_types(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_list_entry_types_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: - client = CatalogServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._client._transport.list_entry_types in client._client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.AsyncMock() - mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[client._client._transport.list_entry_types] = mock_rpc - - request = {} - await client.list_entry_types(request) - - # Establish that the underlying gRPC stub method was called. 
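- # (list_entry_types is a plain unary method rather than a long-running
- # operation, so unlike the create/update/delete tests above there is no
- # lazily built operations_client and no wrapper_fn.reset_mock() step here.)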
- assert mock_rpc.call_count == 1
-
- await client.list_entry_types(request)
-
- # Establish that a new wrapper was not created for this call
- assert wrapper_fn.call_count == 0
- assert mock_rpc.call_count == 2
-
-@pytest.mark.asyncio
-async def test_list_entry_types_async(transport: str = 'grpc_asyncio', request_type=catalog.ListEntryTypesRequest):
- client = CatalogServiceAsyncClient(
- credentials=async_anonymous_credentials(),
- transport=transport,
- )
-
- # Everything is optional in proto3 as far as the runtime is concerned,
- # and we are mocking out the actual API, so just send an empty request.
- request = request_type()
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client.transport.list_entry_types),
- '__call__') as call:
- # Designate an appropriate return value for the call.
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(catalog.ListEntryTypesResponse(
- next_page_token='next_page_token_value',
- unreachable_locations=['unreachable_locations_value'],
- ))
- response = await client.list_entry_types(request)
-
- # Establish that the underlying gRPC stub method was called.
- assert len(call.mock_calls)
- _, args, _ = call.mock_calls[0]
- request = catalog.ListEntryTypesRequest()
- assert args[0] == request
-
- # Establish that the response is the type that we expect.
- assert isinstance(response, pagers.ListEntryTypesAsyncPager)
- assert response.next_page_token == 'next_page_token_value'
- assert response.unreachable_locations == ['unreachable_locations_value']
-
-
-@pytest.mark.asyncio
-async def test_list_entry_types_async_from_dict():
- await test_list_entry_types_async(request_type=dict)
-
-def test_list_entry_types_field_headers():
- client = CatalogServiceClient(
- credentials=ga_credentials.AnonymousCredentials(),
- )
-
- # Any value that is part of the HTTP/1.1 URI should be sent as
- # a field header. Set these to a non-empty value.
- request = catalog.ListEntryTypesRequest()
-
- request.parent = 'parent_value'
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client.transport.list_entry_types),
- '__call__') as call:
- call.return_value = catalog.ListEntryTypesResponse()
- client.list_entry_types(request)
-
- # Establish that the underlying gRPC stub method was called.
- assert len(call.mock_calls) == 1
- _, args, _ = call.mock_calls[0]
- assert args[0] == request
-
- # Establish that the field header was sent.
- _, _, kw = call.mock_calls[0]
- assert (
- 'x-goog-request-params',
- 'parent=parent_value',
- ) in kw['metadata']
-
-
-@pytest.mark.asyncio
-async def test_list_entry_types_field_headers_async():
- client = CatalogServiceAsyncClient(
- credentials=async_anonymous_credentials(),
- )
-
- # Any value that is part of the HTTP/1.1 URI should be sent as
- # a field header. Set these to a non-empty value.
- request = catalog.ListEntryTypesRequest()
-
- request.parent = 'parent_value'
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client.transport.list_entry_types),
- '__call__') as call:
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(catalog.ListEntryTypesResponse())
- await client.list_entry_types(request)
-
- # Establish that the underlying gRPC stub method was called.
- assert len(call.mock_calls)
- _, args, _ = call.mock_calls[0]
- assert args[0] == request
-
- # Establish that the field header was sent.
- _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'parent=parent_value', - ) in kw['metadata'] - - -def test_list_entry_types_flattened(): - client = CatalogServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_entry_types), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = catalog.ListEntryTypesResponse() - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.list_entry_types( - parent='parent_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - arg = args[0].parent - mock_val = 'parent_value' - assert arg == mock_val - - -def test_list_entry_types_flattened_error(): - client = CatalogServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.list_entry_types( - catalog.ListEntryTypesRequest(), - parent='parent_value', - ) - -@pytest.mark.asyncio -async def test_list_entry_types_flattened_async(): - client = CatalogServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_entry_types), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = catalog.ListEntryTypesResponse() - - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(catalog.ListEntryTypesResponse()) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.list_entry_types( - parent='parent_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].parent - mock_val = 'parent_value' - assert arg == mock_val - -@pytest.mark.asyncio -async def test_list_entry_types_flattened_error_async(): - client = CatalogServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - await client.list_entry_types( - catalog.ListEntryTypesRequest(), - parent='parent_value', - ) - - -def test_list_entry_types_pager(transport_name: str = "grpc"): - client = CatalogServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport_name, - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_entry_types), - '__call__') as call: - # Set the response to a series of pages. 
- call.side_effect = (
- catalog.ListEntryTypesResponse(
- entry_types=[
- catalog.EntryType(),
- catalog.EntryType(),
- catalog.EntryType(),
- ],
- next_page_token='abc',
- ),
- catalog.ListEntryTypesResponse(
- entry_types=[],
- next_page_token='def',
- ),
- catalog.ListEntryTypesResponse(
- entry_types=[
- catalog.EntryType(),
- ],
- next_page_token='ghi',
- ),
- catalog.ListEntryTypesResponse(
- entry_types=[
- catalog.EntryType(),
- catalog.EntryType(),
- ],
- ),
- RuntimeError,
- )
-
- expected_metadata = ()
- retry = retries.Retry()
- timeout = 5
- expected_metadata = tuple(expected_metadata) + (
- gapic_v1.routing_header.to_grpc_metadata((
- ('parent', ''),
- )),
- )
- pager = client.list_entry_types(request={}, retry=retry, timeout=timeout)
-
- assert pager._metadata == expected_metadata
- assert pager._retry == retry
- assert pager._timeout == timeout
-
- results = list(pager)
- assert len(results) == 6
- assert all(isinstance(i, catalog.EntryType)
- for i in results)
-
-def test_list_entry_types_pages(transport_name: str = "grpc"):
- client = CatalogServiceClient(
- credentials=ga_credentials.AnonymousCredentials(),
- transport=transport_name,
- )
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client.transport.list_entry_types),
- '__call__') as call:
- # Set the response to a series of pages.
- call.side_effect = (
- catalog.ListEntryTypesResponse(
- entry_types=[
- catalog.EntryType(),
- catalog.EntryType(),
- catalog.EntryType(),
- ],
- next_page_token='abc',
- ),
- catalog.ListEntryTypesResponse(
- entry_types=[],
- next_page_token='def',
- ),
- catalog.ListEntryTypesResponse(
- entry_types=[
- catalog.EntryType(),
- ],
- next_page_token='ghi',
- ),
- catalog.ListEntryTypesResponse(
- entry_types=[
- catalog.EntryType(),
- catalog.EntryType(),
- ],
- ),
- RuntimeError,
- )
- pages = list(client.list_entry_types(request={}).pages)
- for page_, token in zip(pages, ['abc','def','ghi', '']):
- assert page_.raw_page.next_page_token == token
-
-@pytest.mark.asyncio
-async def test_list_entry_types_async_pager():
- client = CatalogServiceAsyncClient(
- credentials=async_anonymous_credentials(),
- )
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client.transport.list_entry_types),
- '__call__', new_callable=mock.AsyncMock) as call:
- # Set the response to a series of pages.
- call.side_effect = (
- catalog.ListEntryTypesResponse(
- entry_types=[
- catalog.EntryType(),
- catalog.EntryType(),
- catalog.EntryType(),
- ],
- next_page_token='abc',
- ),
- catalog.ListEntryTypesResponse(
- entry_types=[],
- next_page_token='def',
- ),
- catalog.ListEntryTypesResponse(
- entry_types=[
- catalog.EntryType(),
- ],
- next_page_token='ghi',
- ),
- catalog.ListEntryTypesResponse(
- entry_types=[
- catalog.EntryType(),
- catalog.EntryType(),
- ],
- ),
- RuntimeError,
- )
- async_pager = await client.list_entry_types(request={},)
- assert async_pager.next_page_token == 'abc'
- responses = []
- async for response in async_pager: # pragma: no branch
- responses.append(response)
-
- assert len(responses) == 6
- assert all(isinstance(i, catalog.EntryType)
- for i in responses)
-
-
-@pytest.mark.asyncio
-async def test_list_entry_types_async_pages():
- client = CatalogServiceAsyncClient(
- credentials=async_anonymous_credentials(),
- )
-
- # Mock the actual call within the gRPC stub, and fake the request.
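- # The async pager yields EntryType items across page boundaries, while its
- # `.pages` attribute, exercised below, yields one raw ListEntryTypesResponse
- # per mocked page; a sketch of the two shapes:
- #
- #   async for item in await client.list_entry_types(request={}):
- #       ...  # 6 items in total from the 4 pages above
- #   async for page in (await client.list_entry_types(request={})).pages:
- #       page.raw_page.next_page_token  # 'abc', 'def', 'ghi', ''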
- with mock.patch.object( - type(client.transport.list_entry_types), - '__call__', new_callable=mock.AsyncMock) as call: - # Set the response to a series of pages. - call.side_effect = ( - catalog.ListEntryTypesResponse( - entry_types=[ - catalog.EntryType(), - catalog.EntryType(), - catalog.EntryType(), - ], - next_page_token='abc', - ), - catalog.ListEntryTypesResponse( - entry_types=[], - next_page_token='def', - ), - catalog.ListEntryTypesResponse( - entry_types=[ - catalog.EntryType(), - ], - next_page_token='ghi', - ), - catalog.ListEntryTypesResponse( - entry_types=[ - catalog.EntryType(), - catalog.EntryType(), - ], - ), - RuntimeError, - ) - pages = [] - # Workaround issue in python 3.9 related to code coverage by adding `# pragma: no branch` - # See https://github.com/googleapis/gapic-generator-python/pull/1174#issuecomment-1025132372 - async for page_ in ( # pragma: no branch - await client.list_entry_types(request={}) - ).pages: - pages.append(page_) - for page_, token in zip(pages, ['abc','def','ghi', '']): - assert page_.raw_page.next_page_token == token - -@pytest.mark.parametrize("request_type", [ - catalog.GetEntryTypeRequest, - dict, -]) -def test_get_entry_type(request_type, transport: str = 'grpc'): - client = CatalogServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_entry_type), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = catalog.EntryType( - name='name_value', - uid='uid_value', - description='description_value', - display_name='display_name_value', - etag='etag_value', - type_aliases=['type_aliases_value'], - platform='platform_value', - system='system_value', - ) - response = client.get_entry_type(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - request = catalog.GetEntryTypeRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, catalog.EntryType) - assert response.name == 'name_value' - assert response.uid == 'uid_value' - assert response.description == 'description_value' - assert response.display_name == 'display_name_value' - assert response.etag == 'etag_value' - assert response.type_aliases == ['type_aliases_value'] - assert response.platform == 'platform_value' - assert response.system == 'system_value' - - -def test_get_entry_type_non_empty_request_with_auto_populated_field(): - # This test is a coverage failsafe to make sure that UUID4 fields are - # automatically populated, according to AIP-4235, with non-empty requests. - client = CatalogServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. - request = catalog.GetEntryTypeRequest( - name='name_value', - ) - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.get_entry_type), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client.get_entry_type(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == catalog.GetEntryTypeRequest( - name='name_value', - ) - -def test_get_entry_type_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = CatalogServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.get_entry_type in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.get_entry_type] = mock_rpc - request = {} - client.get_entry_type(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - client.get_entry_type(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_get_entry_type_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: - client = CatalogServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._client._transport.get_entry_type in client._client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.AsyncMock() - mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[client._client._transport.get_entry_type] = mock_rpc - - request = {} - await client.get_entry_type(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - await client.get_entry_type(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_get_entry_type_async(transport: str = 'grpc_asyncio', request_type=catalog.GetEntryTypeRequest): - client = CatalogServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_entry_type), - '__call__') as call: - # Designate an appropriate return value for the call. 
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(catalog.EntryType(
- name='name_value',
- uid='uid_value',
- description='description_value',
- display_name='display_name_value',
- etag='etag_value',
- type_aliases=['type_aliases_value'],
- platform='platform_value',
- system='system_value',
- ))
- response = await client.get_entry_type(request)
-
- # Establish that the underlying gRPC stub method was called.
- assert len(call.mock_calls)
- _, args, _ = call.mock_calls[0]
- request = catalog.GetEntryTypeRequest()
- assert args[0] == request
-
- # Establish that the response is the type that we expect.
- assert isinstance(response, catalog.EntryType)
- assert response.name == 'name_value'
- assert response.uid == 'uid_value'
- assert response.description == 'description_value'
- assert response.display_name == 'display_name_value'
- assert response.etag == 'etag_value'
- assert response.type_aliases == ['type_aliases_value']
- assert response.platform == 'platform_value'
- assert response.system == 'system_value'
-
-
-@pytest.mark.asyncio
-async def test_get_entry_type_async_from_dict():
- await test_get_entry_type_async(request_type=dict)
-
-def test_get_entry_type_field_headers():
- client = CatalogServiceClient(
- credentials=ga_credentials.AnonymousCredentials(),
- )
-
- # Any value that is part of the HTTP/1.1 URI should be sent as
- # a field header. Set these to a non-empty value.
- request = catalog.GetEntryTypeRequest()
-
- request.name = 'name_value'
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client.transport.get_entry_type),
- '__call__') as call:
- call.return_value = catalog.EntryType()
- client.get_entry_type(request)
-
- # Establish that the underlying gRPC stub method was called.
- assert len(call.mock_calls) == 1
- _, args, _ = call.mock_calls[0]
- assert args[0] == request
-
- # Establish that the field header was sent.
- _, _, kw = call.mock_calls[0]
- assert (
- 'x-goog-request-params',
- 'name=name_value',
- ) in kw['metadata']
-
-
-@pytest.mark.asyncio
-async def test_get_entry_type_field_headers_async():
- client = CatalogServiceAsyncClient(
- credentials=async_anonymous_credentials(),
- )
-
- # Any value that is part of the HTTP/1.1 URI should be sent as
- # a field header. Set these to a non-empty value.
- request = catalog.GetEntryTypeRequest()
-
- request.name = 'name_value'
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client.transport.get_entry_type),
- '__call__') as call:
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(catalog.EntryType())
- await client.get_entry_type(request)
-
- # Establish that the underlying gRPC stub method was called.
- assert len(call.mock_calls)
- _, args, _ = call.mock_calls[0]
- assert args[0] == request
-
- # Establish that the field header was sent.
- _, _, kw = call.mock_calls[0]
- assert (
- 'x-goog-request-params',
- 'name=name_value',
- ) in kw['metadata']
-
-
-def test_get_entry_type_flattened():
- client = CatalogServiceClient(
- credentials=ga_credentials.AnonymousCredentials(),
- )
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client.transport.get_entry_type),
- '__call__') as call:
- # Designate an appropriate return value for the call.
- call.return_value = catalog.EntryType()
- # Call the method with a truthy value for each flattened field,
- # using the keyword arguments to the method.
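- # (The flattened kwargs are merged into a fresh GetEntryTypeRequest inside
- # the client, so the stub still receives a single request object; supplying
- # both a request and flattened fields raises ValueError, as the
- # *_flattened_error tests that follow demonstrate.)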
- client.get_entry_type( - name='name_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = 'name_value' - assert arg == mock_val - - -def test_get_entry_type_flattened_error(): - client = CatalogServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.get_entry_type( - catalog.GetEntryTypeRequest(), - name='name_value', - ) - -@pytest.mark.asyncio -async def test_get_entry_type_flattened_async(): - client = CatalogServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_entry_type), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = catalog.EntryType() - - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(catalog.EntryType()) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.get_entry_type( - name='name_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = 'name_value' - assert arg == mock_val - -@pytest.mark.asyncio -async def test_get_entry_type_flattened_error_async(): - client = CatalogServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - await client.get_entry_type( - catalog.GetEntryTypeRequest(), - name='name_value', - ) - - -@pytest.mark.parametrize("request_type", [ - catalog.CreateAspectTypeRequest, - dict, -]) -def test_create_aspect_type(request_type, transport: str = 'grpc'): - client = CatalogServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_aspect_type), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = operations_pb2.Operation(name='operations/spam') - response = client.create_aspect_type(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - request = catalog.CreateAspectTypeRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, future.Future) - - -def test_create_aspect_type_non_empty_request_with_auto_populated_field(): - # This test is a coverage failsafe to make sure that UUID4 fields are - # automatically populated, according to AIP-4235, with non-empty requests. 
- client = CatalogServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. - request = catalog.CreateAspectTypeRequest( - parent='parent_value', - aspect_type_id='aspect_type_id_value', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_aspect_type), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client.create_aspect_type(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == catalog.CreateAspectTypeRequest( - parent='parent_value', - aspect_type_id='aspect_type_id_value', - ) - -def test_create_aspect_type_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = CatalogServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.create_aspect_type in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.create_aspect_type] = mock_rpc - request = {} - client.create_aspect_type(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - # Operation methods call wrapper_fn to build a cached - # client._transport.operations_client instance on first rpc call. - # Subsequent calls should use the cached wrapper - wrapper_fn.reset_mock() - - client.create_aspect_type(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_create_aspect_type_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: - client = CatalogServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._client._transport.create_aspect_type in client._client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.AsyncMock() - mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[client._client._transport.create_aspect_type] = mock_rpc - - request = {} - await client.create_aspect_type(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - # Operation methods call wrapper_fn to build a cached - # client._transport.operations_client instance on first rpc call. 
- # Subsequent calls should use the cached wrapper - wrapper_fn.reset_mock() - - await client.create_aspect_type(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_create_aspect_type_async(transport: str = 'grpc_asyncio', request_type=catalog.CreateAspectTypeRequest): - client = CatalogServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_aspect_type), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name='operations/spam') - ) - response = await client.create_aspect_type(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - request = catalog.CreateAspectTypeRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, future.Future) - - -@pytest.mark.asyncio -async def test_create_aspect_type_async_from_dict(): - await test_create_aspect_type_async(request_type=dict) - -def test_create_aspect_type_field_headers(): - client = CatalogServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = catalog.CreateAspectTypeRequest() - - request.parent = 'parent_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_aspect_type), - '__call__') as call: - call.return_value = operations_pb2.Operation(name='operations/op') - client.create_aspect_type(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'parent=parent_value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_create_aspect_type_field_headers_async(): - client = CatalogServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = catalog.CreateAspectTypeRequest() - - request.parent = 'parent_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_aspect_type), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(operations_pb2.Operation(name='operations/op')) - await client.create_aspect_type(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. 
-    _, _, kw = call.mock_calls[0]
-    assert (
-        'x-goog-request-params',
-        'parent=parent_value',
-    ) in kw['metadata']
-
-
-def test_create_aspect_type_flattened():
-    client = CatalogServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-    )
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.create_aspect_type),
-            '__call__') as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = operations_pb2.Operation(name='operations/op')
-        # Call the method with a truthy value for each flattened field,
-        # using the keyword arguments to the method.
-        client.create_aspect_type(
-            parent='parent_value',
-            aspect_type=catalog.AspectType(name='name_value'),
-            aspect_type_id='aspect_type_id_value',
-        )
-
-        # Establish that the underlying call was made with the expected
-        # request object values.
-        assert len(call.mock_calls) == 1
-        _, args, _ = call.mock_calls[0]
-        arg = args[0].parent
-        mock_val = 'parent_value'
-        assert arg == mock_val
-        arg = args[0].aspect_type
-        mock_val = catalog.AspectType(name='name_value')
-        assert arg == mock_val
-        arg = args[0].aspect_type_id
-        mock_val = 'aspect_type_id_value'
-        assert arg == mock_val
-
-
-def test_create_aspect_type_flattened_error():
-    client = CatalogServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-    )
-
-    # Attempting to call a method with both a request object and flattened
-    # fields is an error.
-    with pytest.raises(ValueError):
-        client.create_aspect_type(
-            catalog.CreateAspectTypeRequest(),
-            parent='parent_value',
-            aspect_type=catalog.AspectType(name='name_value'),
-            aspect_type_id='aspect_type_id_value',
-        )
-
-@pytest.mark.asyncio
-async def test_create_aspect_type_flattened_async():
-    client = CatalogServiceAsyncClient(
-        credentials=async_anonymous_credentials(),
-    )
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.create_aspect_type),
-            '__call__') as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
-            operations_pb2.Operation(name='operations/spam')
-        )
-        # Call the method with a truthy value for each flattened field,
-        # using the keyword arguments to the method.
-        response = await client.create_aspect_type(
-            parent='parent_value',
-            aspect_type=catalog.AspectType(name='name_value'),
-            aspect_type_id='aspect_type_id_value',
-        )
-
-        # Establish that the underlying call was made with the expected
-        # request object values.
-        assert len(call.mock_calls)
-        _, args, _ = call.mock_calls[0]
-        arg = args[0].parent
-        mock_val = 'parent_value'
-        assert arg == mock_val
-        arg = args[0].aspect_type
-        mock_val = catalog.AspectType(name='name_value')
-        assert arg == mock_val
-        arg = args[0].aspect_type_id
-        mock_val = 'aspect_type_id_value'
-        assert arg == mock_val
-
-@pytest.mark.asyncio
-async def test_create_aspect_type_flattened_error_async():
-    client = CatalogServiceAsyncClient(
-        credentials=async_anonymous_credentials(),
-    )
-
-    # Attempting to call a method with both a request object and flattened
-    # fields is an error.
- with pytest.raises(ValueError): - await client.create_aspect_type( - catalog.CreateAspectTypeRequest(), - parent='parent_value', - aspect_type=catalog.AspectType(name='name_value'), - aspect_type_id='aspect_type_id_value', - ) - - -@pytest.mark.parametrize("request_type", [ - catalog.UpdateAspectTypeRequest, - dict, -]) -def test_update_aspect_type(request_type, transport: str = 'grpc'): - client = CatalogServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_aspect_type), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = operations_pb2.Operation(name='operations/spam') - response = client.update_aspect_type(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - request = catalog.UpdateAspectTypeRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, future.Future) - - -def test_update_aspect_type_non_empty_request_with_auto_populated_field(): - # This test is a coverage failsafe to make sure that UUID4 fields are - # automatically populated, according to AIP-4235, with non-empty requests. - client = CatalogServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. - request = catalog.UpdateAspectTypeRequest( - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_aspect_type), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client.update_aspect_type(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == catalog.UpdateAspectTypeRequest( - ) - -def test_update_aspect_type_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = CatalogServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.update_aspect_type in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.update_aspect_type] = mock_rpc - request = {} - client.update_aspect_type(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - # Operation methods call wrapper_fn to build a cached - # client._transport.operations_client instance on first rpc call. 
- # Subsequent calls should use the cached wrapper - wrapper_fn.reset_mock() - - client.update_aspect_type(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_update_aspect_type_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: - client = CatalogServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._client._transport.update_aspect_type in client._client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.AsyncMock() - mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[client._client._transport.update_aspect_type] = mock_rpc - - request = {} - await client.update_aspect_type(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - # Operation methods call wrapper_fn to build a cached - # client._transport.operations_client instance on first rpc call. - # Subsequent calls should use the cached wrapper - wrapper_fn.reset_mock() - - await client.update_aspect_type(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_update_aspect_type_async(transport: str = 'grpc_asyncio', request_type=catalog.UpdateAspectTypeRequest): - client = CatalogServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_aspect_type), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name='operations/spam') - ) - response = await client.update_aspect_type(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - request = catalog.UpdateAspectTypeRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, future.Future) - - -@pytest.mark.asyncio -async def test_update_aspect_type_async_from_dict(): - await test_update_aspect_type_async(request_type=dict) - -def test_update_aspect_type_field_headers(): - client = CatalogServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = catalog.UpdateAspectTypeRequest() - - request.aspect_type.name = 'name_value' - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.update_aspect_type), - '__call__') as call: - call.return_value = operations_pb2.Operation(name='operations/op') - client.update_aspect_type(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'aspect_type.name=name_value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_update_aspect_type_field_headers_async(): - client = CatalogServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = catalog.UpdateAspectTypeRequest() - - request.aspect_type.name = 'name_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_aspect_type), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(operations_pb2.Operation(name='operations/op')) - await client.update_aspect_type(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'aspect_type.name=name_value', - ) in kw['metadata'] - - -def test_update_aspect_type_flattened(): - client = CatalogServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_aspect_type), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = operations_pb2.Operation(name='operations/op') - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.update_aspect_type( - aspect_type=catalog.AspectType(name='name_value'), - update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - arg = args[0].aspect_type - mock_val = catalog.AspectType(name='name_value') - assert arg == mock_val - arg = args[0].update_mask - mock_val = field_mask_pb2.FieldMask(paths=['paths_value']) - assert arg == mock_val - - -def test_update_aspect_type_flattened_error(): - client = CatalogServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.update_aspect_type( - catalog.UpdateAspectTypeRequest(), - aspect_type=catalog.AspectType(name='name_value'), - update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), - ) - -@pytest.mark.asyncio -async def test_update_aspect_type_flattened_async(): - client = CatalogServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_aspect_type), - '__call__') as call: - # Designate an appropriate return value for the call. 
-        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
-            operations_pb2.Operation(name='operations/spam')
-        )
-        # Call the method with a truthy value for each flattened field,
-        # using the keyword arguments to the method.
-        response = await client.update_aspect_type(
-            aspect_type=catalog.AspectType(name='name_value'),
-            update_mask=field_mask_pb2.FieldMask(paths=['paths_value']),
-        )
-
-        # Establish that the underlying call was made with the expected
-        # request object values.
-        assert len(call.mock_calls)
-        _, args, _ = call.mock_calls[0]
-        arg = args[0].aspect_type
-        mock_val = catalog.AspectType(name='name_value')
-        assert arg == mock_val
-        arg = args[0].update_mask
-        mock_val = field_mask_pb2.FieldMask(paths=['paths_value'])
-        assert arg == mock_val
-
-@pytest.mark.asyncio
-async def test_update_aspect_type_flattened_error_async():
-    client = CatalogServiceAsyncClient(
-        credentials=async_anonymous_credentials(),
-    )
-
-    # Attempting to call a method with both a request object and flattened
-    # fields is an error.
-    with pytest.raises(ValueError):
-        await client.update_aspect_type(
-            catalog.UpdateAspectTypeRequest(),
-            aspect_type=catalog.AspectType(name='name_value'),
-            update_mask=field_mask_pb2.FieldMask(paths=['paths_value']),
-        )
-
-
-@pytest.mark.parametrize("request_type", [
-    catalog.DeleteAspectTypeRequest,
-    dict,
-])
-def test_delete_aspect_type(request_type, transport: str = 'grpc'):
-    client = CatalogServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport=transport,
-    )
-
-    # Everything is optional in proto3 as far as the runtime is concerned,
-    # and we are mocking out the actual API, so just send an empty request.
-    request = request_type()
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.delete_aspect_type),
-            '__call__') as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = operations_pb2.Operation(name='operations/spam')
-        response = client.delete_aspect_type(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert len(call.mock_calls) == 1
-        _, args, _ = call.mock_calls[0]
-        request = catalog.DeleteAspectTypeRequest()
-        assert args[0] == request
-
-    # Establish that the response is the type that we expect.
-    assert isinstance(response, future.Future)
-
-
-def test_delete_aspect_type_non_empty_request_with_auto_populated_field():
-    # This test is a coverage failsafe to make sure that UUID4 fields are
-    # automatically populated, according to AIP-4235, with non-empty requests.
-    client = CatalogServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport='grpc',
-    )
-
-    # Populate all string fields in the request which are not UUID4
-    # since we want to check that UUID4 are populated automatically
-    # if they meet the requirements of AIP 4235.
-    request = catalog.DeleteAspectTypeRequest(
-        name='name_value',
-        etag='etag_value',
-    )
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.delete_aspect_type),
-            '__call__') as call:
-        call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string.
- client.delete_aspect_type(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == catalog.DeleteAspectTypeRequest( - name='name_value', - etag='etag_value', - ) - -def test_delete_aspect_type_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = CatalogServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.delete_aspect_type in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.delete_aspect_type] = mock_rpc - request = {} - client.delete_aspect_type(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - # Operation methods call wrapper_fn to build a cached - # client._transport.operations_client instance on first rpc call. - # Subsequent calls should use the cached wrapper - wrapper_fn.reset_mock() - - client.delete_aspect_type(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_delete_aspect_type_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: - client = CatalogServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._client._transport.delete_aspect_type in client._client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.AsyncMock() - mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[client._client._transport.delete_aspect_type] = mock_rpc - - request = {} - await client.delete_aspect_type(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - # Operation methods call wrapper_fn to build a cached - # client._transport.operations_client instance on first rpc call. - # Subsequent calls should use the cached wrapper - wrapper_fn.reset_mock() - - await client.delete_aspect_type(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_delete_aspect_type_async(transport: str = 'grpc_asyncio', request_type=catalog.DeleteAspectTypeRequest): - client = CatalogServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.delete_aspect_type), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name='operations/spam') - ) - response = await client.delete_aspect_type(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - request = catalog.DeleteAspectTypeRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, future.Future) - - -@pytest.mark.asyncio -async def test_delete_aspect_type_async_from_dict(): - await test_delete_aspect_type_async(request_type=dict) - -def test_delete_aspect_type_field_headers(): - client = CatalogServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = catalog.DeleteAspectTypeRequest() - - request.name = 'name_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_aspect_type), - '__call__') as call: - call.return_value = operations_pb2.Operation(name='operations/op') - client.delete_aspect_type(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'name=name_value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_delete_aspect_type_field_headers_async(): - client = CatalogServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = catalog.DeleteAspectTypeRequest() - - request.name = 'name_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_aspect_type), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(operations_pb2.Operation(name='operations/op')) - await client.delete_aspect_type(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'name=name_value', - ) in kw['metadata'] - - -def test_delete_aspect_type_flattened(): - client = CatalogServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_aspect_type), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = operations_pb2.Operation(name='operations/op') - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.delete_aspect_type( - name='name_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. 
-        assert len(call.mock_calls) == 1
-        _, args, _ = call.mock_calls[0]
-        arg = args[0].name
-        mock_val = 'name_value'
-        assert arg == mock_val
-
-
-def test_delete_aspect_type_flattened_error():
-    client = CatalogServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-    )
-
-    # Attempting to call a method with both a request object and flattened
-    # fields is an error.
-    with pytest.raises(ValueError):
-        client.delete_aspect_type(
-            catalog.DeleteAspectTypeRequest(),
-            name='name_value',
-        )
-
-@pytest.mark.asyncio
-async def test_delete_aspect_type_flattened_async():
-    client = CatalogServiceAsyncClient(
-        credentials=async_anonymous_credentials(),
-    )
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.delete_aspect_type),
-            '__call__') as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
-            operations_pb2.Operation(name='operations/spam')
-        )
-        # Call the method with a truthy value for each flattened field,
-        # using the keyword arguments to the method.
-        response = await client.delete_aspect_type(
-            name='name_value',
-        )
-
-        # Establish that the underlying call was made with the expected
-        # request object values.
-        assert len(call.mock_calls)
-        _, args, _ = call.mock_calls[0]
-        arg = args[0].name
-        mock_val = 'name_value'
-        assert arg == mock_val
-
-@pytest.mark.asyncio
-async def test_delete_aspect_type_flattened_error_async():
-    client = CatalogServiceAsyncClient(
-        credentials=async_anonymous_credentials(),
-    )
-
-    # Attempting to call a method with both a request object and flattened
-    # fields is an error.
-    with pytest.raises(ValueError):
-        await client.delete_aspect_type(
-            catalog.DeleteAspectTypeRequest(),
-            name='name_value',
-        )
-
-
-@pytest.mark.parametrize("request_type", [
-    catalog.ListAspectTypesRequest,
-    dict,
-])
-def test_list_aspect_types(request_type, transport: str = 'grpc'):
-    client = CatalogServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport=transport,
-    )
-
-    # Everything is optional in proto3 as far as the runtime is concerned,
-    # and we are mocking out the actual API, so just send an empty request.
-    request = request_type()
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.list_aspect_types),
-            '__call__') as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = catalog.ListAspectTypesResponse(
-            next_page_token='next_page_token_value',
-            unreachable_locations=['unreachable_locations_value'],
-        )
-        response = client.list_aspect_types(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert len(call.mock_calls) == 1
-        _, args, _ = call.mock_calls[0]
-        request = catalog.ListAspectTypesRequest()
-        assert args[0] == request
-
-    # Establish that the response is the type that we expect.
-    assert isinstance(response, pagers.ListAspectTypesPager)
-    assert response.next_page_token == 'next_page_token_value'
-    assert response.unreachable_locations == ['unreachable_locations_value']
-
-
-def test_list_aspect_types_non_empty_request_with_auto_populated_field():
-    # This test is a coverage failsafe to make sure that UUID4 fields are
-    # automatically populated, according to AIP-4235, with non-empty requests.
- client = CatalogServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. - request = catalog.ListAspectTypesRequest( - parent='parent_value', - page_token='page_token_value', - filter='filter_value', - order_by='order_by_value', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_aspect_types), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client.list_aspect_types(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == catalog.ListAspectTypesRequest( - parent='parent_value', - page_token='page_token_value', - filter='filter_value', - order_by='order_by_value', - ) - -def test_list_aspect_types_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = CatalogServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.list_aspect_types in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.list_aspect_types] = mock_rpc - request = {} - client.list_aspect_types(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - client.list_aspect_types(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_list_aspect_types_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: - client = CatalogServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._client._transport.list_aspect_types in client._client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.AsyncMock() - mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[client._client._transport.list_aspect_types] = mock_rpc - - request = {} - await client.list_aspect_types(request) - - # Establish that the underlying gRPC stub method was called. 
-        assert mock_rpc.call_count == 1
-
-        await client.list_aspect_types(request)
-
-        # Establish that a new wrapper was not created for this call
-        assert wrapper_fn.call_count == 0
-        assert mock_rpc.call_count == 2
-
-@pytest.mark.asyncio
-async def test_list_aspect_types_async(transport: str = 'grpc_asyncio', request_type=catalog.ListAspectTypesRequest):
-    client = CatalogServiceAsyncClient(
-        credentials=async_anonymous_credentials(),
-        transport=transport,
-    )
-
-    # Everything is optional in proto3 as far as the runtime is concerned,
-    # and we are mocking out the actual API, so just send an empty request.
-    request = request_type()
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.list_aspect_types),
-            '__call__') as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(catalog.ListAspectTypesResponse(
-            next_page_token='next_page_token_value',
-            unreachable_locations=['unreachable_locations_value'],
-        ))
-        response = await client.list_aspect_types(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert len(call.mock_calls)
-        _, args, _ = call.mock_calls[0]
-        request = catalog.ListAspectTypesRequest()
-        assert args[0] == request
-
-    # Establish that the response is the type that we expect.
-    assert isinstance(response, pagers.ListAspectTypesAsyncPager)
-    assert response.next_page_token == 'next_page_token_value'
-    assert response.unreachable_locations == ['unreachable_locations_value']
-
-
-@pytest.mark.asyncio
-async def test_list_aspect_types_async_from_dict():
-    await test_list_aspect_types_async(request_type=dict)
-
-def test_list_aspect_types_field_headers():
-    client = CatalogServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-    )
-
-    # Any value that is part of the HTTP/1.1 URI should be sent as
-    # a field header. Set these to a non-empty value.
-    request = catalog.ListAspectTypesRequest()
-
-    request.parent = 'parent_value'
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.list_aspect_types),
-            '__call__') as call:
-        call.return_value = catalog.ListAspectTypesResponse()
-        client.list_aspect_types(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert len(call.mock_calls) == 1
-        _, args, _ = call.mock_calls[0]
-        assert args[0] == request
-
-    # Establish that the field header was sent.
-    _, _, kw = call.mock_calls[0]
-    assert (
-        'x-goog-request-params',
-        'parent=parent_value',
-    ) in kw['metadata']
-
-
-@pytest.mark.asyncio
-async def test_list_aspect_types_field_headers_async():
-    client = CatalogServiceAsyncClient(
-        credentials=async_anonymous_credentials(),
-    )
-
-    # Any value that is part of the HTTP/1.1 URI should be sent as
-    # a field header. Set these to a non-empty value.
-    request = catalog.ListAspectTypesRequest()
-
-    request.parent = 'parent_value'
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.list_aspect_types),
-            '__call__') as call:
-        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(catalog.ListAspectTypesResponse())
-        await client.list_aspect_types(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert len(call.mock_calls)
-        _, args, _ = call.mock_calls[0]
-        assert args[0] == request
-
-    # Establish that the field header was sent.
-    _, _, kw = call.mock_calls[0]
-    assert (
-        'x-goog-request-params',
-        'parent=parent_value',
-    ) in kw['metadata']
-
-
-def test_list_aspect_types_flattened():
-    client = CatalogServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-    )
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.list_aspect_types),
-            '__call__') as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = catalog.ListAspectTypesResponse()
-        # Call the method with a truthy value for each flattened field,
-        # using the keyword arguments to the method.
-        client.list_aspect_types(
-            parent='parent_value',
-        )
-
-        # Establish that the underlying call was made with the expected
-        # request object values.
-        assert len(call.mock_calls) == 1
-        _, args, _ = call.mock_calls[0]
-        arg = args[0].parent
-        mock_val = 'parent_value'
-        assert arg == mock_val
-
-
-def test_list_aspect_types_flattened_error():
-    client = CatalogServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-    )
-
-    # Attempting to call a method with both a request object and flattened
-    # fields is an error.
-    with pytest.raises(ValueError):
-        client.list_aspect_types(
-            catalog.ListAspectTypesRequest(),
-            parent='parent_value',
-        )
-
-@pytest.mark.asyncio
-async def test_list_aspect_types_flattened_async():
-    client = CatalogServiceAsyncClient(
-        credentials=async_anonymous_credentials(),
-    )
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.list_aspect_types),
-            '__call__') as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(catalog.ListAspectTypesResponse())
-        # Call the method with a truthy value for each flattened field,
-        # using the keyword arguments to the method.
-        response = await client.list_aspect_types(
-            parent='parent_value',
-        )
-
-        # Establish that the underlying call was made with the expected
-        # request object values.
-        assert len(call.mock_calls)
-        _, args, _ = call.mock_calls[0]
-        arg = args[0].parent
-        mock_val = 'parent_value'
-        assert arg == mock_val
-
-@pytest.mark.asyncio
-async def test_list_aspect_types_flattened_error_async():
-    client = CatalogServiceAsyncClient(
-        credentials=async_anonymous_credentials(),
-    )
-
-    # Attempting to call a method with both a request object and flattened
-    # fields is an error.
-    with pytest.raises(ValueError):
-        await client.list_aspect_types(
-            catalog.ListAspectTypesRequest(),
-            parent='parent_value',
-        )
-
-
-def test_list_aspect_types_pager(transport_name: str = "grpc"):
-    client = CatalogServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport=transport_name,
-    )
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.list_aspect_types),
-            '__call__') as call:
-        # Set the response to a series of pages.
-        call.side_effect = (
-            catalog.ListAspectTypesResponse(
-                aspect_types=[
-                    catalog.AspectType(),
-                    catalog.AspectType(),
-                    catalog.AspectType(),
-                ],
-                next_page_token='abc',
-            ),
-            catalog.ListAspectTypesResponse(
-                aspect_types=[],
-                next_page_token='def',
-            ),
-            catalog.ListAspectTypesResponse(
-                aspect_types=[
-                    catalog.AspectType(),
-                ],
-                next_page_token='ghi',
-            ),
-            catalog.ListAspectTypesResponse(
-                aspect_types=[
-                    catalog.AspectType(),
-                    catalog.AspectType(),
-                ],
-            ),
-            RuntimeError,
-        )
-
-        expected_metadata = ()
-        retry = retries.Retry()
-        timeout = 5
-        expected_metadata = tuple(expected_metadata) + (
-            gapic_v1.routing_header.to_grpc_metadata((
-                ('parent', ''),
-            )),
-        )
-        pager = client.list_aspect_types(request={}, retry=retry, timeout=timeout)
-
-        assert pager._metadata == expected_metadata
-        assert pager._retry == retry
-        assert pager._timeout == timeout
-
-        results = list(pager)
-        assert len(results) == 6
-        assert all(isinstance(i, catalog.AspectType)
-                   for i in results)
-
-
-def test_list_aspect_types_pages(transport_name: str = "grpc"):
-    client = CatalogServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport=transport_name,
-    )
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.list_aspect_types),
-            '__call__') as call:
-        # Set the response to a series of pages.
-        call.side_effect = (
-            catalog.ListAspectTypesResponse(
-                aspect_types=[
-                    catalog.AspectType(),
-                    catalog.AspectType(),
-                    catalog.AspectType(),
-                ],
-                next_page_token='abc',
-            ),
-            catalog.ListAspectTypesResponse(
-                aspect_types=[],
-                next_page_token='def',
-            ),
-            catalog.ListAspectTypesResponse(
-                aspect_types=[
-                    catalog.AspectType(),
-                ],
-                next_page_token='ghi',
-            ),
-            catalog.ListAspectTypesResponse(
-                aspect_types=[
-                    catalog.AspectType(),
-                    catalog.AspectType(),
-                ],
-            ),
-            RuntimeError,
-        )
-        pages = list(client.list_aspect_types(request={}).pages)
-        for page_, token in zip(pages, ['abc','def','ghi', '']):
-            assert page_.raw_page.next_page_token == token
-
-@pytest.mark.asyncio
-async def test_list_aspect_types_async_pager():
-    client = CatalogServiceAsyncClient(
-        credentials=async_anonymous_credentials(),
-    )
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.list_aspect_types),
-            '__call__', new_callable=mock.AsyncMock) as call:
-        # Set the response to a series of pages.
-        call.side_effect = (
-            catalog.ListAspectTypesResponse(
-                aspect_types=[
-                    catalog.AspectType(),
-                    catalog.AspectType(),
-                    catalog.AspectType(),
-                ],
-                next_page_token='abc',
-            ),
-            catalog.ListAspectTypesResponse(
-                aspect_types=[],
-                next_page_token='def',
-            ),
-            catalog.ListAspectTypesResponse(
-                aspect_types=[
-                    catalog.AspectType(),
-                ],
-                next_page_token='ghi',
-            ),
-            catalog.ListAspectTypesResponse(
-                aspect_types=[
-                    catalog.AspectType(),
-                    catalog.AspectType(),
-                ],
-            ),
-            RuntimeError,
-        )
-        async_pager = await client.list_aspect_types(request={},)
-        assert async_pager.next_page_token == 'abc'
-        responses = []
-        async for response in async_pager: # pragma: no branch
-            responses.append(response)
-
-        assert len(responses) == 6
-        assert all(isinstance(i, catalog.AspectType)
-                   for i in responses)
-
-
-@pytest.mark.asyncio
-async def test_list_aspect_types_async_pages():
-    client = CatalogServiceAsyncClient(
-        credentials=async_anonymous_credentials(),
-    )
-
-    # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object( - type(client.transport.list_aspect_types), - '__call__', new_callable=mock.AsyncMock) as call: - # Set the response to a series of pages. - call.side_effect = ( - catalog.ListAspectTypesResponse( - aspect_types=[ - catalog.AspectType(), - catalog.AspectType(), - catalog.AspectType(), - ], - next_page_token='abc', - ), - catalog.ListAspectTypesResponse( - aspect_types=[], - next_page_token='def', - ), - catalog.ListAspectTypesResponse( - aspect_types=[ - catalog.AspectType(), - ], - next_page_token='ghi', - ), - catalog.ListAspectTypesResponse( - aspect_types=[ - catalog.AspectType(), - catalog.AspectType(), - ], - ), - RuntimeError, - ) - pages = [] - # Workaround issue in python 3.9 related to code coverage by adding `# pragma: no branch` - # See https://github.com/googleapis/gapic-generator-python/pull/1174#issuecomment-1025132372 - async for page_ in ( # pragma: no branch - await client.list_aspect_types(request={}) - ).pages: - pages.append(page_) - for page_, token in zip(pages, ['abc','def','ghi', '']): - assert page_.raw_page.next_page_token == token - -@pytest.mark.parametrize("request_type", [ - catalog.GetAspectTypeRequest, - dict, -]) -def test_get_aspect_type(request_type, transport: str = 'grpc'): - client = CatalogServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_aspect_type), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = catalog.AspectType( - name='name_value', - uid='uid_value', - description='description_value', - display_name='display_name_value', - etag='etag_value', - transfer_status=catalog.TransferStatus.TRANSFER_STATUS_MIGRATED, - ) - response = client.get_aspect_type(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - request = catalog.GetAspectTypeRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, catalog.AspectType) - assert response.name == 'name_value' - assert response.uid == 'uid_value' - assert response.description == 'description_value' - assert response.display_name == 'display_name_value' - assert response.etag == 'etag_value' - assert response.transfer_status == catalog.TransferStatus.TRANSFER_STATUS_MIGRATED - - -def test_get_aspect_type_non_empty_request_with_auto_populated_field(): - # This test is a coverage failsafe to make sure that UUID4 fields are - # automatically populated, according to AIP-4235, with non-empty requests. - client = CatalogServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. - request = catalog.GetAspectTypeRequest( - name='name_value', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_aspect_type), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. 
- client.get_aspect_type(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == catalog.GetAspectTypeRequest( - name='name_value', - ) - -def test_get_aspect_type_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = CatalogServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.get_aspect_type in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.get_aspect_type] = mock_rpc - request = {} - client.get_aspect_type(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - client.get_aspect_type(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_get_aspect_type_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: - client = CatalogServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._client._transport.get_aspect_type in client._client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.AsyncMock() - mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[client._client._transport.get_aspect_type] = mock_rpc - - request = {} - await client.get_aspect_type(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - await client.get_aspect_type(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_get_aspect_type_async(transport: str = 'grpc_asyncio', request_type=catalog.GetAspectTypeRequest): - client = CatalogServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_aspect_type), - '__call__') as call: - # Designate an appropriate return value for the call. 
-        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(catalog.AspectType(
-            name='name_value',
-            uid='uid_value',
-            description='description_value',
-            display_name='display_name_value',
-            etag='etag_value',
-            transfer_status=catalog.TransferStatus.TRANSFER_STATUS_MIGRATED,
-        ))
-        response = await client.get_aspect_type(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert len(call.mock_calls)
-        _, args, _ = call.mock_calls[0]
-        request = catalog.GetAspectTypeRequest()
-        assert args[0] == request
-
-    # Establish that the response is the type that we expect.
-    assert isinstance(response, catalog.AspectType)
-    assert response.name == 'name_value'
-    assert response.uid == 'uid_value'
-    assert response.description == 'description_value'
-    assert response.display_name == 'display_name_value'
-    assert response.etag == 'etag_value'
-    assert response.transfer_status == catalog.TransferStatus.TRANSFER_STATUS_MIGRATED
-
-
-@pytest.mark.asyncio
-async def test_get_aspect_type_async_from_dict():
-    await test_get_aspect_type_async(request_type=dict)
-
-def test_get_aspect_type_field_headers():
-    client = CatalogServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-    )
-
-    # Any value that is part of the HTTP/1.1 URI should be sent as
-    # a field header. Set these to a non-empty value.
-    request = catalog.GetAspectTypeRequest()
-
-    request.name = 'name_value'
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.get_aspect_type),
-            '__call__') as call:
-        call.return_value = catalog.AspectType()
-        client.get_aspect_type(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert len(call.mock_calls) == 1
-        _, args, _ = call.mock_calls[0]
-        assert args[0] == request
-
-    # Establish that the field header was sent.
-    _, _, kw = call.mock_calls[0]
-    assert (
-        'x-goog-request-params',
-        'name=name_value',
-    ) in kw['metadata']
-
-
-@pytest.mark.asyncio
-async def test_get_aspect_type_field_headers_async():
-    client = CatalogServiceAsyncClient(
-        credentials=async_anonymous_credentials(),
-    )
-
-    # Any value that is part of the HTTP/1.1 URI should be sent as
-    # a field header. Set these to a non-empty value.
-    request = catalog.GetAspectTypeRequest()
-
-    request.name = 'name_value'
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.get_aspect_type),
-            '__call__') as call:
-        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(catalog.AspectType())
-        await client.get_aspect_type(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert len(call.mock_calls)
-        _, args, _ = call.mock_calls[0]
-        assert args[0] == request
-
-    # Establish that the field header was sent.
-    _, _, kw = call.mock_calls[0]
-    assert (
-        'x-goog-request-params',
-        'name=name_value',
-    ) in kw['metadata']
-
-
-def test_get_aspect_type_flattened():
-    client = CatalogServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-    )
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.get_aspect_type),
-            '__call__') as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = catalog.AspectType()
-        # Call the method with a truthy value for each flattened field,
-        # using the keyword arguments to the method.
-        client.get_aspect_type(
-            name='name_value',
-        )
-
-        # Establish that the underlying call was made with the expected
-        # request object values.
-        assert len(call.mock_calls) == 1
-        _, args, _ = call.mock_calls[0]
-        arg = args[0].name
-        mock_val = 'name_value'
-        assert arg == mock_val
-
-
-def test_get_aspect_type_flattened_error():
-    client = CatalogServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-    )
-
-    # Attempting to call a method with both a request object and flattened
-    # fields is an error.
-    with pytest.raises(ValueError):
-        client.get_aspect_type(
-            catalog.GetAspectTypeRequest(),
-            name='name_value',
-        )
-
-@pytest.mark.asyncio
-async def test_get_aspect_type_flattened_async():
-    client = CatalogServiceAsyncClient(
-        credentials=async_anonymous_credentials(),
-    )
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.get_aspect_type),
-            '__call__') as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(catalog.AspectType())
-        # Call the method with a truthy value for each flattened field,
-        # using the keyword arguments to the method.
-        response = await client.get_aspect_type(
-            name='name_value',
-        )
-
-        # Establish that the underlying call was made with the expected
-        # request object values.
-        assert len(call.mock_calls)
-        _, args, _ = call.mock_calls[0]
-        arg = args[0].name
-        mock_val = 'name_value'
-        assert arg == mock_val
-
-@pytest.mark.asyncio
-async def test_get_aspect_type_flattened_error_async():
-    client = CatalogServiceAsyncClient(
-        credentials=async_anonymous_credentials(),
-    )
-
-    # Attempting to call a method with both a request object and flattened
-    # fields is an error.
-    with pytest.raises(ValueError):
-        await client.get_aspect_type(
-            catalog.GetAspectTypeRequest(),
-            name='name_value',
-        )
-
-
-@pytest.mark.parametrize("request_type", [
-    catalog.CreateEntryGroupRequest,
-    dict,
-])
-def test_create_entry_group(request_type, transport: str = 'grpc'):
-    client = CatalogServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport=transport,
-    )
-
-    # Everything is optional in proto3 as far as the runtime is concerned,
-    # and we are mocking out the actual API, so just send an empty request.
-    request = request_type()
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.create_entry_group),
-            '__call__') as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = operations_pb2.Operation(name='operations/spam')
-        response = client.create_entry_group(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert len(call.mock_calls) == 1
-        _, args, _ = call.mock_calls[0]
-        request = catalog.CreateEntryGroupRequest()
-        assert args[0] == request
-
-    # Establish that the response is the type that we expect.
-    assert isinstance(response, future.Future)
-
-
-def test_create_entry_group_non_empty_request_with_auto_populated_field():
-    # This test is a coverage failsafe to make sure that UUID4 fields are
-    # automatically populated, according to AIP-4235, with non-empty requests.
- client = CatalogServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. - request = catalog.CreateEntryGroupRequest( - parent='parent_value', - entry_group_id='entry_group_id_value', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_entry_group), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client.create_entry_group(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == catalog.CreateEntryGroupRequest( - parent='parent_value', - entry_group_id='entry_group_id_value', - ) - -def test_create_entry_group_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = CatalogServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.create_entry_group in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.create_entry_group] = mock_rpc - request = {} - client.create_entry_group(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - # Operation methods call wrapper_fn to build a cached - # client._transport.operations_client instance on first rpc call. - # Subsequent calls should use the cached wrapper - wrapper_fn.reset_mock() - - client.create_entry_group(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_create_entry_group_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: - client = CatalogServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._client._transport.create_entry_group in client._client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.AsyncMock() - mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[client._client._transport.create_entry_group] = mock_rpc - - request = {} - await client.create_entry_group(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - # Operation methods call wrapper_fn to build a cached - # client._transport.operations_client instance on first rpc call. 
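-        # (The operations client is what wraps the raw
-        # operations_pb2.Operation response into a polling future.)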
- # Subsequent calls should use the cached wrapper - wrapper_fn.reset_mock() - - await client.create_entry_group(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_create_entry_group_async(transport: str = 'grpc_asyncio', request_type=catalog.CreateEntryGroupRequest): - client = CatalogServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_entry_group), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name='operations/spam') - ) - response = await client.create_entry_group(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - request = catalog.CreateEntryGroupRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, future.Future) - - -@pytest.mark.asyncio -async def test_create_entry_group_async_from_dict(): - await test_create_entry_group_async(request_type=dict) - -def test_create_entry_group_field_headers(): - client = CatalogServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = catalog.CreateEntryGroupRequest() - - request.parent = 'parent_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_entry_group), - '__call__') as call: - call.return_value = operations_pb2.Operation(name='operations/op') - client.create_entry_group(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'parent=parent_value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_create_entry_group_field_headers_async(): - client = CatalogServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = catalog.CreateEntryGroupRequest() - - request.parent = 'parent_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_entry_group), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(operations_pb2.Operation(name='operations/op')) - await client.create_entry_group(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. 
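-        # (The x-goog-request-params metadata entry carries the resource
-        # path so the backend can route the request, per AIP-4222 implicit
-        # routing headers.)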
-        _, _, kw = call.mock_calls[0]
-        assert (
-            'x-goog-request-params',
-            'parent=parent_value',
-        ) in kw['metadata']
-
-
-def test_create_entry_group_flattened():
-    client = CatalogServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-    )
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.create_entry_group),
-            '__call__') as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = operations_pb2.Operation(name='operations/op')
-        # Call the method with a truthy value for each flattened field,
-        # using the keyword arguments to the method.
-        client.create_entry_group(
-            parent='parent_value',
-            entry_group=catalog.EntryGroup(name='name_value'),
-            entry_group_id='entry_group_id_value',
-        )
-
-        # Establish that the underlying call was made with the expected
-        # request object values.
-        assert len(call.mock_calls) == 1
-        _, args, _ = call.mock_calls[0]
-        arg = args[0].parent
-        mock_val = 'parent_value'
-        assert arg == mock_val
-        arg = args[0].entry_group
-        mock_val = catalog.EntryGroup(name='name_value')
-        assert arg == mock_val
-        arg = args[0].entry_group_id
-        mock_val = 'entry_group_id_value'
-        assert arg == mock_val
-
-
-def test_create_entry_group_flattened_error():
-    client = CatalogServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-    )
-
-    # Attempting to call a method with both a request object and flattened
-    # fields is an error.
-    with pytest.raises(ValueError):
-        client.create_entry_group(
-            catalog.CreateEntryGroupRequest(),
-            parent='parent_value',
-            entry_group=catalog.EntryGroup(name='name_value'),
-            entry_group_id='entry_group_id_value',
-        )
-
-@pytest.mark.asyncio
-async def test_create_entry_group_flattened_async():
-    client = CatalogServiceAsyncClient(
-        credentials=async_anonymous_credentials(),
-    )
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.create_entry_group),
-            '__call__') as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
-            operations_pb2.Operation(name='operations/spam')
-        )
-        # Call the method with a truthy value for each flattened field,
-        # using the keyword arguments to the method.
-        response = await client.create_entry_group(
-            parent='parent_value',
-            entry_group=catalog.EntryGroup(name='name_value'),
-            entry_group_id='entry_group_id_value',
-        )
-
-        # Establish that the underlying call was made with the expected
-        # request object values.
-        assert len(call.mock_calls)
-        _, args, _ = call.mock_calls[0]
-        arg = args[0].parent
-        mock_val = 'parent_value'
-        assert arg == mock_val
-        arg = args[0].entry_group
-        mock_val = catalog.EntryGroup(name='name_value')
-        assert arg == mock_val
-        arg = args[0].entry_group_id
-        mock_val = 'entry_group_id_value'
-        assert arg == mock_val
-
-@pytest.mark.asyncio
-async def test_create_entry_group_flattened_error_async():
-    client = CatalogServiceAsyncClient(
-        credentials=async_anonymous_credentials(),
-    )
-
-    # Attempting to call a method with both a request object and flattened
-    # fields is an error.
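-    # (Mixing the two calling conventions would be ambiguous, so the
-    # client raises ValueError before any RPC is attempted.)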
- with pytest.raises(ValueError): - await client.create_entry_group( - catalog.CreateEntryGroupRequest(), - parent='parent_value', - entry_group=catalog.EntryGroup(name='name_value'), - entry_group_id='entry_group_id_value', - ) - - -@pytest.mark.parametrize("request_type", [ - catalog.UpdateEntryGroupRequest, - dict, -]) -def test_update_entry_group(request_type, transport: str = 'grpc'): - client = CatalogServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_entry_group), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = operations_pb2.Operation(name='operations/spam') - response = client.update_entry_group(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - request = catalog.UpdateEntryGroupRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, future.Future) - - -def test_update_entry_group_non_empty_request_with_auto_populated_field(): - # This test is a coverage failsafe to make sure that UUID4 fields are - # automatically populated, according to AIP-4235, with non-empty requests. - client = CatalogServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. - request = catalog.UpdateEntryGroupRequest( - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_entry_group), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client.update_entry_group(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == catalog.UpdateEntryGroupRequest( - ) - -def test_update_entry_group_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = CatalogServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.update_entry_group in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.update_entry_group] = mock_rpc - request = {} - client.update_entry_group(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - # Operation methods call wrapper_fn to build a cached - # client._transport.operations_client instance on first rpc call. 
- # Subsequent calls should use the cached wrapper - wrapper_fn.reset_mock() - - client.update_entry_group(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_update_entry_group_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: - client = CatalogServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._client._transport.update_entry_group in client._client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.AsyncMock() - mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[client._client._transport.update_entry_group] = mock_rpc - - request = {} - await client.update_entry_group(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - # Operation methods call wrapper_fn to build a cached - # client._transport.operations_client instance on first rpc call. - # Subsequent calls should use the cached wrapper - wrapper_fn.reset_mock() - - await client.update_entry_group(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_update_entry_group_async(transport: str = 'grpc_asyncio', request_type=catalog.UpdateEntryGroupRequest): - client = CatalogServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_entry_group), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name='operations/spam') - ) - response = await client.update_entry_group(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - request = catalog.UpdateEntryGroupRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, future.Future) - - -@pytest.mark.asyncio -async def test_update_entry_group_async_from_dict(): - await test_update_entry_group_async(request_type=dict) - -def test_update_entry_group_field_headers(): - client = CatalogServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = catalog.UpdateEntryGroupRequest() - - request.entry_group.name = 'name_value' - - # Mock the actual call within the gRPC stub, and fake the request. 
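-    # (Patching __call__ on type(client.transport.update_entry_group)
-    # intercepts the gRPC multicallable itself, so no real channel
-    # traffic occurs.)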
- with mock.patch.object( - type(client.transport.update_entry_group), - '__call__') as call: - call.return_value = operations_pb2.Operation(name='operations/op') - client.update_entry_group(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'entry_group.name=name_value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_update_entry_group_field_headers_async(): - client = CatalogServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = catalog.UpdateEntryGroupRequest() - - request.entry_group.name = 'name_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_entry_group), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(operations_pb2.Operation(name='operations/op')) - await client.update_entry_group(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'entry_group.name=name_value', - ) in kw['metadata'] - - -def test_update_entry_group_flattened(): - client = CatalogServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_entry_group), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = operations_pb2.Operation(name='operations/op') - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.update_entry_group( - entry_group=catalog.EntryGroup(name='name_value'), - update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - arg = args[0].entry_group - mock_val = catalog.EntryGroup(name='name_value') - assert arg == mock_val - arg = args[0].update_mask - mock_val = field_mask_pb2.FieldMask(paths=['paths_value']) - assert arg == mock_val - - -def test_update_entry_group_flattened_error(): - client = CatalogServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.update_entry_group( - catalog.UpdateEntryGroupRequest(), - entry_group=catalog.EntryGroup(name='name_value'), - update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), - ) - -@pytest.mark.asyncio -async def test_update_entry_group_flattened_async(): - client = CatalogServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_entry_group), - '__call__') as call: - # Designate an appropriate return value for the call. 
-        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
-            operations_pb2.Operation(name='operations/spam')
-        )
-        # Call the method with a truthy value for each flattened field,
-        # using the keyword arguments to the method.
-        response = await client.update_entry_group(
-            entry_group=catalog.EntryGroup(name='name_value'),
-            update_mask=field_mask_pb2.FieldMask(paths=['paths_value']),
-        )
-
-        # Establish that the underlying call was made with the expected
-        # request object values.
-        assert len(call.mock_calls)
-        _, args, _ = call.mock_calls[0]
-        arg = args[0].entry_group
-        mock_val = catalog.EntryGroup(name='name_value')
-        assert arg == mock_val
-        arg = args[0].update_mask
-        mock_val = field_mask_pb2.FieldMask(paths=['paths_value'])
-        assert arg == mock_val
-
-@pytest.mark.asyncio
-async def test_update_entry_group_flattened_error_async():
-    client = CatalogServiceAsyncClient(
-        credentials=async_anonymous_credentials(),
-    )
-
-    # Attempting to call a method with both a request object and flattened
-    # fields is an error.
-    with pytest.raises(ValueError):
-        await client.update_entry_group(
-            catalog.UpdateEntryGroupRequest(),
-            entry_group=catalog.EntryGroup(name='name_value'),
-            update_mask=field_mask_pb2.FieldMask(paths=['paths_value']),
-        )
-
-
-@pytest.mark.parametrize("request_type", [
-    catalog.DeleteEntryGroupRequest,
-    dict,
-])
-def test_delete_entry_group(request_type, transport: str = 'grpc'):
-    client = CatalogServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport=transport,
-    )
-
-    # Everything is optional in proto3 as far as the runtime is concerned,
-    # and we are mocking out the actual API, so just send an empty request.
-    request = request_type()
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.delete_entry_group),
-            '__call__') as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = operations_pb2.Operation(name='operations/spam')
-        response = client.delete_entry_group(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert len(call.mock_calls) == 1
-        _, args, _ = call.mock_calls[0]
-        request = catalog.DeleteEntryGroupRequest()
-        assert args[0] == request
-
-        # Establish that the response is the type that we expect.
-        assert isinstance(response, future.Future)
-
-
-def test_delete_entry_group_non_empty_request_with_auto_populated_field():
-    # This test is a coverage failsafe to make sure that UUID4 fields are
-    # automatically populated, according to AIP-4235, with non-empty requests.
-    client = CatalogServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport='grpc',
-    )
-
-    # Populate all string fields in the request which are not UUID4
-    # since we want to check that UUID4 are populated automatically
-    # if they meet the requirements of AIP 4235.
-    request = catalog.DeleteEntryGroupRequest(
-        name='name_value',
-        etag='etag_value',
-    )
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.delete_entry_group),
-            '__call__') as call:
-        call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string.
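-        # (Assigning a real string here matters: a bare MagicMock attribute
-        # would fail code paths that treat the operation name as a string.)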
- client.delete_entry_group(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == catalog.DeleteEntryGroupRequest( - name='name_value', - etag='etag_value', - ) - -def test_delete_entry_group_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = CatalogServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.delete_entry_group in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.delete_entry_group] = mock_rpc - request = {} - client.delete_entry_group(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - # Operation methods call wrapper_fn to build a cached - # client._transport.operations_client instance on first rpc call. - # Subsequent calls should use the cached wrapper - wrapper_fn.reset_mock() - - client.delete_entry_group(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_delete_entry_group_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: - client = CatalogServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._client._transport.delete_entry_group in client._client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.AsyncMock() - mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[client._client._transport.delete_entry_group] = mock_rpc - - request = {} - await client.delete_entry_group(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - # Operation methods call wrapper_fn to build a cached - # client._transport.operations_client instance on first rpc call. - # Subsequent calls should use the cached wrapper - wrapper_fn.reset_mock() - - await client.delete_entry_group(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_delete_entry_group_async(transport: str = 'grpc_asyncio', request_type=catalog.DeleteEntryGroupRequest): - client = CatalogServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.delete_entry_group), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name='operations/spam') - ) - response = await client.delete_entry_group(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - request = catalog.DeleteEntryGroupRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, future.Future) - - -@pytest.mark.asyncio -async def test_delete_entry_group_async_from_dict(): - await test_delete_entry_group_async(request_type=dict) - -def test_delete_entry_group_field_headers(): - client = CatalogServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = catalog.DeleteEntryGroupRequest() - - request.name = 'name_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_entry_group), - '__call__') as call: - call.return_value = operations_pb2.Operation(name='operations/op') - client.delete_entry_group(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'name=name_value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_delete_entry_group_field_headers_async(): - client = CatalogServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = catalog.DeleteEntryGroupRequest() - - request.name = 'name_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_entry_group), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(operations_pb2.Operation(name='operations/op')) - await client.delete_entry_group(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'name=name_value', - ) in kw['metadata'] - - -def test_delete_entry_group_flattened(): - client = CatalogServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_entry_group), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = operations_pb2.Operation(name='operations/op') - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.delete_entry_group( - name='name_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. 
-        assert len(call.mock_calls) == 1
-        _, args, _ = call.mock_calls[0]
-        arg = args[0].name
-        mock_val = 'name_value'
-        assert arg == mock_val
-
-
-def test_delete_entry_group_flattened_error():
-    client = CatalogServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-    )
-
-    # Attempting to call a method with both a request object and flattened
-    # fields is an error.
-    with pytest.raises(ValueError):
-        client.delete_entry_group(
-            catalog.DeleteEntryGroupRequest(),
-            name='name_value',
-        )
-
-@pytest.mark.asyncio
-async def test_delete_entry_group_flattened_async():
-    client = CatalogServiceAsyncClient(
-        credentials=async_anonymous_credentials(),
-    )
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.delete_entry_group),
-            '__call__') as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
-            operations_pb2.Operation(name='operations/spam')
-        )
-        # Call the method with a truthy value for each flattened field,
-        # using the keyword arguments to the method.
-        response = await client.delete_entry_group(
-            name='name_value',
-        )
-
-        # Establish that the underlying call was made with the expected
-        # request object values.
-        assert len(call.mock_calls)
-        _, args, _ = call.mock_calls[0]
-        arg = args[0].name
-        mock_val = 'name_value'
-        assert arg == mock_val
-
-@pytest.mark.asyncio
-async def test_delete_entry_group_flattened_error_async():
-    client = CatalogServiceAsyncClient(
-        credentials=async_anonymous_credentials(),
-    )
-
-    # Attempting to call a method with both a request object and flattened
-    # fields is an error.
-    with pytest.raises(ValueError):
-        await client.delete_entry_group(
-            catalog.DeleteEntryGroupRequest(),
-            name='name_value',
-        )
-
-
-@pytest.mark.parametrize("request_type", [
-    catalog.ListEntryGroupsRequest,
-    dict,
-])
-def test_list_entry_groups(request_type, transport: str = 'grpc'):
-    client = CatalogServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport=transport,
-    )
-
-    # Everything is optional in proto3 as far as the runtime is concerned,
-    # and we are mocking out the actual API, so just send an empty request.
-    request = request_type()
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.list_entry_groups),
-            '__call__') as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = catalog.ListEntryGroupsResponse(
-            next_page_token='next_page_token_value',
-            unreachable_locations=['unreachable_locations_value'],
-        )
-        response = client.list_entry_groups(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert len(call.mock_calls) == 1
-        _, args, _ = call.mock_calls[0]
-        request = catalog.ListEntryGroupsRequest()
-        assert args[0] == request
-
-        # Establish that the response is the type that we expect.
-        assert isinstance(response, pagers.ListEntryGroupsPager)
-        assert response.next_page_token == 'next_page_token_value'
-        assert response.unreachable_locations == ['unreachable_locations_value']
-
-
-def test_list_entry_groups_non_empty_request_with_auto_populated_field():
-    # This test is a coverage failsafe to make sure that UUID4 fields are
-    # automatically populated, according to AIP-4235, with non-empty requests.
- client = CatalogServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. - request = catalog.ListEntryGroupsRequest( - parent='parent_value', - page_token='page_token_value', - filter='filter_value', - order_by='order_by_value', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_entry_groups), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client.list_entry_groups(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == catalog.ListEntryGroupsRequest( - parent='parent_value', - page_token='page_token_value', - filter='filter_value', - order_by='order_by_value', - ) - -def test_list_entry_groups_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = CatalogServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.list_entry_groups in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.list_entry_groups] = mock_rpc - request = {} - client.list_entry_groups(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - client.list_entry_groups(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_list_entry_groups_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: - client = CatalogServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._client._transport.list_entry_groups in client._client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.AsyncMock() - mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[client._client._transport.list_entry_groups] = mock_rpc - - request = {} - await client.list_entry_groups(request) - - # Establish that the underlying gRPC stub method was called. 
-        assert mock_rpc.call_count == 1
-
-        await client.list_entry_groups(request)
-
-        # Establish that a new wrapper was not created for this call
-        assert wrapper_fn.call_count == 0
-        assert mock_rpc.call_count == 2
-
-@pytest.mark.asyncio
-async def test_list_entry_groups_async(transport: str = 'grpc_asyncio', request_type=catalog.ListEntryGroupsRequest):
-    client = CatalogServiceAsyncClient(
-        credentials=async_anonymous_credentials(),
-        transport=transport,
-    )
-
-    # Everything is optional in proto3 as far as the runtime is concerned,
-    # and we are mocking out the actual API, so just send an empty request.
-    request = request_type()
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.list_entry_groups),
-            '__call__') as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(catalog.ListEntryGroupsResponse(
-            next_page_token='next_page_token_value',
-            unreachable_locations=['unreachable_locations_value'],
-        ))
-        response = await client.list_entry_groups(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert len(call.mock_calls)
-        _, args, _ = call.mock_calls[0]
-        request = catalog.ListEntryGroupsRequest()
-        assert args[0] == request
-
-        # Establish that the response is the type that we expect.
-        assert isinstance(response, pagers.ListEntryGroupsAsyncPager)
-        assert response.next_page_token == 'next_page_token_value'
-        assert response.unreachable_locations == ['unreachable_locations_value']
-
-
-@pytest.mark.asyncio
-async def test_list_entry_groups_async_from_dict():
-    await test_list_entry_groups_async(request_type=dict)
-
-def test_list_entry_groups_field_headers():
-    client = CatalogServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-    )
-
-    # Any value that is part of the HTTP/1.1 URI should be sent as
-    # a field header. Set these to a non-empty value.
-    request = catalog.ListEntryGroupsRequest()
-
-    request.parent = 'parent_value'
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.list_entry_groups),
-            '__call__') as call:
-        call.return_value = catalog.ListEntryGroupsResponse()
-        client.list_entry_groups(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert len(call.mock_calls) == 1
-        _, args, _ = call.mock_calls[0]
-        assert args[0] == request
-
-        # Establish that the field header was sent.
-        _, _, kw = call.mock_calls[0]
-        assert (
-            'x-goog-request-params',
-            'parent=parent_value',
-        ) in kw['metadata']
-
-
-@pytest.mark.asyncio
-async def test_list_entry_groups_field_headers_async():
-    client = CatalogServiceAsyncClient(
-        credentials=async_anonymous_credentials(),
-    )
-
-    # Any value that is part of the HTTP/1.1 URI should be sent as
-    # a field header. Set these to a non-empty value.
-    request = catalog.ListEntryGroupsRequest()
-
-    request.parent = 'parent_value'
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.list_entry_groups),
-            '__call__') as call:
-        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(catalog.ListEntryGroupsResponse())
-        await client.list_entry_groups(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert len(call.mock_calls)
-        _, args, _ = call.mock_calls[0]
-        assert args[0] == request
-
-        # Establish that the field header was sent.
-        _, _, kw = call.mock_calls[0]
-        assert (
-            'x-goog-request-params',
-            'parent=parent_value',
-        ) in kw['metadata']
-
-
-def test_list_entry_groups_flattened():
-    client = CatalogServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-    )
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.list_entry_groups),
-            '__call__') as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = catalog.ListEntryGroupsResponse()
-        # Call the method with a truthy value for each flattened field,
-        # using the keyword arguments to the method.
-        client.list_entry_groups(
-            parent='parent_value',
-        )
-
-        # Establish that the underlying call was made with the expected
-        # request object values.
-        assert len(call.mock_calls) == 1
-        _, args, _ = call.mock_calls[0]
-        arg = args[0].parent
-        mock_val = 'parent_value'
-        assert arg == mock_val
-
-
-def test_list_entry_groups_flattened_error():
-    client = CatalogServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-    )
-
-    # Attempting to call a method with both a request object and flattened
-    # fields is an error.
-    with pytest.raises(ValueError):
-        client.list_entry_groups(
-            catalog.ListEntryGroupsRequest(),
-            parent='parent_value',
-        )
-
-@pytest.mark.asyncio
-async def test_list_entry_groups_flattened_async():
-    client = CatalogServiceAsyncClient(
-        credentials=async_anonymous_credentials(),
-    )
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.list_entry_groups),
-            '__call__') as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(catalog.ListEntryGroupsResponse())
-        # Call the method with a truthy value for each flattened field,
-        # using the keyword arguments to the method.
-        response = await client.list_entry_groups(
-            parent='parent_value',
-        )
-
-        # Establish that the underlying call was made with the expected
-        # request object values.
-        assert len(call.mock_calls)
-        _, args, _ = call.mock_calls[0]
-        arg = args[0].parent
-        mock_val = 'parent_value'
-        assert arg == mock_val
-
-@pytest.mark.asyncio
-async def test_list_entry_groups_flattened_error_async():
-    client = CatalogServiceAsyncClient(
-        credentials=async_anonymous_credentials(),
-    )
-
-    # Attempting to call a method with both a request object and flattened
-    # fields is an error.
-    with pytest.raises(ValueError):
-        await client.list_entry_groups(
-            catalog.ListEntryGroupsRequest(),
-            parent='parent_value',
-        )
-
-
-def test_list_entry_groups_pager(transport_name: str = "grpc"):
-    client = CatalogServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport=transport_name,
-    )
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.list_entry_groups),
-            '__call__') as call:
-        # Set the response to a series of pages.
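-        # (mock returns one element of side_effect per successive call; the
-        # trailing RuntimeError fires only if the pager fetches past the
-        # final page, whose empty next_page_token ends iteration.)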
- call.side_effect = ( - catalog.ListEntryGroupsResponse( - entry_groups=[ - catalog.EntryGroup(), - catalog.EntryGroup(), - catalog.EntryGroup(), - ], - next_page_token='abc', - ), - catalog.ListEntryGroupsResponse( - entry_groups=[], - next_page_token='def', - ), - catalog.ListEntryGroupsResponse( - entry_groups=[ - catalog.EntryGroup(), - ], - next_page_token='ghi', - ), - catalog.ListEntryGroupsResponse( - entry_groups=[ - catalog.EntryGroup(), - catalog.EntryGroup(), - ], - ), - RuntimeError, - ) - - expected_metadata = () - retry = retries.Retry() - timeout = 5 - expected_metadata = tuple(expected_metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ('parent', ''), - )), - ) - pager = client.list_entry_groups(request={}, retry=retry, timeout=timeout) - - assert pager._metadata == expected_metadata - assert pager._retry == retry - assert pager._timeout == timeout - - results = list(pager) - assert len(results) == 6 - assert all(isinstance(i, catalog.EntryGroup) - for i in results) -def test_list_entry_groups_pages(transport_name: str = "grpc"): - client = CatalogServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport_name, - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_entry_groups), - '__call__') as call: - # Set the response to a series of pages. - call.side_effect = ( - catalog.ListEntryGroupsResponse( - entry_groups=[ - catalog.EntryGroup(), - catalog.EntryGroup(), - catalog.EntryGroup(), - ], - next_page_token='abc', - ), - catalog.ListEntryGroupsResponse( - entry_groups=[], - next_page_token='def', - ), - catalog.ListEntryGroupsResponse( - entry_groups=[ - catalog.EntryGroup(), - ], - next_page_token='ghi', - ), - catalog.ListEntryGroupsResponse( - entry_groups=[ - catalog.EntryGroup(), - catalog.EntryGroup(), - ], - ), - RuntimeError, - ) - pages = list(client.list_entry_groups(request={}).pages) - for page_, token in zip(pages, ['abc','def','ghi', '']): - assert page_.raw_page.next_page_token == token - -@pytest.mark.asyncio -async def test_list_entry_groups_async_pager(): - client = CatalogServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_entry_groups), - '__call__', new_callable=mock.AsyncMock) as call: - # Set the response to a series of pages. - call.side_effect = ( - catalog.ListEntryGroupsResponse( - entry_groups=[ - catalog.EntryGroup(), - catalog.EntryGroup(), - catalog.EntryGroup(), - ], - next_page_token='abc', - ), - catalog.ListEntryGroupsResponse( - entry_groups=[], - next_page_token='def', - ), - catalog.ListEntryGroupsResponse( - entry_groups=[ - catalog.EntryGroup(), - ], - next_page_token='ghi', - ), - catalog.ListEntryGroupsResponse( - entry_groups=[ - catalog.EntryGroup(), - catalog.EntryGroup(), - ], - ), - RuntimeError, - ) - async_pager = await client.list_entry_groups(request={},) - assert async_pager.next_page_token == 'abc' - responses = [] - async for response in async_pager: # pragma: no branch - responses.append(response) - - assert len(responses) == 6 - assert all(isinstance(i, catalog.EntryGroup) - for i in responses) - - -@pytest.mark.asyncio -async def test_list_entry_groups_async_pages(): - client = CatalogServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. 
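-    # (new_callable=mock.AsyncMock makes the patched __call__ awaitable
-    # directly, so no FakeUnaryUnaryCall wrapper is needed in the pager
-    # tests below.)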
- with mock.patch.object( - type(client.transport.list_entry_groups), - '__call__', new_callable=mock.AsyncMock) as call: - # Set the response to a series of pages. - call.side_effect = ( - catalog.ListEntryGroupsResponse( - entry_groups=[ - catalog.EntryGroup(), - catalog.EntryGroup(), - catalog.EntryGroup(), - ], - next_page_token='abc', - ), - catalog.ListEntryGroupsResponse( - entry_groups=[], - next_page_token='def', - ), - catalog.ListEntryGroupsResponse( - entry_groups=[ - catalog.EntryGroup(), - ], - next_page_token='ghi', - ), - catalog.ListEntryGroupsResponse( - entry_groups=[ - catalog.EntryGroup(), - catalog.EntryGroup(), - ], - ), - RuntimeError, - ) - pages = [] - # Workaround issue in python 3.9 related to code coverage by adding `# pragma: no branch` - # See https://github.com/googleapis/gapic-generator-python/pull/1174#issuecomment-1025132372 - async for page_ in ( # pragma: no branch - await client.list_entry_groups(request={}) - ).pages: - pages.append(page_) - for page_, token in zip(pages, ['abc','def','ghi', '']): - assert page_.raw_page.next_page_token == token - -@pytest.mark.parametrize("request_type", [ - catalog.GetEntryGroupRequest, - dict, -]) -def test_get_entry_group(request_type, transport: str = 'grpc'): - client = CatalogServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_entry_group), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = catalog.EntryGroup( - name='name_value', - uid='uid_value', - description='description_value', - display_name='display_name_value', - etag='etag_value', - transfer_status=catalog.TransferStatus.TRANSFER_STATUS_MIGRATED, - ) - response = client.get_entry_group(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - request = catalog.GetEntryGroupRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, catalog.EntryGroup) - assert response.name == 'name_value' - assert response.uid == 'uid_value' - assert response.description == 'description_value' - assert response.display_name == 'display_name_value' - assert response.etag == 'etag_value' - assert response.transfer_status == catalog.TransferStatus.TRANSFER_STATUS_MIGRATED - - -def test_get_entry_group_non_empty_request_with_auto_populated_field(): - # This test is a coverage failsafe to make sure that UUID4 fields are - # automatically populated, according to AIP-4235, with non-empty requests. - client = CatalogServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. - request = catalog.GetEntryGroupRequest( - name='name_value', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_entry_group), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. 
- client.get_entry_group(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == catalog.GetEntryGroupRequest( - name='name_value', - ) - -def test_get_entry_group_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = CatalogServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.get_entry_group in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.get_entry_group] = mock_rpc - request = {} - client.get_entry_group(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - client.get_entry_group(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_get_entry_group_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: - client = CatalogServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._client._transport.get_entry_group in client._client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.AsyncMock() - mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[client._client._transport.get_entry_group] = mock_rpc - - request = {} - await client.get_entry_group(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - await client.get_entry_group(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_get_entry_group_async(transport: str = 'grpc_asyncio', request_type=catalog.GetEntryGroupRequest): - client = CatalogServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_entry_group), - '__call__') as call: - # Designate an appropriate return value for the call. 
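-        # (FakeUnaryUnaryCall mimics the awaitable call object a grpc.aio
-        # unary-unary stub returns, resolving to the wrapped response.)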
-        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(catalog.EntryGroup(
-            name='name_value',
-            uid='uid_value',
-            description='description_value',
-            display_name='display_name_value',
-            etag='etag_value',
-            transfer_status=catalog.TransferStatus.TRANSFER_STATUS_MIGRATED,
-        ))
-        response = await client.get_entry_group(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert len(call.mock_calls)
-        _, args, _ = call.mock_calls[0]
-        request = catalog.GetEntryGroupRequest()
-        assert args[0] == request
-
-        # Establish that the response is the type that we expect.
-        assert isinstance(response, catalog.EntryGroup)
-        assert response.name == 'name_value'
-        assert response.uid == 'uid_value'
-        assert response.description == 'description_value'
-        assert response.display_name == 'display_name_value'
-        assert response.etag == 'etag_value'
-        assert response.transfer_status == catalog.TransferStatus.TRANSFER_STATUS_MIGRATED
-
-
-@pytest.mark.asyncio
-async def test_get_entry_group_async_from_dict():
-    await test_get_entry_group_async(request_type=dict)
-
-def test_get_entry_group_field_headers():
-    client = CatalogServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-    )
-
-    # Any value that is part of the HTTP/1.1 URI should be sent as
-    # a field header. Set these to a non-empty value.
-    request = catalog.GetEntryGroupRequest()
-
-    request.name = 'name_value'
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.get_entry_group),
-            '__call__') as call:
-        call.return_value = catalog.EntryGroup()
-        client.get_entry_group(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert len(call.mock_calls) == 1
-        _, args, _ = call.mock_calls[0]
-        assert args[0] == request
-
-        # Establish that the field header was sent.
-        _, _, kw = call.mock_calls[0]
-        assert (
-            'x-goog-request-params',
-            'name=name_value',
-        ) in kw['metadata']
-
-
-@pytest.mark.asyncio
-async def test_get_entry_group_field_headers_async():
-    client = CatalogServiceAsyncClient(
-        credentials=async_anonymous_credentials(),
-    )
-
-    # Any value that is part of the HTTP/1.1 URI should be sent as
-    # a field header. Set these to a non-empty value.
-    request = catalog.GetEntryGroupRequest()
-
-    request.name = 'name_value'
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.get_entry_group),
-            '__call__') as call:
-        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(catalog.EntryGroup())
-        await client.get_entry_group(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert len(call.mock_calls)
-        _, args, _ = call.mock_calls[0]
-        assert args[0] == request
-
-        # Establish that the field header was sent.
-        _, _, kw = call.mock_calls[0]
-        assert (
-            'x-goog-request-params',
-            'name=name_value',
-        ) in kw['metadata']
-
-
-def test_get_entry_group_flattened():
-    client = CatalogServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-    )
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.get_entry_group),
-            '__call__') as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = catalog.EntryGroup()
-        # Call the method with a truthy value for each flattened field,
-        # using the keyword arguments to the method.
- client.get_entry_group(
- name='name_value',
- )
-
- # Establish that the underlying call was made with the expected
- # request object values.
- assert len(call.mock_calls) == 1
- _, args, _ = call.mock_calls[0]
- arg = args[0].name
- mock_val = 'name_value'
- assert arg == mock_val
-
-
-def test_get_entry_group_flattened_error():
- client = CatalogServiceClient(
- credentials=ga_credentials.AnonymousCredentials(),
- )
-
- # Attempting to call a method with both a request object and flattened
- # fields is an error.
- with pytest.raises(ValueError):
- client.get_entry_group(
- catalog.GetEntryGroupRequest(),
- name='name_value',
- )
-
-@pytest.mark.asyncio
-async def test_get_entry_group_flattened_async():
- client = CatalogServiceAsyncClient(
- credentials=async_anonymous_credentials(),
- )
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client.transport.get_entry_group),
- '__call__') as call:
- # Designate an appropriate return value for the call.
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(catalog.EntryGroup())
- # Call the method with a truthy value for each flattened field,
- # using the keyword arguments to the method.
- response = await client.get_entry_group(
- name='name_value',
- )
-
- # Establish that the underlying call was made with the expected
- # request object values.
- assert len(call.mock_calls)
- _, args, _ = call.mock_calls[0]
- arg = args[0].name
- mock_val = 'name_value'
- assert arg == mock_val
-
-@pytest.mark.asyncio
-async def test_get_entry_group_flattened_error_async():
- client = CatalogServiceAsyncClient(
- credentials=async_anonymous_credentials(),
- )
-
- # Attempting to call a method with both a request object and flattened
- # fields is an error.
- with pytest.raises(ValueError):
- await client.get_entry_group(
- catalog.GetEntryGroupRequest(),
- name='name_value',
- )
-
-
-@pytest.mark.parametrize("request_type", [
- catalog.CreateEntryRequest,
- dict,
-])
-def test_create_entry(request_type, transport: str = 'grpc'):
- client = CatalogServiceClient(
- credentials=ga_credentials.AnonymousCredentials(),
- transport=transport,
- )
-
- # Everything is optional in proto3 as far as the runtime is concerned,
- # and we are mocking out the actual API, so just send an empty request.
- request = request_type()
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client.transport.create_entry),
- '__call__') as call:
- # Designate an appropriate return value for the call.
- call.return_value = catalog.Entry(
- name='name_value',
- entry_type='entry_type_value',
- parent_entry='parent_entry_value',
- fully_qualified_name='fully_qualified_name_value',
- )
- response = client.create_entry(request)
-
- # Establish that the underlying gRPC stub method was called.
- assert len(call.mock_calls) == 1
- _, args, _ = call.mock_calls[0]
- request = catalog.CreateEntryRequest()
- assert args[0] == request
-
- # Establish that the response is the type that we expect.
- assert isinstance(response, catalog.Entry) - assert response.name == 'name_value' - assert response.entry_type == 'entry_type_value' - assert response.parent_entry == 'parent_entry_value' - assert response.fully_qualified_name == 'fully_qualified_name_value' - - -def test_create_entry_non_empty_request_with_auto_populated_field(): - # This test is a coverage failsafe to make sure that UUID4 fields are - # automatically populated, according to AIP-4235, with non-empty requests. - client = CatalogServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. - request = catalog.CreateEntryRequest( - parent='parent_value', - entry_id='entry_id_value', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_entry), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client.create_entry(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == catalog.CreateEntryRequest( - parent='parent_value', - entry_id='entry_id_value', - ) - -def test_create_entry_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = CatalogServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.create_entry in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.create_entry] = mock_rpc - request = {} - client.create_entry(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - client.create_entry(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_create_entry_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: - client = CatalogServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._client._transport.create_entry in client._client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.AsyncMock() - mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[client._client._transport.create_entry] = mock_rpc - - request = {} - await client.create_entry(request) - - # Establish that the underlying gRPC stub method was called. 
- assert mock_rpc.call_count == 1
-
- await client.create_entry(request)
-
- # Establish that a new wrapper was not created for this call
- assert wrapper_fn.call_count == 0
- assert mock_rpc.call_count == 2
-
-@pytest.mark.asyncio
-async def test_create_entry_async(transport: str = 'grpc_asyncio', request_type=catalog.CreateEntryRequest):
- client = CatalogServiceAsyncClient(
- credentials=async_anonymous_credentials(),
- transport=transport,
- )
-
- # Everything is optional in proto3 as far as the runtime is concerned,
- # and we are mocking out the actual API, so just send an empty request.
- request = request_type()
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client.transport.create_entry),
- '__call__') as call:
- # Designate an appropriate return value for the call.
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(catalog.Entry(
- name='name_value',
- entry_type='entry_type_value',
- parent_entry='parent_entry_value',
- fully_qualified_name='fully_qualified_name_value',
- ))
- response = await client.create_entry(request)
-
- # Establish that the underlying gRPC stub method was called.
- assert len(call.mock_calls)
- _, args, _ = call.mock_calls[0]
- request = catalog.CreateEntryRequest()
- assert args[0] == request
-
- # Establish that the response is the type that we expect.
- assert isinstance(response, catalog.Entry)
- assert response.name == 'name_value'
- assert response.entry_type == 'entry_type_value'
- assert response.parent_entry == 'parent_entry_value'
- assert response.fully_qualified_name == 'fully_qualified_name_value'
-
-
-@pytest.mark.asyncio
-async def test_create_entry_async_from_dict():
- await test_create_entry_async(request_type=dict)
-
-def test_create_entry_field_headers():
- client = CatalogServiceClient(
- credentials=ga_credentials.AnonymousCredentials(),
- )
-
- # Any value that is part of the HTTP/1.1 URI should be sent as
- # a field header. Set these to a non-empty value.
- request = catalog.CreateEntryRequest()
-
- request.parent = 'parent_value'
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client.transport.create_entry),
- '__call__') as call:
- call.return_value = catalog.Entry()
- client.create_entry(request)
-
- # Establish that the underlying gRPC stub method was called.
- assert len(call.mock_calls) == 1
- _, args, _ = call.mock_calls[0]
- assert args[0] == request
-
- # Establish that the field header was sent.
- _, _, kw = call.mock_calls[0]
- assert (
- 'x-goog-request-params',
- 'parent=parent_value',
- ) in kw['metadata']
-
-
-@pytest.mark.asyncio
-async def test_create_entry_field_headers_async():
- client = CatalogServiceAsyncClient(
- credentials=async_anonymous_credentials(),
- )
-
- # Any value that is part of the HTTP/1.1 URI should be sent as
- # a field header. Set these to a non-empty value.
- request = catalog.CreateEntryRequest()
-
- request.parent = 'parent_value'
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client.transport.create_entry),
- '__call__') as call:
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(catalog.Entry())
- await client.create_entry(request)
-
- # Establish that the underlying gRPC stub method was called.
- assert len(call.mock_calls)
- _, args, _ = call.mock_calls[0]
- assert args[0] == request
-
- # Establish that the field header was sent.
- _, _, kw = call.mock_calls[0]
- assert (
- 'x-goog-request-params',
- 'parent=parent_value',
- ) in kw['metadata']
-
-
-def test_create_entry_flattened():
- client = CatalogServiceClient(
- credentials=ga_credentials.AnonymousCredentials(),
- )
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client.transport.create_entry),
- '__call__') as call:
- # Designate an appropriate return value for the call.
- call.return_value = catalog.Entry()
- # Call the method with a truthy value for each flattened field,
- # using the keyword arguments to the method.
- client.create_entry(
- parent='parent_value',
- entry=catalog.Entry(name='name_value'),
- entry_id='entry_id_value',
- )
-
- # Establish that the underlying call was made with the expected
- # request object values.
- assert len(call.mock_calls) == 1
- _, args, _ = call.mock_calls[0]
- arg = args[0].parent
- mock_val = 'parent_value'
- assert arg == mock_val
- arg = args[0].entry
- mock_val = catalog.Entry(name='name_value')
- assert arg == mock_val
- arg = args[0].entry_id
- mock_val = 'entry_id_value'
- assert arg == mock_val
-
-
-def test_create_entry_flattened_error():
- client = CatalogServiceClient(
- credentials=ga_credentials.AnonymousCredentials(),
- )
-
- # Attempting to call a method with both a request object and flattened
- # fields is an error.
- with pytest.raises(ValueError):
- client.create_entry(
- catalog.CreateEntryRequest(),
- parent='parent_value',
- entry=catalog.Entry(name='name_value'),
- entry_id='entry_id_value',
- )
-
-@pytest.mark.asyncio
-async def test_create_entry_flattened_async():
- client = CatalogServiceAsyncClient(
- credentials=async_anonymous_credentials(),
- )
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client.transport.create_entry),
- '__call__') as call:
- # Designate an appropriate return value for the call.
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(catalog.Entry())
- # Call the method with a truthy value for each flattened field,
- # using the keyword arguments to the method.
- response = await client.create_entry(
- parent='parent_value',
- entry=catalog.Entry(name='name_value'),
- entry_id='entry_id_value',
- )
-
- # Establish that the underlying call was made with the expected
- # request object values.
- assert len(call.mock_calls)
- _, args, _ = call.mock_calls[0]
- arg = args[0].parent
- mock_val = 'parent_value'
- assert arg == mock_val
- arg = args[0].entry
- mock_val = catalog.Entry(name='name_value')
- assert arg == mock_val
- arg = args[0].entry_id
- mock_val = 'entry_id_value'
- assert arg == mock_val
-
-@pytest.mark.asyncio
-async def test_create_entry_flattened_error_async():
- client = CatalogServiceAsyncClient(
- credentials=async_anonymous_credentials(),
- )
-
- # Attempting to call a method with both a request object and flattened
- # fields is an error.
- with pytest.raises(ValueError):
- await client.create_entry(
- catalog.CreateEntryRequest(),
- parent='parent_value',
- entry=catalog.Entry(name='name_value'),
- entry_id='entry_id_value',
- )
-
-
-@pytest.mark.parametrize("request_type", [
- catalog.UpdateEntryRequest,
- dict,
-])
-def test_update_entry(request_type, transport: str = 'grpc'):
- client = CatalogServiceClient(
- credentials=ga_credentials.AnonymousCredentials(),
- transport=transport,
- )
-
- # Everything is optional in proto3 as far as the runtime is concerned,
- # and we are mocking out the actual API, so just send an empty request.
- request = request_type()
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client.transport.update_entry),
- '__call__') as call:
- # Designate an appropriate return value for the call.
- call.return_value = catalog.Entry(
- name='name_value',
- entry_type='entry_type_value',
- parent_entry='parent_entry_value',
- fully_qualified_name='fully_qualified_name_value',
- )
- response = client.update_entry(request)
-
- # Establish that the underlying gRPC stub method was called.
- assert len(call.mock_calls) == 1
- _, args, _ = call.mock_calls[0]
- request = catalog.UpdateEntryRequest()
- assert args[0] == request
-
- # Establish that the response is the type that we expect.
- assert isinstance(response, catalog.Entry)
- assert response.name == 'name_value'
- assert response.entry_type == 'entry_type_value'
- assert response.parent_entry == 'parent_entry_value'
- assert response.fully_qualified_name == 'fully_qualified_name_value'
-
-
-def test_update_entry_non_empty_request_with_auto_populated_field():
- # This test is a coverage failsafe to make sure that UUID4 fields are
- # automatically populated, according to AIP-4235, with non-empty requests.
- client = CatalogServiceClient(
- credentials=ga_credentials.AnonymousCredentials(),
- transport='grpc',
- )
-
- # Populate all string fields in the request which are not UUID4
- # since we want to check that UUID4 are populated automatically
- # if they meet the requirements of AIP 4235.
- request = catalog.UpdateEntryRequest()
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client.transport.update_entry),
- '__call__') as call:
- call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string.
- client.update_entry(request=request)
- call.assert_called()
- _, args, _ = call.mock_calls[0]
- assert args[0] == catalog.UpdateEntryRequest()
-
-def test_update_entry_use_cached_wrapped_rpc():
- # Clients should use _prep_wrapped_messages to create cached wrapped rpcs,
- # instead of constructing them on each call
- with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn:
- client = CatalogServiceClient(
- credentials=ga_credentials.AnonymousCredentials(),
- transport="grpc",
- )
-
- # Should wrap all calls on client creation
- assert wrapper_fn.call_count > 0
- wrapper_fn.reset_mock()
-
- # Ensure method has been cached
- assert client._transport.update_entry in client._transport._wrapped_methods
-
- # Replace cached wrapped function with mock
- mock_rpc = mock.Mock()
- mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string.
- client._transport._wrapped_methods[client._transport.update_entry] = mock_rpc
- request = {}
- client.update_entry(request)
-
- # Establish that the underlying gRPC stub method was called.
- assert mock_rpc.call_count == 1
-
- client.update_entry(request)
-
- # Establish that a new wrapper was not created for this call
- assert wrapper_fn.call_count == 0
- assert mock_rpc.call_count == 2
-
-@pytest.mark.asyncio
-async def test_update_entry_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"):
- # Clients should use _prep_wrapped_messages to create cached wrapped rpcs,
- # instead of constructing them on each call
- with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn:
- client = CatalogServiceAsyncClient(
- credentials=async_anonymous_credentials(),
- transport=transport,
- )
-
- # Should wrap all calls on client creation
- assert wrapper_fn.call_count > 0
- wrapper_fn.reset_mock()
-
- # Ensure method has been cached
- assert client._client._transport.update_entry in client._client._transport._wrapped_methods
-
- # Replace cached wrapped function with mock
- mock_rpc = mock.AsyncMock()
- mock_rpc.return_value = mock.Mock()
- client._client._transport._wrapped_methods[client._client._transport.update_entry] = mock_rpc
-
- request = {}
- await client.update_entry(request)
-
- # Establish that the underlying gRPC stub method was called.
- assert mock_rpc.call_count == 1
-
- await client.update_entry(request)
-
- # Establish that a new wrapper was not created for this call
- assert wrapper_fn.call_count == 0
- assert mock_rpc.call_count == 2
-
-@pytest.mark.asyncio
-async def test_update_entry_async(transport: str = 'grpc_asyncio', request_type=catalog.UpdateEntryRequest):
- client = CatalogServiceAsyncClient(
- credentials=async_anonymous_credentials(),
- transport=transport,
- )
-
- # Everything is optional in proto3 as far as the runtime is concerned,
- # and we are mocking out the actual API, so just send an empty request.
- request = request_type()
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client.transport.update_entry),
- '__call__') as call:
- # Designate an appropriate return value for the call.
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(catalog.Entry(
- name='name_value',
- entry_type='entry_type_value',
- parent_entry='parent_entry_value',
- fully_qualified_name='fully_qualified_name_value',
- ))
- response = await client.update_entry(request)
-
- # Establish that the underlying gRPC stub method was called.
- assert len(call.mock_calls)
- _, args, _ = call.mock_calls[0]
- request = catalog.UpdateEntryRequest()
- assert args[0] == request
-
- # Establish that the response is the type that we expect.
- assert isinstance(response, catalog.Entry)
- assert response.name == 'name_value'
- assert response.entry_type == 'entry_type_value'
- assert response.parent_entry == 'parent_entry_value'
- assert response.fully_qualified_name == 'fully_qualified_name_value'
-
-
-@pytest.mark.asyncio
-async def test_update_entry_async_from_dict():
- await test_update_entry_async(request_type=dict)
-
-def test_update_entry_field_headers():
- client = CatalogServiceClient(
- credentials=ga_credentials.AnonymousCredentials(),
- )
-
- # Any value that is part of the HTTP/1.1 URI should be sent as
- # a field header. Set these to a non-empty value.
- request = catalog.UpdateEntryRequest()
-
- request.entry.name = 'name_value'
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client.transport.update_entry),
- '__call__') as call:
- call.return_value = catalog.Entry()
- client.update_entry(request)
-
- # Establish that the underlying gRPC stub method was called.
- assert len(call.mock_calls) == 1
- _, args, _ = call.mock_calls[0]
- assert args[0] == request
-
- # Establish that the field header was sent.
- _, _, kw = call.mock_calls[0]
- assert (
- 'x-goog-request-params',
- 'entry.name=name_value',
- ) in kw['metadata']
-
-
-@pytest.mark.asyncio
-async def test_update_entry_field_headers_async():
- client = CatalogServiceAsyncClient(
- credentials=async_anonymous_credentials(),
- )
-
- # Any value that is part of the HTTP/1.1 URI should be sent as
- # a field header. Set these to a non-empty value.
- request = catalog.UpdateEntryRequest()
-
- request.entry.name = 'name_value'
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client.transport.update_entry),
- '__call__') as call:
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(catalog.Entry())
- await client.update_entry(request)
-
- # Establish that the underlying gRPC stub method was called.
- assert len(call.mock_calls)
- _, args, _ = call.mock_calls[0]
- assert args[0] == request
-
- # Establish that the field header was sent.
- _, _, kw = call.mock_calls[0]
- assert (
- 'x-goog-request-params',
- 'entry.name=name_value',
- ) in kw['metadata']
-
-
-def test_update_entry_flattened():
- client = CatalogServiceClient(
- credentials=ga_credentials.AnonymousCredentials(),
- )
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client.transport.update_entry),
- '__call__') as call:
- # Designate an appropriate return value for the call.
- call.return_value = catalog.Entry()
- # Call the method with a truthy value for each flattened field,
- # using the keyword arguments to the method.
- client.update_entry(
- entry=catalog.Entry(name='name_value'),
- update_mask=field_mask_pb2.FieldMask(paths=['paths_value']),
- )
-
- # Establish that the underlying call was made with the expected
- # request object values.
- assert len(call.mock_calls) == 1
- _, args, _ = call.mock_calls[0]
- arg = args[0].entry
- mock_val = catalog.Entry(name='name_value')
- assert arg == mock_val
- arg = args[0].update_mask
- mock_val = field_mask_pb2.FieldMask(paths=['paths_value'])
- assert arg == mock_val
-
-
-def test_update_entry_flattened_error():
- client = CatalogServiceClient(
- credentials=ga_credentials.AnonymousCredentials(),
- )
-
- # Attempting to call a method with both a request object and flattened
- # fields is an error.
- with pytest.raises(ValueError):
- client.update_entry(
- catalog.UpdateEntryRequest(),
- entry=catalog.Entry(name='name_value'),
- update_mask=field_mask_pb2.FieldMask(paths=['paths_value']),
- )
-
-@pytest.mark.asyncio
-async def test_update_entry_flattened_async():
- client = CatalogServiceAsyncClient(
- credentials=async_anonymous_credentials(),
- )
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client.transport.update_entry),
- '__call__') as call:
- # Designate an appropriate return value for the call.
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(catalog.Entry())
- # Call the method with a truthy value for each flattened field,
- # using the keyword arguments to the method.
- response = await client.update_entry( - entry=catalog.Entry(name='name_value'), - update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].entry - mock_val = catalog.Entry(name='name_value') - assert arg == mock_val - arg = args[0].update_mask - mock_val = field_mask_pb2.FieldMask(paths=['paths_value']) - assert arg == mock_val - -@pytest.mark.asyncio -async def test_update_entry_flattened_error_async(): - client = CatalogServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - await client.update_entry( - catalog.UpdateEntryRequest(), - entry=catalog.Entry(name='name_value'), - update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), - ) - - -@pytest.mark.parametrize("request_type", [ - catalog.DeleteEntryRequest, - dict, -]) -def test_delete_entry(request_type, transport: str = 'grpc'): - client = CatalogServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_entry), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = catalog.Entry( - name='name_value', - entry_type='entry_type_value', - parent_entry='parent_entry_value', - fully_qualified_name='fully_qualified_name_value', - ) - response = client.delete_entry(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - request = catalog.DeleteEntryRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, catalog.Entry) - assert response.name == 'name_value' - assert response.entry_type == 'entry_type_value' - assert response.parent_entry == 'parent_entry_value' - assert response.fully_qualified_name == 'fully_qualified_name_value' - - -def test_delete_entry_non_empty_request_with_auto_populated_field(): - # This test is a coverage failsafe to make sure that UUID4 fields are - # automatically populated, according to AIP-4235, with non-empty requests. - client = CatalogServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. - request = catalog.DeleteEntryRequest( - name='name_value', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_entry), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. 
- client.delete_entry(request=request)
- call.assert_called()
- _, args, _ = call.mock_calls[0]
- assert args[0] == catalog.DeleteEntryRequest(
- name='name_value',
- )
-
-def test_delete_entry_use_cached_wrapped_rpc():
- # Clients should use _prep_wrapped_messages to create cached wrapped rpcs,
- # instead of constructing them on each call
- with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn:
- client = CatalogServiceClient(
- credentials=ga_credentials.AnonymousCredentials(),
- transport="grpc",
- )
-
- # Should wrap all calls on client creation
- assert wrapper_fn.call_count > 0
- wrapper_fn.reset_mock()
-
- # Ensure method has been cached
- assert client._transport.delete_entry in client._transport._wrapped_methods
-
- # Replace cached wrapped function with mock
- mock_rpc = mock.Mock()
- mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string.
- client._transport._wrapped_methods[client._transport.delete_entry] = mock_rpc
- request = {}
- client.delete_entry(request)
-
- # Establish that the underlying gRPC stub method was called.
- assert mock_rpc.call_count == 1
-
- client.delete_entry(request)
-
- # Establish that a new wrapper was not created for this call
- assert wrapper_fn.call_count == 0
- assert mock_rpc.call_count == 2
-
-@pytest.mark.asyncio
-async def test_delete_entry_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"):
- # Clients should use _prep_wrapped_messages to create cached wrapped rpcs,
- # instead of constructing them on each call
- with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn:
- client = CatalogServiceAsyncClient(
- credentials=async_anonymous_credentials(),
- transport=transport,
- )
-
- # Should wrap all calls on client creation
- assert wrapper_fn.call_count > 0
- wrapper_fn.reset_mock()
-
- # Ensure method has been cached
- assert client._client._transport.delete_entry in client._client._transport._wrapped_methods
-
- # Replace cached wrapped function with mock
- mock_rpc = mock.AsyncMock()
- mock_rpc.return_value = mock.Mock()
- client._client._transport._wrapped_methods[client._client._transport.delete_entry] = mock_rpc
-
- request = {}
- await client.delete_entry(request)
-
- # Establish that the underlying gRPC stub method was called.
- assert mock_rpc.call_count == 1
-
- await client.delete_entry(request)
-
- # Establish that a new wrapper was not created for this call
- assert wrapper_fn.call_count == 0
- assert mock_rpc.call_count == 2
-
-@pytest.mark.asyncio
-async def test_delete_entry_async(transport: str = 'grpc_asyncio', request_type=catalog.DeleteEntryRequest):
- client = CatalogServiceAsyncClient(
- credentials=async_anonymous_credentials(),
- transport=transport,
- )
-
- # Everything is optional in proto3 as far as the runtime is concerned,
- # and we are mocking out the actual API, so just send an empty request.
- request = request_type()
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client.transport.delete_entry),
- '__call__') as call:
- # Designate an appropriate return value for the call.
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(catalog.Entry(
- name='name_value',
- entry_type='entry_type_value',
- parent_entry='parent_entry_value',
- fully_qualified_name='fully_qualified_name_value',
- ))
- response = await client.delete_entry(request)
-
- # Establish that the underlying gRPC stub method was called.
- assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - request = catalog.DeleteEntryRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, catalog.Entry) - assert response.name == 'name_value' - assert response.entry_type == 'entry_type_value' - assert response.parent_entry == 'parent_entry_value' - assert response.fully_qualified_name == 'fully_qualified_name_value' - - -@pytest.mark.asyncio -async def test_delete_entry_async_from_dict(): - await test_delete_entry_async(request_type=dict) - -def test_delete_entry_field_headers(): - client = CatalogServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = catalog.DeleteEntryRequest() - - request.name = 'name_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_entry), - '__call__') as call: - call.return_value = catalog.Entry() - client.delete_entry(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'name=name_value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_delete_entry_field_headers_async(): - client = CatalogServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = catalog.DeleteEntryRequest() - - request.name = 'name_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_entry), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(catalog.Entry()) - await client.delete_entry(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'name=name_value', - ) in kw['metadata'] - - -def test_delete_entry_flattened(): - client = CatalogServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_entry), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = catalog.Entry() - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.delete_entry( - name='name_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = 'name_value' - assert arg == mock_val - - -def test_delete_entry_flattened_error(): - client = CatalogServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. 
- with pytest.raises(ValueError):
- client.delete_entry(
- catalog.DeleteEntryRequest(),
- name='name_value',
- )
-
-@pytest.mark.asyncio
-async def test_delete_entry_flattened_async():
- client = CatalogServiceAsyncClient(
- credentials=async_anonymous_credentials(),
- )
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client.transport.delete_entry),
- '__call__') as call:
- # Designate an appropriate return value for the call.
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(catalog.Entry())
- # Call the method with a truthy value for each flattened field,
- # using the keyword arguments to the method.
- response = await client.delete_entry(
- name='name_value',
- )
-
- # Establish that the underlying call was made with the expected
- # request object values.
- assert len(call.mock_calls)
- _, args, _ = call.mock_calls[0]
- arg = args[0].name
- mock_val = 'name_value'
- assert arg == mock_val
-
-@pytest.mark.asyncio
-async def test_delete_entry_flattened_error_async():
- client = CatalogServiceAsyncClient(
- credentials=async_anonymous_credentials(),
- )
-
- # Attempting to call a method with both a request object and flattened
- # fields is an error.
- with pytest.raises(ValueError):
- await client.delete_entry(
- catalog.DeleteEntryRequest(),
- name='name_value',
- )
-
-
-@pytest.mark.parametrize("request_type", [
- catalog.ListEntriesRequest,
- dict,
-])
-def test_list_entries(request_type, transport: str = 'grpc'):
- client = CatalogServiceClient(
- credentials=ga_credentials.AnonymousCredentials(),
- transport=transport,
- )
-
- # Everything is optional in proto3 as far as the runtime is concerned,
- # and we are mocking out the actual API, so just send an empty request.
- request = request_type()
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client.transport.list_entries),
- '__call__') as call:
- # Designate an appropriate return value for the call.
- call.return_value = catalog.ListEntriesResponse(
- next_page_token='next_page_token_value',
- )
- response = client.list_entries(request)
-
- # Establish that the underlying gRPC stub method was called.
- assert len(call.mock_calls) == 1
- _, args, _ = call.mock_calls[0]
- request = catalog.ListEntriesRequest()
- assert args[0] == request
-
- # Establish that the response is the type that we expect.
- assert isinstance(response, pagers.ListEntriesPager)
- assert response.next_page_token == 'next_page_token_value'
-
-
-def test_list_entries_non_empty_request_with_auto_populated_field():
- # This test is a coverage failsafe to make sure that UUID4 fields are
- # automatically populated, according to AIP-4235, with non-empty requests.
- client = CatalogServiceClient(
- credentials=ga_credentials.AnonymousCredentials(),
- transport='grpc',
- )
-
- # Populate all string fields in the request which are not UUID4
- # since we want to check that UUID4 are populated automatically
- # if they meet the requirements of AIP 4235.
- request = catalog.ListEntriesRequest(
- parent='parent_value',
- page_token='page_token_value',
- filter='filter_value',
- )
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client.transport.list_entries),
- '__call__') as call:
- call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string.
- client.list_entries(request=request)
- call.assert_called()
- _, args, _ = call.mock_calls[0]
- assert args[0] == catalog.ListEntriesRequest(
- parent='parent_value',
- page_token='page_token_value',
- filter='filter_value',
- )
-
-def test_list_entries_use_cached_wrapped_rpc():
- # Clients should use _prep_wrapped_messages to create cached wrapped rpcs,
- # instead of constructing them on each call
- with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn:
- client = CatalogServiceClient(
- credentials=ga_credentials.AnonymousCredentials(),
- transport="grpc",
- )
-
- # Should wrap all calls on client creation
- assert wrapper_fn.call_count > 0
- wrapper_fn.reset_mock()
-
- # Ensure method has been cached
- assert client._transport.list_entries in client._transport._wrapped_methods
-
- # Replace cached wrapped function with mock
- mock_rpc = mock.Mock()
- mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string.
- client._transport._wrapped_methods[client._transport.list_entries] = mock_rpc
- request = {}
- client.list_entries(request)
-
- # Establish that the underlying gRPC stub method was called.
- assert mock_rpc.call_count == 1
-
- client.list_entries(request)
-
- # Establish that a new wrapper was not created for this call
- assert wrapper_fn.call_count == 0
- assert mock_rpc.call_count == 2
-
-@pytest.mark.asyncio
-async def test_list_entries_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"):
- # Clients should use _prep_wrapped_messages to create cached wrapped rpcs,
- # instead of constructing them on each call
- with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn:
- client = CatalogServiceAsyncClient(
- credentials=async_anonymous_credentials(),
- transport=transport,
- )
-
- # Should wrap all calls on client creation
- assert wrapper_fn.call_count > 0
- wrapper_fn.reset_mock()
-
- # Ensure method has been cached
- assert client._client._transport.list_entries in client._client._transport._wrapped_methods
-
- # Replace cached wrapped function with mock
- mock_rpc = mock.AsyncMock()
- mock_rpc.return_value = mock.Mock()
- client._client._transport._wrapped_methods[client._client._transport.list_entries] = mock_rpc
-
- request = {}
- await client.list_entries(request)
-
- # Establish that the underlying gRPC stub method was called.
- assert mock_rpc.call_count == 1
-
- await client.list_entries(request)
-
- # Establish that a new wrapper was not created for this call
- assert wrapper_fn.call_count == 0
- assert mock_rpc.call_count == 2
-
-@pytest.mark.asyncio
-async def test_list_entries_async(transport: str = 'grpc_asyncio', request_type=catalog.ListEntriesRequest):
- client = CatalogServiceAsyncClient(
- credentials=async_anonymous_credentials(),
- transport=transport,
- )
-
- # Everything is optional in proto3 as far as the runtime is concerned,
- # and we are mocking out the actual API, so just send an empty request.
- request = request_type()
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client.transport.list_entries),
- '__call__') as call:
- # Designate an appropriate return value for the call.
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(catalog.ListEntriesResponse(
- next_page_token='next_page_token_value',
- ))
- response = await client.list_entries(request)
-
- # Establish that the underlying gRPC stub method was called.
- assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - request = catalog.ListEntriesRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, pagers.ListEntriesAsyncPager) - assert response.next_page_token == 'next_page_token_value' - - -@pytest.mark.asyncio -async def test_list_entries_async_from_dict(): - await test_list_entries_async(request_type=dict) - -def test_list_entries_field_headers(): - client = CatalogServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = catalog.ListEntriesRequest() - - request.parent = 'parent_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_entries), - '__call__') as call: - call.return_value = catalog.ListEntriesResponse() - client.list_entries(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'parent=parent_value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_list_entries_field_headers_async(): - client = CatalogServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = catalog.ListEntriesRequest() - - request.parent = 'parent_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_entries), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(catalog.ListEntriesResponse()) - await client.list_entries(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'parent=parent_value', - ) in kw['metadata'] - - -def test_list_entries_flattened(): - client = CatalogServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_entries), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = catalog.ListEntriesResponse() - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.list_entries( - parent='parent_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - arg = args[0].parent - mock_val = 'parent_value' - assert arg == mock_val - - -def test_list_entries_flattened_error(): - client = CatalogServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. 
- with pytest.raises(ValueError):
- client.list_entries(
- catalog.ListEntriesRequest(),
- parent='parent_value',
- )
-
-@pytest.mark.asyncio
-async def test_list_entries_flattened_async():
- client = CatalogServiceAsyncClient(
- credentials=async_anonymous_credentials(),
- )
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client.transport.list_entries),
- '__call__') as call:
- # Designate an appropriate return value for the call.
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(catalog.ListEntriesResponse())
- # Call the method with a truthy value for each flattened field,
- # using the keyword arguments to the method.
- response = await client.list_entries(
- parent='parent_value',
- )
-
- # Establish that the underlying call was made with the expected
- # request object values.
- assert len(call.mock_calls)
- _, args, _ = call.mock_calls[0]
- arg = args[0].parent
- mock_val = 'parent_value'
- assert arg == mock_val
-
-@pytest.mark.asyncio
-async def test_list_entries_flattened_error_async():
- client = CatalogServiceAsyncClient(
- credentials=async_anonymous_credentials(),
- )
-
- # Attempting to call a method with both a request object and flattened
- # fields is an error.
- with pytest.raises(ValueError):
- await client.list_entries(
- catalog.ListEntriesRequest(),
- parent='parent_value',
- )
-
-
-def test_list_entries_pager(transport_name: str = "grpc"):
- client = CatalogServiceClient(
- credentials=ga_credentials.AnonymousCredentials(),
- transport=transport_name,
- )
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client.transport.list_entries),
- '__call__') as call:
- # Set the response to a series of pages.
- call.side_effect = (
- catalog.ListEntriesResponse(
- entries=[
- catalog.Entry(),
- catalog.Entry(),
- catalog.Entry(),
- ],
- next_page_token='abc',
- ),
- catalog.ListEntriesResponse(
- entries=[],
- next_page_token='def',
- ),
- catalog.ListEntriesResponse(
- entries=[
- catalog.Entry(),
- ],
- next_page_token='ghi',
- ),
- catalog.ListEntriesResponse(
- entries=[
- catalog.Entry(),
- catalog.Entry(),
- ],
- ),
- RuntimeError,
- )
-
- expected_metadata = ()
- retry = retries.Retry()
- timeout = 5
- expected_metadata = tuple(expected_metadata) + (
- gapic_v1.routing_header.to_grpc_metadata((
- ('parent', ''),
- )),
- )
- pager = client.list_entries(request={}, retry=retry, timeout=timeout)
-
- assert pager._metadata == expected_metadata
- assert pager._retry == retry
- assert pager._timeout == timeout
-
- results = list(pager)
- assert len(results) == 6
- assert all(isinstance(i, catalog.Entry)
- for i in results)
-def test_list_entries_pages(transport_name: str = "grpc"):
- client = CatalogServiceClient(
- credentials=ga_credentials.AnonymousCredentials(),
- transport=transport_name,
- )
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client.transport.list_entries),
- '__call__') as call:
- # Set the response to a series of pages.
- call.side_effect = ( - catalog.ListEntriesResponse( - entries=[ - catalog.Entry(), - catalog.Entry(), - catalog.Entry(), - ], - next_page_token='abc', - ), - catalog.ListEntriesResponse( - entries=[], - next_page_token='def', - ), - catalog.ListEntriesResponse( - entries=[ - catalog.Entry(), - ], - next_page_token='ghi', - ), - catalog.ListEntriesResponse( - entries=[ - catalog.Entry(), - catalog.Entry(), - ], - ), - RuntimeError, - ) - pages = list(client.list_entries(request={}).pages) - for page_, token in zip(pages, ['abc','def','ghi', '']): - assert page_.raw_page.next_page_token == token - -@pytest.mark.asyncio -async def test_list_entries_async_pager(): - client = CatalogServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_entries), - '__call__', new_callable=mock.AsyncMock) as call: - # Set the response to a series of pages. - call.side_effect = ( - catalog.ListEntriesResponse( - entries=[ - catalog.Entry(), - catalog.Entry(), - catalog.Entry(), - ], - next_page_token='abc', - ), - catalog.ListEntriesResponse( - entries=[], - next_page_token='def', - ), - catalog.ListEntriesResponse( - entries=[ - catalog.Entry(), - ], - next_page_token='ghi', - ), - catalog.ListEntriesResponse( - entries=[ - catalog.Entry(), - catalog.Entry(), - ], - ), - RuntimeError, - ) - async_pager = await client.list_entries(request={},) - assert async_pager.next_page_token == 'abc' - responses = [] - async for response in async_pager: # pragma: no branch - responses.append(response) - - assert len(responses) == 6 - assert all(isinstance(i, catalog.Entry) - for i in responses) - - -@pytest.mark.asyncio -async def test_list_entries_async_pages(): - client = CatalogServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_entries), - '__call__', new_callable=mock.AsyncMock) as call: - # Set the response to a series of pages. - call.side_effect = ( - catalog.ListEntriesResponse( - entries=[ - catalog.Entry(), - catalog.Entry(), - catalog.Entry(), - ], - next_page_token='abc', - ), - catalog.ListEntriesResponse( - entries=[], - next_page_token='def', - ), - catalog.ListEntriesResponse( - entries=[ - catalog.Entry(), - ], - next_page_token='ghi', - ), - catalog.ListEntriesResponse( - entries=[ - catalog.Entry(), - catalog.Entry(), - ], - ), - RuntimeError, - ) - pages = [] - # Workaround issue in python 3.9 related to code coverage by adding `# pragma: no branch` - # See https://github.com/googleapis/gapic-generator-python/pull/1174#issuecomment-1025132372 - async for page_ in ( # pragma: no branch - await client.list_entries(request={}) - ).pages: - pages.append(page_) - for page_, token in zip(pages, ['abc','def','ghi', '']): - assert page_.raw_page.next_page_token == token - -@pytest.mark.parametrize("request_type", [ - catalog.GetEntryRequest, - dict, -]) -def test_get_entry(request_type, transport: str = 'grpc'): - client = CatalogServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.get_entry), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = catalog.Entry( - name='name_value', - entry_type='entry_type_value', - parent_entry='parent_entry_value', - fully_qualified_name='fully_qualified_name_value', - ) - response = client.get_entry(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - request = catalog.GetEntryRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, catalog.Entry) - assert response.name == 'name_value' - assert response.entry_type == 'entry_type_value' - assert response.parent_entry == 'parent_entry_value' - assert response.fully_qualified_name == 'fully_qualified_name_value' - - -def test_get_entry_non_empty_request_with_auto_populated_field(): - # This test is a coverage failsafe to make sure that UUID4 fields are - # automatically populated, according to AIP-4235, with non-empty requests. - client = CatalogServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. - request = catalog.GetEntryRequest( - name='name_value', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_entry), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client.get_entry(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == catalog.GetEntryRequest( - name='name_value', - ) - -def test_get_entry_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = CatalogServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.get_entry in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.get_entry] = mock_rpc - request = {} - client.get_entry(request) - - # Establish that the underlying gRPC stub method was called. 
- assert mock_rpc.call_count == 1
-
- client.get_entry(request)
-
- # Establish that a new wrapper was not created for this call
- assert wrapper_fn.call_count == 0
- assert mock_rpc.call_count == 2
-
-@pytest.mark.asyncio
-async def test_get_entry_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"):
- # Clients should use _prep_wrapped_messages to create cached wrapped rpcs,
- # instead of constructing them on each call
- with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn:
- client = CatalogServiceAsyncClient(
- credentials=async_anonymous_credentials(),
- transport=transport,
- )
-
- # Should wrap all calls on client creation
- assert wrapper_fn.call_count > 0
- wrapper_fn.reset_mock()
-
- # Ensure method has been cached
- assert client._client._transport.get_entry in client._client._transport._wrapped_methods
-
- # Replace cached wrapped function with mock
- mock_rpc = mock.AsyncMock()
- mock_rpc.return_value = mock.Mock()
- client._client._transport._wrapped_methods[client._client._transport.get_entry] = mock_rpc
-
- request = {}
- await client.get_entry(request)
-
- # Establish that the underlying gRPC stub method was called.
- assert mock_rpc.call_count == 1
-
- await client.get_entry(request)
-
- # Establish that a new wrapper was not created for this call
- assert wrapper_fn.call_count == 0
- assert mock_rpc.call_count == 2
-
-@pytest.mark.asyncio
-async def test_get_entry_async(transport: str = 'grpc_asyncio', request_type=catalog.GetEntryRequest):
- client = CatalogServiceAsyncClient(
- credentials=async_anonymous_credentials(),
- transport=transport,
- )
-
- # Everything is optional in proto3 as far as the runtime is concerned,
- # and we are mocking out the actual API, so just send an empty request.
- request = request_type()
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client.transport.get_entry),
- '__call__') as call:
- # Designate an appropriate return value for the call.
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(catalog.Entry(
- name='name_value',
- entry_type='entry_type_value',
- parent_entry='parent_entry_value',
- fully_qualified_name='fully_qualified_name_value',
- ))
- response = await client.get_entry(request)
-
- # Establish that the underlying gRPC stub method was called.
- assert len(call.mock_calls)
- _, args, _ = call.mock_calls[0]
- request = catalog.GetEntryRequest()
- assert args[0] == request
-
- # Establish that the response is the type that we expect.
- assert isinstance(response, catalog.Entry)
- assert response.name == 'name_value'
- assert response.entry_type == 'entry_type_value'
- assert response.parent_entry == 'parent_entry_value'
- assert response.fully_qualified_name == 'fully_qualified_name_value'
-
-
-@pytest.mark.asyncio
-async def test_get_entry_async_from_dict():
- await test_get_entry_async(request_type=dict)
-
-def test_get_entry_field_headers():
- client = CatalogServiceClient(
- credentials=ga_credentials.AnonymousCredentials(),
- )
-
- # Any value that is part of the HTTP/1.1 URI should be sent as
- # a field header. Set these to a non-empty value.
- request = catalog.GetEntryRequest()
-
- request.name = 'name_value'
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client.transport.get_entry),
- '__call__') as call:
- call.return_value = catalog.Entry()
- client.get_entry(request)
-
- # Establish that the underlying gRPC stub method was called.
-    assert len(call.mock_calls) == 1
-    _, args, _ = call.mock_calls[0]
-    assert args[0] == request
-
-    # Establish that the field header was sent.
-    _, _, kw = call.mock_calls[0]
-    assert (
-        'x-goog-request-params',
-        'name=name_value',
-    ) in kw['metadata']
-
-
-@pytest.mark.asyncio
-async def test_get_entry_field_headers_async():
-    client = CatalogServiceAsyncClient(
-        credentials=async_anonymous_credentials(),
-    )
-
-    # Any value that is part of the HTTP/1.1 URI should be sent as
-    # a field header. Set these to a non-empty value.
-    request = catalog.GetEntryRequest()
-
-    request.name = 'name_value'
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.get_entry),
-            '__call__') as call:
-        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(catalog.Entry())
-        await client.get_entry(request)
-
-    # Establish that the underlying gRPC stub method was called.
-    assert len(call.mock_calls)
-    _, args, _ = call.mock_calls[0]
-    assert args[0] == request
-
-    # Establish that the field header was sent.
-    _, _, kw = call.mock_calls[0]
-    assert (
-        'x-goog-request-params',
-        'name=name_value',
-    ) in kw['metadata']
-
-
-def test_get_entry_flattened():
-    client = CatalogServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-    )
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.get_entry),
-            '__call__') as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = catalog.Entry()
-        # Call the method with a truthy value for each flattened field,
-        # using the keyword arguments to the method.
-        client.get_entry(
-            name='name_value',
-        )
-
-    # Establish that the underlying call was made with the expected
-    # request object values.
-    assert len(call.mock_calls) == 1
-    _, args, _ = call.mock_calls[0]
-    arg = args[0].name
-    mock_val = 'name_value'
-    assert arg == mock_val
-
-
-def test_get_entry_flattened_error():
-    client = CatalogServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-    )
-
-    # Attempting to call a method with both a request object and flattened
-    # fields is an error.
-    with pytest.raises(ValueError):
-        client.get_entry(
-            catalog.GetEntryRequest(),
-            name='name_value',
-        )
-
-@pytest.mark.asyncio
-async def test_get_entry_flattened_async():
-    client = CatalogServiceAsyncClient(
-        credentials=async_anonymous_credentials(),
-    )
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.get_entry),
-            '__call__') as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(catalog.Entry())
-        # Call the method with a truthy value for each flattened field,
-        # using the keyword arguments to the method.
-        response = await client.get_entry(
-            name='name_value',
-        )
-
-    # Establish that the underlying call was made with the expected
-    # request object values.
-    assert len(call.mock_calls)
-    _, args, _ = call.mock_calls[0]
-    arg = args[0].name
-    mock_val = 'name_value'
-    assert arg == mock_val
-
-@pytest.mark.asyncio
-async def test_get_entry_flattened_error_async():
-    client = CatalogServiceAsyncClient(
-        credentials=async_anonymous_credentials(),
-    )
-
-    # Attempting to call a method with both a request object and flattened
-    # fields is an error.
- with pytest.raises(ValueError): - await client.get_entry( - catalog.GetEntryRequest(), - name='name_value', - ) - - -@pytest.mark.parametrize("request_type", [ - catalog.LookupEntryRequest, - dict, -]) -def test_lookup_entry(request_type, transport: str = 'grpc'): - client = CatalogServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.lookup_entry), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = catalog.Entry( - name='name_value', - entry_type='entry_type_value', - parent_entry='parent_entry_value', - fully_qualified_name='fully_qualified_name_value', - ) - response = client.lookup_entry(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - request = catalog.LookupEntryRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, catalog.Entry) - assert response.name == 'name_value' - assert response.entry_type == 'entry_type_value' - assert response.parent_entry == 'parent_entry_value' - assert response.fully_qualified_name == 'fully_qualified_name_value' - - -def test_lookup_entry_non_empty_request_with_auto_populated_field(): - # This test is a coverage failsafe to make sure that UUID4 fields are - # automatically populated, according to AIP-4235, with non-empty requests. - client = CatalogServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. - request = catalog.LookupEntryRequest( - name='name_value', - entry='entry_value', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.lookup_entry), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client.lookup_entry(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == catalog.LookupEntryRequest( - name='name_value', - entry='entry_value', - ) - -def test_lookup_entry_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = CatalogServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.lookup_entry in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.lookup_entry] = mock_rpc - request = {} - client.lookup_entry(request) - - # Establish that the underlying gRPC stub method was called. 
-        assert mock_rpc.call_count == 1
-
-        client.lookup_entry(request)
-
-        # Establish that a new wrapper was not created for this call
-        assert wrapper_fn.call_count == 0
-        assert mock_rpc.call_count == 2
-
-@pytest.mark.asyncio
-async def test_lookup_entry_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"):
-    # Clients should use _prep_wrapped_messages to create cached wrapped rpcs,
-    # instead of constructing them on each call
-    with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn:
-        client = CatalogServiceAsyncClient(
-            credentials=async_anonymous_credentials(),
-            transport=transport,
-        )
-
-        # Should wrap all calls on client creation
-        assert wrapper_fn.call_count > 0
-        wrapper_fn.reset_mock()
-
-        # Ensure method has been cached
-        assert client._client._transport.lookup_entry in client._client._transport._wrapped_methods
-
-        # Replace cached wrapped function with mock
-        mock_rpc = mock.AsyncMock()
-        mock_rpc.return_value = mock.Mock()
-        client._client._transport._wrapped_methods[client._client._transport.lookup_entry] = mock_rpc
-
-        request = {}
-        await client.lookup_entry(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert mock_rpc.call_count == 1
-
-        await client.lookup_entry(request)
-
-        # Establish that a new wrapper was not created for this call
-        assert wrapper_fn.call_count == 0
-        assert mock_rpc.call_count == 2
-
-@pytest.mark.asyncio
-async def test_lookup_entry_async(transport: str = 'grpc_asyncio', request_type=catalog.LookupEntryRequest):
-    client = CatalogServiceAsyncClient(
-        credentials=async_anonymous_credentials(),
-        transport=transport,
-    )
-
-    # Everything is optional in proto3 as far as the runtime is concerned,
-    # and we are mocking out the actual API, so just send an empty request.
-    request = request_type()
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.lookup_entry),
-            '__call__') as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(catalog.Entry(
-            name='name_value',
-            entry_type='entry_type_value',
-            parent_entry='parent_entry_value',
-            fully_qualified_name='fully_qualified_name_value',
-        ))
-        response = await client.lookup_entry(request)
-
-    # Establish that the underlying gRPC stub method was called.
-    assert len(call.mock_calls)
-    _, args, _ = call.mock_calls[0]
-    request = catalog.LookupEntryRequest()
-    assert args[0] == request
-
-    # Establish that the response is the type that we expect.
-    assert isinstance(response, catalog.Entry)
-    assert response.name == 'name_value'
-    assert response.entry_type == 'entry_type_value'
-    assert response.parent_entry == 'parent_entry_value'
-    assert response.fully_qualified_name == 'fully_qualified_name_value'
-
-
-@pytest.mark.asyncio
-async def test_lookup_entry_async_from_dict():
-    await test_lookup_entry_async(request_type=dict)
-
-def test_lookup_entry_field_headers():
-    client = CatalogServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-    )
-
-    # Any value that is part of the HTTP/1.1 URI should be sent as
-    # a field header. Set these to a non-empty value.
-    request = catalog.LookupEntryRequest()
-
-    request.name = 'name_value'
-
-    # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object( - type(client.transport.lookup_entry), - '__call__') as call: - call.return_value = catalog.Entry() - client.lookup_entry(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'name=name_value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_lookup_entry_field_headers_async(): - client = CatalogServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = catalog.LookupEntryRequest() - - request.name = 'name_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.lookup_entry), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(catalog.Entry()) - await client.lookup_entry(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'name=name_value', - ) in kw['metadata'] - - -@pytest.mark.parametrize("request_type", [ - catalog.SearchEntriesRequest, - dict, -]) -def test_search_entries(request_type, transport: str = 'grpc'): - client = CatalogServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.search_entries), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = catalog.SearchEntriesResponse( - total_size=1086, - next_page_token='next_page_token_value', - unreachable=['unreachable_value'], - ) - response = client.search_entries(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - request = catalog.SearchEntriesRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, pagers.SearchEntriesPager) - assert response.total_size == 1086 - assert response.next_page_token == 'next_page_token_value' - assert response.unreachable == ['unreachable_value'] - - -def test_search_entries_non_empty_request_with_auto_populated_field(): - # This test is a coverage failsafe to make sure that UUID4 fields are - # automatically populated, according to AIP-4235, with non-empty requests. - client = CatalogServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. - request = catalog.SearchEntriesRequest( - name='name_value', - query='query_value', - page_token='page_token_value', - order_by='order_by_value', - scope='scope_value', - ) - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.search_entries), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client.search_entries(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == catalog.SearchEntriesRequest( - name='name_value', - query='query_value', - page_token='page_token_value', - order_by='order_by_value', - scope='scope_value', - ) - -def test_search_entries_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = CatalogServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.search_entries in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.search_entries] = mock_rpc - request = {} - client.search_entries(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - client.search_entries(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_search_entries_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: - client = CatalogServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._client._transport.search_entries in client._client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.AsyncMock() - mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[client._client._transport.search_entries] = mock_rpc - - request = {} - await client.search_entries(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - await client.search_entries(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_search_entries_async(transport: str = 'grpc_asyncio', request_type=catalog.SearchEntriesRequest): - client = CatalogServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.search_entries), - '__call__') as call: - # Designate an appropriate return value for the call. 
-        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(catalog.SearchEntriesResponse(
-            total_size=1086,
-            next_page_token='next_page_token_value',
-            unreachable=['unreachable_value'],
-        ))
-        response = await client.search_entries(request)
-
-    # Establish that the underlying gRPC stub method was called.
-    assert len(call.mock_calls)
-    _, args, _ = call.mock_calls[0]
-    request = catalog.SearchEntriesRequest()
-    assert args[0] == request
-
-    # Establish that the response is the type that we expect.
-    assert isinstance(response, pagers.SearchEntriesAsyncPager)
-    assert response.total_size == 1086
-    assert response.next_page_token == 'next_page_token_value'
-    assert response.unreachable == ['unreachable_value']
-
-
-@pytest.mark.asyncio
-async def test_search_entries_async_from_dict():
-    await test_search_entries_async(request_type=dict)
-
-def test_search_entries_field_headers():
-    client = CatalogServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-    )
-
-    # Any value that is part of the HTTP/1.1 URI should be sent as
-    # a field header. Set these to a non-empty value.
-    request = catalog.SearchEntriesRequest()
-
-    request.name = 'name_value'
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.search_entries),
-            '__call__') as call:
-        call.return_value = catalog.SearchEntriesResponse()
-        client.search_entries(request)
-
-    # Establish that the underlying gRPC stub method was called.
-    assert len(call.mock_calls) == 1
-    _, args, _ = call.mock_calls[0]
-    assert args[0] == request
-
-    # Establish that the field header was sent.
-    _, _, kw = call.mock_calls[0]
-    assert (
-        'x-goog-request-params',
-        'name=name_value',
-    ) in kw['metadata']
-
-
-@pytest.mark.asyncio
-async def test_search_entries_field_headers_async():
-    client = CatalogServiceAsyncClient(
-        credentials=async_anonymous_credentials(),
-    )
-
-    # Any value that is part of the HTTP/1.1 URI should be sent as
-    # a field header. Set these to a non-empty value.
-    request = catalog.SearchEntriesRequest()
-
-    request.name = 'name_value'
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.search_entries),
-            '__call__') as call:
-        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(catalog.SearchEntriesResponse())
-        await client.search_entries(request)
-
-    # Establish that the underlying gRPC stub method was called.
-    assert len(call.mock_calls)
-    _, args, _ = call.mock_calls[0]
-    assert args[0] == request
-
-    # Establish that the field header was sent.
-    _, _, kw = call.mock_calls[0]
-    assert (
-        'x-goog-request-params',
-        'name=name_value',
-    ) in kw['metadata']
-
-
-def test_search_entries_flattened():
-    client = CatalogServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-    )
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.search_entries),
-            '__call__') as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = catalog.SearchEntriesResponse()
-        # Call the method with a truthy value for each flattened field,
-        # using the keyword arguments to the method.
-        client.search_entries(
-            name='name_value',
-            query='query_value',
-        )
-
-    # Establish that the underlying call was made with the expected
-    # request object values.
-    assert len(call.mock_calls) == 1
-    _, args, _ = call.mock_calls[0]
-    arg = args[0].name
-    mock_val = 'name_value'
-    assert arg == mock_val
-    arg = args[0].query
-    mock_val = 'query_value'
-    assert arg == mock_val
-
-
-def test_search_entries_flattened_error():
-    client = CatalogServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-    )
-
-    # Attempting to call a method with both a request object and flattened
-    # fields is an error.
-    with pytest.raises(ValueError):
-        client.search_entries(
-            catalog.SearchEntriesRequest(),
-            name='name_value',
-            query='query_value',
-        )
-
-@pytest.mark.asyncio
-async def test_search_entries_flattened_async():
-    client = CatalogServiceAsyncClient(
-        credentials=async_anonymous_credentials(),
-    )
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.search_entries),
-            '__call__') as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(catalog.SearchEntriesResponse())
-        # Call the method with a truthy value for each flattened field,
-        # using the keyword arguments to the method.
-        response = await client.search_entries(
-            name='name_value',
-            query='query_value',
-        )
-
-    # Establish that the underlying call was made with the expected
-    # request object values.
-    assert len(call.mock_calls)
-    _, args, _ = call.mock_calls[0]
-    arg = args[0].name
-    mock_val = 'name_value'
-    assert arg == mock_val
-    arg = args[0].query
-    mock_val = 'query_value'
-    assert arg == mock_val
-
-@pytest.mark.asyncio
-async def test_search_entries_flattened_error_async():
-    client = CatalogServiceAsyncClient(
-        credentials=async_anonymous_credentials(),
-    )
-
-    # Attempting to call a method with both a request object and flattened
-    # fields is an error.
-    with pytest.raises(ValueError):
-        await client.search_entries(
-            catalog.SearchEntriesRequest(),
-            name='name_value',
-            query='query_value',
-        )
-
-
-def test_search_entries_pager(transport_name: str = "grpc"):
-    client = CatalogServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport=transport_name,
-    )
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.search_entries),
-            '__call__') as call:
-        # Set the response to a series of pages.
-        call.side_effect = (
-            catalog.SearchEntriesResponse(
-                results=[
-                    catalog.SearchEntriesResult(),
-                    catalog.SearchEntriesResult(),
-                    catalog.SearchEntriesResult(),
-                ],
-                next_page_token='abc',
-            ),
-            catalog.SearchEntriesResponse(
-                results=[],
-                next_page_token='def',
-            ),
-            catalog.SearchEntriesResponse(
-                results=[
-                    catalog.SearchEntriesResult(),
-                ],
-                next_page_token='ghi',
-            ),
-            catalog.SearchEntriesResponse(
-                results=[
-                    catalog.SearchEntriesResult(),
-                    catalog.SearchEntriesResult(),
-                ],
-            ),
-            RuntimeError,
-        )
-
-        expected_metadata = ()
-        retry = retries.Retry()
-        timeout = 5
-        expected_metadata = tuple(expected_metadata) + (
-            gapic_v1.routing_header.to_grpc_metadata((
-                ('name', ''),
-            )),
-        )
-        pager = client.search_entries(request={}, retry=retry, timeout=timeout)
-
-        assert pager._metadata == expected_metadata
-        assert pager._retry == retry
-        assert pager._timeout == timeout
-
-        results = list(pager)
-        assert len(results) == 6
-        assert all(isinstance(i, catalog.SearchEntriesResult)
-                   for i in results)
-
-def test_search_entries_pages(transport_name: str = "grpc"):
-    client = CatalogServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport=transport_name,
-    )
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.search_entries),
-            '__call__') as call:
-        # Set the response to a series of pages.
-        call.side_effect = (
-            catalog.SearchEntriesResponse(
-                results=[
-                    catalog.SearchEntriesResult(),
-                    catalog.SearchEntriesResult(),
-                    catalog.SearchEntriesResult(),
-                ],
-                next_page_token='abc',
-            ),
-            catalog.SearchEntriesResponse(
-                results=[],
-                next_page_token='def',
-            ),
-            catalog.SearchEntriesResponse(
-                results=[
-                    catalog.SearchEntriesResult(),
-                ],
-                next_page_token='ghi',
-            ),
-            catalog.SearchEntriesResponse(
-                results=[
-                    catalog.SearchEntriesResult(),
-                    catalog.SearchEntriesResult(),
-                ],
-            ),
-            RuntimeError,
-        )
-        pages = list(client.search_entries(request={}).pages)
-        for page_, token in zip(pages, ['abc','def','ghi', '']):
-            assert page_.raw_page.next_page_token == token
-
-@pytest.mark.asyncio
-async def test_search_entries_async_pager():
-    client = CatalogServiceAsyncClient(
-        credentials=async_anonymous_credentials(),
-    )
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.search_entries),
-            '__call__', new_callable=mock.AsyncMock) as call:
-        # Set the response to a series of pages.
- call.side_effect = ( - catalog.SearchEntriesResponse( - results=[ - catalog.SearchEntriesResult(), - catalog.SearchEntriesResult(), - catalog.SearchEntriesResult(), - ], - next_page_token='abc', - ), - catalog.SearchEntriesResponse( - results=[], - next_page_token='def', - ), - catalog.SearchEntriesResponse( - results=[ - catalog.SearchEntriesResult(), - ], - next_page_token='ghi', - ), - catalog.SearchEntriesResponse( - results=[ - catalog.SearchEntriesResult(), - catalog.SearchEntriesResult(), - ], - ), - RuntimeError, - ) - async_pager = await client.search_entries(request={},) - assert async_pager.next_page_token == 'abc' - responses = [] - async for response in async_pager: # pragma: no branch - responses.append(response) - - assert len(responses) == 6 - assert all(isinstance(i, catalog.SearchEntriesResult) - for i in responses) - - -@pytest.mark.asyncio -async def test_search_entries_async_pages(): - client = CatalogServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.search_entries), - '__call__', new_callable=mock.AsyncMock) as call: - # Set the response to a series of pages. - call.side_effect = ( - catalog.SearchEntriesResponse( - results=[ - catalog.SearchEntriesResult(), - catalog.SearchEntriesResult(), - catalog.SearchEntriesResult(), - ], - next_page_token='abc', - ), - catalog.SearchEntriesResponse( - results=[], - next_page_token='def', - ), - catalog.SearchEntriesResponse( - results=[ - catalog.SearchEntriesResult(), - ], - next_page_token='ghi', - ), - catalog.SearchEntriesResponse( - results=[ - catalog.SearchEntriesResult(), - catalog.SearchEntriesResult(), - ], - ), - RuntimeError, - ) - pages = [] - # Workaround issue in python 3.9 related to code coverage by adding `# pragma: no branch` - # See https://github.com/googleapis/gapic-generator-python/pull/1174#issuecomment-1025132372 - async for page_ in ( # pragma: no branch - await client.search_entries(request={}) - ).pages: - pages.append(page_) - for page_, token in zip(pages, ['abc','def','ghi', '']): - assert page_.raw_page.next_page_token == token - -@pytest.mark.parametrize("request_type", [ - catalog.CreateMetadataJobRequest, - dict, -]) -def test_create_metadata_job(request_type, transport: str = 'grpc'): - client = CatalogServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_metadata_job), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = operations_pb2.Operation(name='operations/spam') - response = client.create_metadata_job(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - request = catalog.CreateMetadataJobRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, future.Future) - - -def test_create_metadata_job_non_empty_request_with_auto_populated_field(): - # This test is a coverage failsafe to make sure that UUID4 fields are - # automatically populated, according to AIP-4235, with non-empty requests. 
- client = CatalogServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. - request = catalog.CreateMetadataJobRequest( - parent='parent_value', - metadata_job_id='metadata_job_id_value', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_metadata_job), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client.create_metadata_job(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == catalog.CreateMetadataJobRequest( - parent='parent_value', - metadata_job_id='metadata_job_id_value', - ) - -def test_create_metadata_job_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = CatalogServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.create_metadata_job in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.create_metadata_job] = mock_rpc - request = {} - client.create_metadata_job(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - # Operation methods call wrapper_fn to build a cached - # client._transport.operations_client instance on first rpc call. - # Subsequent calls should use the cached wrapper - wrapper_fn.reset_mock() - - client.create_metadata_job(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_create_metadata_job_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: - client = CatalogServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._client._transport.create_metadata_job in client._client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.AsyncMock() - mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[client._client._transport.create_metadata_job] = mock_rpc - - request = {} - await client.create_metadata_job(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - # Operation methods call wrapper_fn to build a cached - # client._transport.operations_client instance on first rpc call. 
- # Subsequent calls should use the cached wrapper - wrapper_fn.reset_mock() - - await client.create_metadata_job(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_create_metadata_job_async(transport: str = 'grpc_asyncio', request_type=catalog.CreateMetadataJobRequest): - client = CatalogServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_metadata_job), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name='operations/spam') - ) - response = await client.create_metadata_job(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - request = catalog.CreateMetadataJobRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, future.Future) - - -@pytest.mark.asyncio -async def test_create_metadata_job_async_from_dict(): - await test_create_metadata_job_async(request_type=dict) - -def test_create_metadata_job_field_headers(): - client = CatalogServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = catalog.CreateMetadataJobRequest() - - request.parent = 'parent_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_metadata_job), - '__call__') as call: - call.return_value = operations_pb2.Operation(name='operations/op') - client.create_metadata_job(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'parent=parent_value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_create_metadata_job_field_headers_async(): - client = CatalogServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = catalog.CreateMetadataJobRequest() - - request.parent = 'parent_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_metadata_job), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(operations_pb2.Operation(name='operations/op')) - await client.create_metadata_job(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. 
-    _, _, kw = call.mock_calls[0]
-    assert (
-        'x-goog-request-params',
-        'parent=parent_value',
-    ) in kw['metadata']
-
-
-def test_create_metadata_job_flattened():
-    client = CatalogServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-    )
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.create_metadata_job),
-            '__call__') as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = operations_pb2.Operation(name='operations/op')
-        # Call the method with a truthy value for each flattened field,
-        # using the keyword arguments to the method.
-        client.create_metadata_job(
-            parent='parent_value',
-            metadata_job=catalog.MetadataJob(name='name_value'),
-            metadata_job_id='metadata_job_id_value',
-        )
-
-    # Establish that the underlying call was made with the expected
-    # request object values.
-    assert len(call.mock_calls) == 1
-    _, args, _ = call.mock_calls[0]
-    arg = args[0].parent
-    mock_val = 'parent_value'
-    assert arg == mock_val
-    arg = args[0].metadata_job
-    mock_val = catalog.MetadataJob(name='name_value')
-    assert arg == mock_val
-    arg = args[0].metadata_job_id
-    mock_val = 'metadata_job_id_value'
-    assert arg == mock_val
-
-
-def test_create_metadata_job_flattened_error():
-    client = CatalogServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-    )
-
-    # Attempting to call a method with both a request object and flattened
-    # fields is an error.
-    with pytest.raises(ValueError):
-        client.create_metadata_job(
-            catalog.CreateMetadataJobRequest(),
-            parent='parent_value',
-            metadata_job=catalog.MetadataJob(name='name_value'),
-            metadata_job_id='metadata_job_id_value',
-        )
-
-@pytest.mark.asyncio
-async def test_create_metadata_job_flattened_async():
-    client = CatalogServiceAsyncClient(
-        credentials=async_anonymous_credentials(),
-    )
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.create_metadata_job),
-            '__call__') as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
-            operations_pb2.Operation(name='operations/spam')
-        )
-        # Call the method with a truthy value for each flattened field,
-        # using the keyword arguments to the method.
-        response = await client.create_metadata_job(
-            parent='parent_value',
-            metadata_job=catalog.MetadataJob(name='name_value'),
-            metadata_job_id='metadata_job_id_value',
-        )
-
-    # Establish that the underlying call was made with the expected
-    # request object values.
-    assert len(call.mock_calls)
-    _, args, _ = call.mock_calls[0]
-    arg = args[0].parent
-    mock_val = 'parent_value'
-    assert arg == mock_val
-    arg = args[0].metadata_job
-    mock_val = catalog.MetadataJob(name='name_value')
-    assert arg == mock_val
-    arg = args[0].metadata_job_id
-    mock_val = 'metadata_job_id_value'
-    assert arg == mock_val
-
-@pytest.mark.asyncio
-async def test_create_metadata_job_flattened_error_async():
-    client = CatalogServiceAsyncClient(
-        credentials=async_anonymous_credentials(),
-    )
-
-    # Attempting to call a method with both a request object and flattened
-    # fields is an error.
- with pytest.raises(ValueError): - await client.create_metadata_job( - catalog.CreateMetadataJobRequest(), - parent='parent_value', - metadata_job=catalog.MetadataJob(name='name_value'), - metadata_job_id='metadata_job_id_value', - ) - - -@pytest.mark.parametrize("request_type", [ - catalog.GetMetadataJobRequest, - dict, -]) -def test_get_metadata_job(request_type, transport: str = 'grpc'): - client = CatalogServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_metadata_job), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = catalog.MetadataJob( - name='name_value', - uid='uid_value', - type_=catalog.MetadataJob.Type.IMPORT, - ) - response = client.get_metadata_job(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - request = catalog.GetMetadataJobRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, catalog.MetadataJob) - assert response.name == 'name_value' - assert response.uid == 'uid_value' - assert response.type_ == catalog.MetadataJob.Type.IMPORT - - -def test_get_metadata_job_non_empty_request_with_auto_populated_field(): - # This test is a coverage failsafe to make sure that UUID4 fields are - # automatically populated, according to AIP-4235, with non-empty requests. - client = CatalogServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. - request = catalog.GetMetadataJobRequest( - name='name_value', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_metadata_job), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client.get_metadata_job(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == catalog.GetMetadataJobRequest( - name='name_value', - ) - -def test_get_metadata_job_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = CatalogServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.get_metadata_job in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.get_metadata_job] = mock_rpc - request = {} - client.get_metadata_job(request) - - # Establish that the underlying gRPC stub method was called. 
-        assert mock_rpc.call_count == 1
-
-        client.get_metadata_job(request)
-
-        # Establish that a new wrapper was not created for this call
-        assert wrapper_fn.call_count == 0
-        assert mock_rpc.call_count == 2
-
-@pytest.mark.asyncio
-async def test_get_metadata_job_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"):
-    # Clients should use _prep_wrapped_messages to create cached wrapped rpcs,
-    # instead of constructing them on each call
-    with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn:
-        client = CatalogServiceAsyncClient(
-            credentials=async_anonymous_credentials(),
-            transport=transport,
-        )
-
-        # Should wrap all calls on client creation
-        assert wrapper_fn.call_count > 0
-        wrapper_fn.reset_mock()
-
-        # Ensure method has been cached
-        assert client._client._transport.get_metadata_job in client._client._transport._wrapped_methods
-
-        # Replace cached wrapped function with mock
-        mock_rpc = mock.AsyncMock()
-        mock_rpc.return_value = mock.Mock()
-        client._client._transport._wrapped_methods[client._client._transport.get_metadata_job] = mock_rpc
-
-        request = {}
-        await client.get_metadata_job(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert mock_rpc.call_count == 1
-
-        await client.get_metadata_job(request)
-
-        # Establish that a new wrapper was not created for this call
-        assert wrapper_fn.call_count == 0
-        assert mock_rpc.call_count == 2
-
-@pytest.mark.asyncio
-async def test_get_metadata_job_async(transport: str = 'grpc_asyncio', request_type=catalog.GetMetadataJobRequest):
-    client = CatalogServiceAsyncClient(
-        credentials=async_anonymous_credentials(),
-        transport=transport,
-    )
-
-    # Everything is optional in proto3 as far as the runtime is concerned,
-    # and we are mocking out the actual API, so just send an empty request.
-    request = request_type()
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.get_metadata_job),
-            '__call__') as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(catalog.MetadataJob(
-            name='name_value',
-            uid='uid_value',
-            type_=catalog.MetadataJob.Type.IMPORT,
-        ))
-        response = await client.get_metadata_job(request)
-
-    # Establish that the underlying gRPC stub method was called.
-    assert len(call.mock_calls)
-    _, args, _ = call.mock_calls[0]
-    request = catalog.GetMetadataJobRequest()
-    assert args[0] == request
-
-    # Establish that the response is the type that we expect.
-    assert isinstance(response, catalog.MetadataJob)
-    assert response.name == 'name_value'
-    assert response.uid == 'uid_value'
-    assert response.type_ == catalog.MetadataJob.Type.IMPORT
-
-
-@pytest.mark.asyncio
-async def test_get_metadata_job_async_from_dict():
-    await test_get_metadata_job_async(request_type=dict)
-
-def test_get_metadata_job_field_headers():
-    client = CatalogServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-    )
-
-    # Any value that is part of the HTTP/1.1 URI should be sent as
-    # a field header. Set these to a non-empty value.
-    request = catalog.GetMetadataJobRequest()
-
-    request.name = 'name_value'
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.get_metadata_job),
-            '__call__') as call:
-        call.return_value = catalog.MetadataJob()
-        client.get_metadata_job(request)
-
-    # Establish that the underlying gRPC stub method was called.
-    assert len(call.mock_calls) == 1
-    _, args, _ = call.mock_calls[0]
-    assert args[0] == request
-
-    # Establish that the field header was sent.
-    _, _, kw = call.mock_calls[0]
-    assert (
-        'x-goog-request-params',
-        'name=name_value',
-    ) in kw['metadata']
-
-
-@pytest.mark.asyncio
-async def test_get_metadata_job_field_headers_async():
-    client = CatalogServiceAsyncClient(
-        credentials=async_anonymous_credentials(),
-    )
-
-    # Any value that is part of the HTTP/1.1 URI should be sent as
-    # a field header. Set these to a non-empty value.
-    request = catalog.GetMetadataJobRequest()
-
-    request.name = 'name_value'
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.get_metadata_job),
-            '__call__') as call:
-        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(catalog.MetadataJob())
-        await client.get_metadata_job(request)
-
-    # Establish that the underlying gRPC stub method was called.
-    assert len(call.mock_calls)
-    _, args, _ = call.mock_calls[0]
-    assert args[0] == request
-
-    # Establish that the field header was sent.
-    _, _, kw = call.mock_calls[0]
-    assert (
-        'x-goog-request-params',
-        'name=name_value',
-    ) in kw['metadata']
-
-
-def test_get_metadata_job_flattened():
-    client = CatalogServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-    )
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.get_metadata_job),
-            '__call__') as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = catalog.MetadataJob()
-        # Call the method with a truthy value for each flattened field,
-        # using the keyword arguments to the method.
-        client.get_metadata_job(
-            name='name_value',
-        )
-
-    # Establish that the underlying call was made with the expected
-    # request object values.
-    assert len(call.mock_calls) == 1
-    _, args, _ = call.mock_calls[0]
-    arg = args[0].name
-    mock_val = 'name_value'
-    assert arg == mock_val
-
-
-def test_get_metadata_job_flattened_error():
-    client = CatalogServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-    )
-
-    # Attempting to call a method with both a request object and flattened
-    # fields is an error.
-    with pytest.raises(ValueError):
-        client.get_metadata_job(
-            catalog.GetMetadataJobRequest(),
-            name='name_value',
-        )
-
-@pytest.mark.asyncio
-async def test_get_metadata_job_flattened_async():
-    client = CatalogServiceAsyncClient(
-        credentials=async_anonymous_credentials(),
-    )
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.get_metadata_job),
-            '__call__') as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(catalog.MetadataJob())
-        # Call the method with a truthy value for each flattened field,
-        # using the keyword arguments to the method.
-        response = await client.get_metadata_job(
-            name='name_value',
-        )
-
-    # Establish that the underlying call was made with the expected
-    # request object values.
- assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = 'name_value' - assert arg == mock_val - -@pytest.mark.asyncio -async def test_get_metadata_job_flattened_error_async(): - client = CatalogServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - await client.get_metadata_job( - catalog.GetMetadataJobRequest(), - name='name_value', - ) - - -@pytest.mark.parametrize("request_type", [ - catalog.ListMetadataJobsRequest, - dict, -]) -def test_list_metadata_jobs(request_type, transport: str = 'grpc'): - client = CatalogServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_metadata_jobs), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = catalog.ListMetadataJobsResponse( - next_page_token='next_page_token_value', - unreachable_locations=['unreachable_locations_value'], - ) - response = client.list_metadata_jobs(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - request = catalog.ListMetadataJobsRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, pagers.ListMetadataJobsPager) - assert response.next_page_token == 'next_page_token_value' - assert response.unreachable_locations == ['unreachable_locations_value'] - - -def test_list_metadata_jobs_non_empty_request_with_auto_populated_field(): - # This test is a coverage failsafe to make sure that UUID4 fields are - # automatically populated, according to AIP-4235, with non-empty requests. - client = CatalogServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. - request = catalog.ListMetadataJobsRequest( - parent='parent_value', - page_token='page_token_value', - filter='filter_value', - order_by='order_by_value', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_metadata_jobs), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. 
- client.list_metadata_jobs(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == catalog.ListMetadataJobsRequest( - parent='parent_value', - page_token='page_token_value', - filter='filter_value', - order_by='order_by_value', - ) - -def test_list_metadata_jobs_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = CatalogServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.list_metadata_jobs in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.list_metadata_jobs] = mock_rpc - request = {} - client.list_metadata_jobs(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - client.list_metadata_jobs(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_list_metadata_jobs_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: - client = CatalogServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._client._transport.list_metadata_jobs in client._client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.AsyncMock() - mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[client._client._transport.list_metadata_jobs] = mock_rpc - - request = {} - await client.list_metadata_jobs(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - await client.list_metadata_jobs(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_list_metadata_jobs_async(transport: str = 'grpc_asyncio', request_type=catalog.ListMetadataJobsRequest): - client = CatalogServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_metadata_jobs), - '__call__') as call: - # Designate an appropriate return value for the call. 
-        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(catalog.ListMetadataJobsResponse(
-            next_page_token='next_page_token_value',
-            unreachable_locations=['unreachable_locations_value'],
-        ))
-        response = await client.list_metadata_jobs(request)
-
-    # Establish that the underlying gRPC stub method was called.
-    assert len(call.mock_calls)
-    _, args, _ = call.mock_calls[0]
-    request = catalog.ListMetadataJobsRequest()
-    assert args[0] == request
-
-    # Establish that the response is the type that we expect.
-    assert isinstance(response, pagers.ListMetadataJobsAsyncPager)
-    assert response.next_page_token == 'next_page_token_value'
-    assert response.unreachable_locations == ['unreachable_locations_value']
-
-
-@pytest.mark.asyncio
-async def test_list_metadata_jobs_async_from_dict():
-    await test_list_metadata_jobs_async(request_type=dict)
-
-def test_list_metadata_jobs_field_headers():
-    client = CatalogServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-    )
-
-    # Any value that is part of the HTTP/1.1 URI should be sent as
-    # a field header. Set these to a non-empty value.
-    request = catalog.ListMetadataJobsRequest()
-
-    request.parent = 'parent_value'
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.list_metadata_jobs),
-            '__call__') as call:
-        call.return_value = catalog.ListMetadataJobsResponse()
-        client.list_metadata_jobs(request)
-
-    # Establish that the underlying gRPC stub method was called.
-    assert len(call.mock_calls) == 1
-    _, args, _ = call.mock_calls[0]
-    assert args[0] == request
-
-    # Establish that the field header was sent.
-    _, _, kw = call.mock_calls[0]
-    assert (
-        'x-goog-request-params',
-        'parent=parent_value',
-    ) in kw['metadata']
-
-
-@pytest.mark.asyncio
-async def test_list_metadata_jobs_field_headers_async():
-    client = CatalogServiceAsyncClient(
-        credentials=async_anonymous_credentials(),
-    )
-
-    # Any value that is part of the HTTP/1.1 URI should be sent as
-    # a field header. Set these to a non-empty value.
-    request = catalog.ListMetadataJobsRequest()
-
-    request.parent = 'parent_value'
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.list_metadata_jobs),
-            '__call__') as call:
-        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(catalog.ListMetadataJobsResponse())
-        await client.list_metadata_jobs(request)
-
-    # Establish that the underlying gRPC stub method was called.
-    assert len(call.mock_calls)
-    _, args, _ = call.mock_calls[0]
-    assert args[0] == request
-
-    # Establish that the field header was sent.
-    _, _, kw = call.mock_calls[0]
-    assert (
-        'x-goog-request-params',
-        'parent=parent_value',
-    ) in kw['metadata']
-
-
-def test_list_metadata_jobs_flattened():
-    client = CatalogServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-    )
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.list_metadata_jobs),
-            '__call__') as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = catalog.ListMetadataJobsResponse()
-        # Call the method with a truthy value for each flattened field,
-        # using the keyword arguments to the method.
-        client.list_metadata_jobs(
-            parent='parent_value',
-        )
-
-    # Establish that the underlying call was made with the expected
-    # request object values.
- assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - arg = args[0].parent - mock_val = 'parent_value' - assert arg == mock_val - - -def test_list_metadata_jobs_flattened_error(): - client = CatalogServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.list_metadata_jobs( - catalog.ListMetadataJobsRequest(), - parent='parent_value', - ) - -@pytest.mark.asyncio -async def test_list_metadata_jobs_flattened_async(): - client = CatalogServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_metadata_jobs), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = catalog.ListMetadataJobsResponse() - - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(catalog.ListMetadataJobsResponse()) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.list_metadata_jobs( - parent='parent_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].parent - mock_val = 'parent_value' - assert arg == mock_val - -@pytest.mark.asyncio -async def test_list_metadata_jobs_flattened_error_async(): - client = CatalogServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - await client.list_metadata_jobs( - catalog.ListMetadataJobsRequest(), - parent='parent_value', - ) - - -def test_list_metadata_jobs_pager(transport_name: str = "grpc"): - client = CatalogServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport_name, - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_metadata_jobs), - '__call__') as call: - # Set the response to a series of pages. 
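
# The flattened_error tests above pin down a GAPIC convention: a call may
# pass either a request object or flattened keyword fields, never both.
# Hedged caller-side sketch (anonymous credentials; no RPC is issued because
# the ValueError is raised before the transport is used):
import pytest
from google.auth import credentials as ga_credentials
from google.cloud import dataplex_v1

client = dataplex_v1.CatalogServiceClient(credentials=ga_credentials.AnonymousCredentials())
with pytest.raises(ValueError):
    client.list_metadata_jobs(dataplex_v1.ListMetadataJobsRequest(), parent="parent_value")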
- call.side_effect = ( - catalog.ListMetadataJobsResponse( - metadata_jobs=[ - catalog.MetadataJob(), - catalog.MetadataJob(), - catalog.MetadataJob(), - ], - next_page_token='abc', - ), - catalog.ListMetadataJobsResponse( - metadata_jobs=[], - next_page_token='def', - ), - catalog.ListMetadataJobsResponse( - metadata_jobs=[ - catalog.MetadataJob(), - ], - next_page_token='ghi', - ), - catalog.ListMetadataJobsResponse( - metadata_jobs=[ - catalog.MetadataJob(), - catalog.MetadataJob(), - ], - ), - RuntimeError, - ) - - expected_metadata = () - retry = retries.Retry() - timeout = 5 - expected_metadata = tuple(expected_metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ('parent', ''), - )), - ) - pager = client.list_metadata_jobs(request={}, retry=retry, timeout=timeout) - - assert pager._metadata == expected_metadata - assert pager._retry == retry - assert pager._timeout == timeout - - results = list(pager) - assert len(results) == 6 - assert all(isinstance(i, catalog.MetadataJob) - for i in results) -def test_list_metadata_jobs_pages(transport_name: str = "grpc"): - client = CatalogServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport_name, - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_metadata_jobs), - '__call__') as call: - # Set the response to a series of pages. - call.side_effect = ( - catalog.ListMetadataJobsResponse( - metadata_jobs=[ - catalog.MetadataJob(), - catalog.MetadataJob(), - catalog.MetadataJob(), - ], - next_page_token='abc', - ), - catalog.ListMetadataJobsResponse( - metadata_jobs=[], - next_page_token='def', - ), - catalog.ListMetadataJobsResponse( - metadata_jobs=[ - catalog.MetadataJob(), - ], - next_page_token='ghi', - ), - catalog.ListMetadataJobsResponse( - metadata_jobs=[ - catalog.MetadataJob(), - catalog.MetadataJob(), - ], - ), - RuntimeError, - ) - pages = list(client.list_metadata_jobs(request={}).pages) - for page_, token in zip(pages, ['abc','def','ghi', '']): - assert page_.raw_page.next_page_token == token - -@pytest.mark.asyncio -async def test_list_metadata_jobs_async_pager(): - client = CatalogServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_metadata_jobs), - '__call__', new_callable=mock.AsyncMock) as call: - # Set the response to a series of pages. 
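
# Outside of tests, the pager hides next_page_token bookkeeping entirely.
# A hedged usage sketch, assuming real credentials and an existing parent
# resource; the resource name here is hypothetical:
from google.cloud import dataplex_v1

client = dataplex_v1.CatalogServiceClient()
parent = "projects/my-project/locations/us-central1"

for job in client.list_metadata_jobs(parent=parent):  # items across all pages
    print(job.name)

for page in client.list_metadata_jobs(parent=parent).pages:  # page by page
    print(page.raw_page.next_page_token)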
- call.side_effect = ( - catalog.ListMetadataJobsResponse( - metadata_jobs=[ - catalog.MetadataJob(), - catalog.MetadataJob(), - catalog.MetadataJob(), - ], - next_page_token='abc', - ), - catalog.ListMetadataJobsResponse( - metadata_jobs=[], - next_page_token='def', - ), - catalog.ListMetadataJobsResponse( - metadata_jobs=[ - catalog.MetadataJob(), - ], - next_page_token='ghi', - ), - catalog.ListMetadataJobsResponse( - metadata_jobs=[ - catalog.MetadataJob(), - catalog.MetadataJob(), - ], - ), - RuntimeError, - ) - async_pager = await client.list_metadata_jobs(request={},) - assert async_pager.next_page_token == 'abc' - responses = [] - async for response in async_pager: # pragma: no branch - responses.append(response) - - assert len(responses) == 6 - assert all(isinstance(i, catalog.MetadataJob) - for i in responses) - - -@pytest.mark.asyncio -async def test_list_metadata_jobs_async_pages(): - client = CatalogServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_metadata_jobs), - '__call__', new_callable=mock.AsyncMock) as call: - # Set the response to a series of pages. - call.side_effect = ( - catalog.ListMetadataJobsResponse( - metadata_jobs=[ - catalog.MetadataJob(), - catalog.MetadataJob(), - catalog.MetadataJob(), - ], - next_page_token='abc', - ), - catalog.ListMetadataJobsResponse( - metadata_jobs=[], - next_page_token='def', - ), - catalog.ListMetadataJobsResponse( - metadata_jobs=[ - catalog.MetadataJob(), - ], - next_page_token='ghi', - ), - catalog.ListMetadataJobsResponse( - metadata_jobs=[ - catalog.MetadataJob(), - catalog.MetadataJob(), - ], - ), - RuntimeError, - ) - pages = [] - # Workaround issue in python 3.9 related to code coverage by adding `# pragma: no branch` - # See https://github.com/googleapis/gapic-generator-python/pull/1174#issuecomment-1025132372 - async for page_ in ( # pragma: no branch - await client.list_metadata_jobs(request={}) - ).pages: - pages.append(page_) - for page_, token in zip(pages, ['abc','def','ghi', '']): - assert page_.raw_page.next_page_token == token - -@pytest.mark.parametrize("request_type", [ - catalog.CancelMetadataJobRequest, - dict, -]) -def test_cancel_metadata_job(request_type, transport: str = 'grpc'): - client = CatalogServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.cancel_metadata_job), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = None - response = client.cancel_metadata_job(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - request = catalog.CancelMetadataJobRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert response is None - - -def test_cancel_metadata_job_non_empty_request_with_auto_populated_field(): - # This test is a coverage failsafe to make sure that UUID4 fields are - # automatically populated, according to AIP-4235, with non-empty requests. 
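
# For context on the AIP-4235 comment above: "auto-populated" fields are
# request_id-style UUID4 fields that the client fills in when the caller
# leaves them unset, so retries of the same logical request carry the same
# identifier. Illustrative sketch of the idea only; the field name below is
# hypothetical, not a field of CancelMetadataJobRequest:
import uuid

request = {"name": "name_value"}
request.setdefault("request_id", str(uuid.uuid4()))  # fill only if unset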
- client = CatalogServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. - request = catalog.CancelMetadataJobRequest( - name='name_value', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.cancel_metadata_job), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client.cancel_metadata_job(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == catalog.CancelMetadataJobRequest( - name='name_value', - ) - -def test_cancel_metadata_job_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = CatalogServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.cancel_metadata_job in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.cancel_metadata_job] = mock_rpc - request = {} - client.cancel_metadata_job(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - client.cancel_metadata_job(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_cancel_metadata_job_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: - client = CatalogServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._client._transport.cancel_metadata_job in client._client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.AsyncMock() - mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[client._client._transport.cancel_metadata_job] = mock_rpc - - request = {} - await client.cancel_metadata_job(request) - - # Establish that the underlying gRPC stub method was called. 
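
# The async variants above swap mock.Mock for mock.AsyncMock because async
# transport methods must return awaitables. Self-contained illustration:
import asyncio
from unittest import mock

sync_rpc = mock.Mock(return_value="done")
async_rpc = mock.AsyncMock(return_value="done")

assert sync_rpc() == "done"
assert asyncio.run(async_rpc()) == "done"  # AsyncMock() returns a coroutine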
- assert mock_rpc.call_count == 1 - - await client.cancel_metadata_job(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_cancel_metadata_job_async(transport: str = 'grpc_asyncio', request_type=catalog.CancelMetadataJobRequest): - client = CatalogServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.cancel_metadata_job), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - response = await client.cancel_metadata_job(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - request = catalog.CancelMetadataJobRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert response is None - - -@pytest.mark.asyncio -async def test_cancel_metadata_job_async_from_dict(): - await test_cancel_metadata_job_async(request_type=dict) - -def test_cancel_metadata_job_field_headers(): - client = CatalogServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = catalog.CancelMetadataJobRequest() - - request.name = 'name_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.cancel_metadata_job), - '__call__') as call: - call.return_value = None - client.cancel_metadata_job(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'name=name_value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_cancel_metadata_job_field_headers_async(): - client = CatalogServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = catalog.CancelMetadataJobRequest() - - request.name = 'name_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.cancel_metadata_job), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - await client.cancel_metadata_job(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'name=name_value', - ) in kw['metadata'] - - -def test_cancel_metadata_job_flattened(): - client = CatalogServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.cancel_metadata_job), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = None - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.cancel_metadata_job( - name='name_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = 'name_value' - assert arg == mock_val - - -def test_cancel_metadata_job_flattened_error(): - client = CatalogServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.cancel_metadata_job( - catalog.CancelMetadataJobRequest(), - name='name_value', - ) - -@pytest.mark.asyncio -async def test_cancel_metadata_job_flattened_async(): - client = CatalogServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.cancel_metadata_job), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = None - - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.cancel_metadata_job( - name='name_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = 'name_value' - assert arg == mock_val - -@pytest.mark.asyncio -async def test_cancel_metadata_job_flattened_error_async(): - client = CatalogServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - await client.cancel_metadata_job( - catalog.CancelMetadataJobRequest(), - name='name_value', - ) - - -def test_credentials_transport_error(): - # It is an error to provide credentials and a transport instance. - transport = transports.CatalogServiceGrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - with pytest.raises(ValueError): - client = CatalogServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # It is an error to provide a credentials file and a transport instance. - transport = transports.CatalogServiceGrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - with pytest.raises(ValueError): - client = CatalogServiceClient( - client_options={"credentials_file": "credentials.json"}, - transport=transport, - ) - - # It is an error to provide an api_key and a transport instance. - transport = transports.CatalogServiceGrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - options = client_options.ClientOptions() - options.api_key = "api_key" - with pytest.raises(ValueError): - client = CatalogServiceClient( - client_options=options, - transport=transport, - ) - - # It is an error to provide an api_key and a credential. 
- options = client_options.ClientOptions() - options.api_key = "api_key" - with pytest.raises(ValueError): - client = CatalogServiceClient( - client_options=options, - credentials=ga_credentials.AnonymousCredentials() - ) - - # It is an error to provide scopes and a transport instance. - transport = transports.CatalogServiceGrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - with pytest.raises(ValueError): - client = CatalogServiceClient( - client_options={"scopes": ["1", "2"]}, - transport=transport, - ) - - -def test_transport_instance(): - # A client may be instantiated with a custom transport instance. - transport = transports.CatalogServiceGrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - client = CatalogServiceClient(transport=transport) - assert client.transport is transport - -def test_transport_get_channel(): - # A client may be instantiated with a custom transport instance. - transport = transports.CatalogServiceGrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - channel = transport.grpc_channel - assert channel - - transport = transports.CatalogServiceGrpcAsyncIOTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - channel = transport.grpc_channel - assert channel - -@pytest.mark.parametrize("transport_class", [ - transports.CatalogServiceGrpcTransport, - transports.CatalogServiceGrpcAsyncIOTransport, -]) -def test_transport_adc(transport_class): - # Test default credentials are used if not provided. - with mock.patch.object(google.auth, 'default') as adc: - adc.return_value = (ga_credentials.AnonymousCredentials(), None) - transport_class() - adc.assert_called_once() - -def test_transport_kind_grpc(): - transport = CatalogServiceClient.get_transport_class("grpc")( - credentials=ga_credentials.AnonymousCredentials() - ) - assert transport.kind == "grpc" - - -def test_initialize_client_w_grpc(): - client = CatalogServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc" - ) - assert client is not None - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_create_entry_type_empty_call_grpc(): - client = CatalogServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.create_entry_type), - '__call__') as call: - call.return_value = operations_pb2.Operation(name='operations/op') - client.create_entry_type(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = catalog.CreateEntryTypeRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_update_entry_type_empty_call_grpc(): - client = CatalogServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.update_entry_type), - '__call__') as call: - call.return_value = operations_pb2.Operation(name='operations/op') - client.update_entry_type(request=None) - - # Establish that the underlying stub method was called. 
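
# test_transport_adc above patches google.auth.default(); without the patch,
# a transport created with no credentials resolves Application Default
# Credentials the same way. Runnable sketch, assuming ADC is configured in
# the environment:
import google.auth

credentials, project_id = google.auth.default()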
- call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = catalog.UpdateEntryTypeRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_delete_entry_type_empty_call_grpc(): - client = CatalogServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.delete_entry_type), - '__call__') as call: - call.return_value = operations_pb2.Operation(name='operations/op') - client.delete_entry_type(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = catalog.DeleteEntryTypeRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_list_entry_types_empty_call_grpc(): - client = CatalogServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.list_entry_types), - '__call__') as call: - call.return_value = catalog.ListEntryTypesResponse() - client.list_entry_types(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = catalog.ListEntryTypesRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_get_entry_type_empty_call_grpc(): - client = CatalogServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.get_entry_type), - '__call__') as call: - call.return_value = catalog.EntryType() - client.get_entry_type(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = catalog.GetEntryTypeRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_create_aspect_type_empty_call_grpc(): - client = CatalogServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.create_aspect_type), - '__call__') as call: - call.return_value = operations_pb2.Operation(name='operations/op') - client.create_aspect_type(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = catalog.CreateAspectTypeRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_update_aspect_type_empty_call_grpc(): - client = CatalogServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call, and fake the request. 
- with mock.patch.object( - type(client.transport.update_aspect_type), - '__call__') as call: - call.return_value = operations_pb2.Operation(name='operations/op') - client.update_aspect_type(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = catalog.UpdateAspectTypeRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_delete_aspect_type_empty_call_grpc(): - client = CatalogServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.delete_aspect_type), - '__call__') as call: - call.return_value = operations_pb2.Operation(name='operations/op') - client.delete_aspect_type(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = catalog.DeleteAspectTypeRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_list_aspect_types_empty_call_grpc(): - client = CatalogServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.list_aspect_types), - '__call__') as call: - call.return_value = catalog.ListAspectTypesResponse() - client.list_aspect_types(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = catalog.ListAspectTypesRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_get_aspect_type_empty_call_grpc(): - client = CatalogServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.get_aspect_type), - '__call__') as call: - call.return_value = catalog.AspectType() - client.get_aspect_type(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = catalog.GetAspectTypeRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_create_entry_group_empty_call_grpc(): - client = CatalogServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.create_entry_group), - '__call__') as call: - call.return_value = operations_pb2.Operation(name='operations/op') - client.create_entry_group(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = catalog.CreateEntryGroupRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. 
-def test_update_entry_group_empty_call_grpc(): - client = CatalogServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.update_entry_group), - '__call__') as call: - call.return_value = operations_pb2.Operation(name='operations/op') - client.update_entry_group(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = catalog.UpdateEntryGroupRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_delete_entry_group_empty_call_grpc(): - client = CatalogServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.delete_entry_group), - '__call__') as call: - call.return_value = operations_pb2.Operation(name='operations/op') - client.delete_entry_group(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = catalog.DeleteEntryGroupRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_list_entry_groups_empty_call_grpc(): - client = CatalogServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.list_entry_groups), - '__call__') as call: - call.return_value = catalog.ListEntryGroupsResponse() - client.list_entry_groups(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = catalog.ListEntryGroupsRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_get_entry_group_empty_call_grpc(): - client = CatalogServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.get_entry_group), - '__call__') as call: - call.return_value = catalog.EntryGroup() - client.get_entry_group(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = catalog.GetEntryGroupRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_create_entry_empty_call_grpc(): - client = CatalogServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.create_entry), - '__call__') as call: - call.return_value = catalog.Entry() - client.create_entry(request=None) - - # Establish that the underlying stub method was called. 
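
# These empty-call tests lean on proto3 semantics: request=None makes the
# client construct a default message, and unset scalar fields read back as
# zero values. Runnable illustration with the generated types:
from google.cloud.dataplex_v1.types import catalog

msg = catalog.GetEntryGroupRequest()
assert msg.name == ""                                # unset proto3 string -> ""
assert msg == catalog.GetEntryGroupRequest(name="")  # "" is the default value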
- call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = catalog.CreateEntryRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_update_entry_empty_call_grpc(): - client = CatalogServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.update_entry), - '__call__') as call: - call.return_value = catalog.Entry() - client.update_entry(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = catalog.UpdateEntryRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_delete_entry_empty_call_grpc(): - client = CatalogServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.delete_entry), - '__call__') as call: - call.return_value = catalog.Entry() - client.delete_entry(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = catalog.DeleteEntryRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_list_entries_empty_call_grpc(): - client = CatalogServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.list_entries), - '__call__') as call: - call.return_value = catalog.ListEntriesResponse() - client.list_entries(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = catalog.ListEntriesRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_get_entry_empty_call_grpc(): - client = CatalogServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.get_entry), - '__call__') as call: - call.return_value = catalog.Entry() - client.get_entry(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = catalog.GetEntryRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_lookup_entry_empty_call_grpc(): - client = CatalogServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.lookup_entry), - '__call__') as call: - call.return_value = catalog.Entry() - client.lookup_entry(request=None) - - # Establish that the underlying stub method was called. 
- call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = catalog.LookupEntryRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_search_entries_empty_call_grpc(): - client = CatalogServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.search_entries), - '__call__') as call: - call.return_value = catalog.SearchEntriesResponse() - client.search_entries(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = catalog.SearchEntriesRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_create_metadata_job_empty_call_grpc(): - client = CatalogServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.create_metadata_job), - '__call__') as call: - call.return_value = operations_pb2.Operation(name='operations/op') - client.create_metadata_job(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = catalog.CreateMetadataJobRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_get_metadata_job_empty_call_grpc(): - client = CatalogServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.get_metadata_job), - '__call__') as call: - call.return_value = catalog.MetadataJob() - client.get_metadata_job(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = catalog.GetMetadataJobRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_list_metadata_jobs_empty_call_grpc(): - client = CatalogServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.list_metadata_jobs), - '__call__') as call: - call.return_value = catalog.ListMetadataJobsResponse() - client.list_metadata_jobs(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = catalog.ListMetadataJobsRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_cancel_metadata_job_empty_call_grpc(): - client = CatalogServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call, and fake the request. 
- with mock.patch.object( - type(client.transport.cancel_metadata_job), - '__call__') as call: - call.return_value = None - client.cancel_metadata_job(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = catalog.CancelMetadataJobRequest() - - assert args[0] == request_msg - - -def test_transport_kind_grpc_asyncio(): - transport = CatalogServiceAsyncClient.get_transport_class("grpc_asyncio")( - credentials=async_anonymous_credentials() - ) - assert transport.kind == "grpc_asyncio" - - -def test_initialize_client_w_grpc_asyncio(): - client = CatalogServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio" - ) - assert client is not None - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -@pytest.mark.asyncio -async def test_create_entry_type_empty_call_grpc_asyncio(): - client = CatalogServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.create_entry_type), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name='operations/spam') - ) - await client.create_entry_type(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = catalog.CreateEntryTypeRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -@pytest.mark.asyncio -async def test_update_entry_type_empty_call_grpc_asyncio(): - client = CatalogServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.update_entry_type), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name='operations/spam') - ) - await client.update_entry_type(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = catalog.UpdateEntryTypeRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -@pytest.mark.asyncio -async def test_delete_entry_type_empty_call_grpc_asyncio(): - client = CatalogServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.delete_entry_type), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name='operations/spam') - ) - await client.delete_entry_type(request=None) - - # Establish that the underlying stub method was called. 
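
# On the asyncio transport the stubs yield call objects rather than raw
# messages, which is why the tests wrap each response in
# grpc_helpers_async.FakeUnaryUnaryCall, a google-api-core test helper whose
# instances await to the wrapped response. Minimal sketch:
import asyncio
from google.api_core import grpc_helpers_async

async def main():
    fake_call = grpc_helpers_async.FakeUnaryUnaryCall("response")
    return await fake_call

assert asyncio.run(main()) == "response"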
- call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = catalog.DeleteEntryTypeRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -@pytest.mark.asyncio -async def test_list_entry_types_empty_call_grpc_asyncio(): - client = CatalogServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.list_entry_types), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(catalog.ListEntryTypesResponse( - next_page_token='next_page_token_value', - unreachable_locations=['unreachable_locations_value'], - )) - await client.list_entry_types(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = catalog.ListEntryTypesRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -@pytest.mark.asyncio -async def test_get_entry_type_empty_call_grpc_asyncio(): - client = CatalogServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.get_entry_type), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(catalog.EntryType( - name='name_value', - uid='uid_value', - description='description_value', - display_name='display_name_value', - etag='etag_value', - type_aliases=['type_aliases_value'], - platform='platform_value', - system='system_value', - )) - await client.get_entry_type(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = catalog.GetEntryTypeRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -@pytest.mark.asyncio -async def test_create_aspect_type_empty_call_grpc_asyncio(): - client = CatalogServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.create_aspect_type), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name='operations/spam') - ) - await client.create_aspect_type(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = catalog.CreateAspectTypeRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -@pytest.mark.asyncio -async def test_update_aspect_type_empty_call_grpc_asyncio(): - client = CatalogServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call, and fake the request. 
- with mock.patch.object( - type(client.transport.update_aspect_type), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name='operations/spam') - ) - await client.update_aspect_type(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = catalog.UpdateAspectTypeRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -@pytest.mark.asyncio -async def test_delete_aspect_type_empty_call_grpc_asyncio(): - client = CatalogServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.delete_aspect_type), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name='operations/spam') - ) - await client.delete_aspect_type(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = catalog.DeleteAspectTypeRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -@pytest.mark.asyncio -async def test_list_aspect_types_empty_call_grpc_asyncio(): - client = CatalogServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.list_aspect_types), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(catalog.ListAspectTypesResponse( - next_page_token='next_page_token_value', - unreachable_locations=['unreachable_locations_value'], - )) - await client.list_aspect_types(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = catalog.ListAspectTypesRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -@pytest.mark.asyncio -async def test_get_aspect_type_empty_call_grpc_asyncio(): - client = CatalogServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.get_aspect_type), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(catalog.AspectType( - name='name_value', - uid='uid_value', - description='description_value', - display_name='display_name_value', - etag='etag_value', - transfer_status=catalog.TransferStatus.TRANSFER_STATUS_MIGRATED, - )) - await client.get_aspect_type(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = catalog.GetAspectTypeRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work.
-@pytest.mark.asyncio -async def test_create_entry_group_empty_call_grpc_asyncio(): - client = CatalogServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.create_entry_group), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name='operations/spam') - ) - await client.create_entry_group(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = catalog.CreateEntryGroupRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -@pytest.mark.asyncio -async def test_update_entry_group_empty_call_grpc_asyncio(): - client = CatalogServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.update_entry_group), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name='operations/spam') - ) - await client.update_entry_group(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = catalog.UpdateEntryGroupRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -@pytest.mark.asyncio -async def test_delete_entry_group_empty_call_grpc_asyncio(): - client = CatalogServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.delete_entry_group), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name='operations/spam') - ) - await client.delete_entry_group(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = catalog.DeleteEntryGroupRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -@pytest.mark.asyncio -async def test_list_entry_groups_empty_call_grpc_asyncio(): - client = CatalogServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.list_entry_groups), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(catalog.ListEntryGroupsResponse( - next_page_token='next_page_token_value', - unreachable_locations=['unreachable_locations_value'], - )) - await client.list_entry_groups(request=None) - - # Establish that the underlying stub method was called.
- call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = catalog.ListEntryGroupsRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -@pytest.mark.asyncio -async def test_get_entry_group_empty_call_grpc_asyncio(): - client = CatalogServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.get_entry_group), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(catalog.EntryGroup( - name='name_value', - uid='uid_value', - description='description_value', - display_name='display_name_value', - etag='etag_value', - transfer_status=catalog.TransferStatus.TRANSFER_STATUS_MIGRATED, - )) - await client.get_entry_group(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = catalog.GetEntryGroupRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -@pytest.mark.asyncio -async def test_create_entry_empty_call_grpc_asyncio(): - client = CatalogServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.create_entry), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(catalog.Entry( - name='name_value', - entry_type='entry_type_value', - parent_entry='parent_entry_value', - fully_qualified_name='fully_qualified_name_value', - )) - await client.create_entry(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = catalog.CreateEntryRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -@pytest.mark.asyncio -async def test_update_entry_empty_call_grpc_asyncio(): - client = CatalogServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.update_entry), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(catalog.Entry( - name='name_value', - entry_type='entry_type_value', - parent_entry='parent_entry_value', - fully_qualified_name='fully_qualified_name_value', - )) - await client.update_entry(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = catalog.UpdateEntryRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. 
-@pytest.mark.asyncio -async def test_delete_entry_empty_call_grpc_asyncio(): - client = CatalogServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.delete_entry), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(catalog.Entry( - name='name_value', - entry_type='entry_type_value', - parent_entry='parent_entry_value', - fully_qualified_name='fully_qualified_name_value', - )) - await client.delete_entry(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = catalog.DeleteEntryRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -@pytest.mark.asyncio -async def test_list_entries_empty_call_grpc_asyncio(): - client = CatalogServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.list_entries), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(catalog.ListEntriesResponse( - next_page_token='next_page_token_value', - )) - await client.list_entries(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = catalog.ListEntriesRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -@pytest.mark.asyncio -async def test_get_entry_empty_call_grpc_asyncio(): - client = CatalogServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.get_entry), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(catalog.Entry( - name='name_value', - entry_type='entry_type_value', - parent_entry='parent_entry_value', - fully_qualified_name='fully_qualified_name_value', - )) - await client.get_entry(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = catalog.GetEntryRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -@pytest.mark.asyncio -async def test_lookup_entry_empty_call_grpc_asyncio(): - client = CatalogServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.lookup_entry), - '__call__') as call: - # Designate an appropriate return value for the call. 
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(catalog.Entry( - name='name_value', - entry_type='entry_type_value', - parent_entry='parent_entry_value', - fully_qualified_name='fully_qualified_name_value', - )) - await client.lookup_entry(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = catalog.LookupEntryRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -@pytest.mark.asyncio -async def test_search_entries_empty_call_grpc_asyncio(): - client = CatalogServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.search_entries), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(catalog.SearchEntriesResponse( - total_size=1086, - next_page_token='next_page_token_value', - unreachable=['unreachable_value'], - )) - await client.search_entries(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = catalog.SearchEntriesRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -@pytest.mark.asyncio -async def test_create_metadata_job_empty_call_grpc_asyncio(): - client = CatalogServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.create_metadata_job), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name='operations/spam') - ) - await client.create_metadata_job(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = catalog.CreateMetadataJobRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -@pytest.mark.asyncio -async def test_get_metadata_job_empty_call_grpc_asyncio(): - client = CatalogServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.get_metadata_job), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(catalog.MetadataJob( - name='name_value', - uid='uid_value', - type_=catalog.MetadataJob.Type.IMPORT, - )) - await client.get_metadata_job(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = catalog.GetMetadataJobRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. 
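One detail worth noting before the remaining failsafe cases:
grpc_helpers_async.FakeUnaryUnaryCall wraps a plain message so that the patched stub can
be awaited like a live unary-unary RPC, and long-running methods such as
create_metadata_job are faked with an operations_pb2.Operation rather than a resource
message. A small illustration (not part of the generated suite):

    async def demo_fake_call():
        fake = grpc_helpers_async.FakeUnaryUnaryCall(
            operations_pb2.Operation(name='operations/spam')
        )
        # Awaiting the fake resolves immediately to the wrapped message.
        op = await fake
        assert op.name == 'operations/spam'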
-@pytest.mark.asyncio -async def test_list_metadata_jobs_empty_call_grpc_asyncio(): - client = CatalogServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.list_metadata_jobs), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(catalog.ListMetadataJobsResponse( - next_page_token='next_page_token_value', - unreachable_locations=['unreachable_locations_value'], - )) - await client.list_metadata_jobs(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = catalog.ListMetadataJobsRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -@pytest.mark.asyncio -async def test_cancel_metadata_job_empty_call_grpc_asyncio(): - client = CatalogServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.cancel_metadata_job), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - await client.cancel_metadata_job(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = catalog.CancelMetadataJobRequest() - - assert args[0] == request_msg - - -def test_transport_grpc_default(): - # A client should use the gRPC transport by default. - client = CatalogServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - assert isinstance( - client.transport, - transports.CatalogServiceGrpcTransport, - ) - -def test_catalog_service_base_transport_error(): - # Passing both a credentials object and credentials_file should raise an error - with pytest.raises(core_exceptions.DuplicateCredentialArgs): - transport = transports.CatalogServiceTransport( - credentials=ga_credentials.AnonymousCredentials(), - credentials_file="credentials.json" - ) - - -def test_catalog_service_base_transport(): - # Instantiate the base transport. - with mock.patch('google.cloud.dataplex_v1.services.catalog_service.transports.CatalogServiceTransport.__init__') as Transport: - Transport.return_value = None - transport = transports.CatalogServiceTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Every method on the transport should just blindly - # raise NotImplementedError. 
- methods = ( - 'create_entry_type', - 'update_entry_type', - 'delete_entry_type', - 'list_entry_types', - 'get_entry_type', - 'create_aspect_type', - 'update_aspect_type', - 'delete_aspect_type', - 'list_aspect_types', - 'get_aspect_type', - 'create_entry_group', - 'update_entry_group', - 'delete_entry_group', - 'list_entry_groups', - 'get_entry_group', - 'create_entry', - 'update_entry', - 'delete_entry', - 'list_entries', - 'get_entry', - 'lookup_entry', - 'search_entries', - 'create_metadata_job', - 'get_metadata_job', - 'list_metadata_jobs', - 'cancel_metadata_job', - 'get_location', - 'list_locations', - 'get_operation', - 'cancel_operation', - 'delete_operation', - 'list_operations', - ) - for method in methods: - with pytest.raises(NotImplementedError): - getattr(transport, method)(request=object()) - - with pytest.raises(NotImplementedError): - transport.close() - - # Additionally, the LRO client (a property) should - # also raise NotImplementedError - with pytest.raises(NotImplementedError): - transport.operations_client - - # Catch all for all remaining methods and properties - remainder = [ - 'kind', - ] - for r in remainder: - with pytest.raises(NotImplementedError): - getattr(transport, r)() - - -def test_catalog_service_base_transport_with_credentials_file(): - # Instantiate the base transport with a credentials file - with mock.patch.object(google.auth, 'load_credentials_from_file', autospec=True) as load_creds, mock.patch('google.cloud.dataplex_v1.services.catalog_service.transports.CatalogServiceTransport._prep_wrapped_messages') as Transport: - Transport.return_value = None - load_creds.return_value = (ga_credentials.AnonymousCredentials(), None) - transport = transports.CatalogServiceTransport( - credentials_file="credentials.json", - quota_project_id="octopus", - ) - load_creds.assert_called_once_with("credentials.json", - scopes=None, - default_scopes=( - 'https://www.googleapis.com/auth/cloud-platform', -), - quota_project_id="octopus", - ) - - -def test_catalog_service_base_transport_with_adc(): - # Test the default credentials are used if credentials and credentials_file are None. - with mock.patch.object(google.auth, 'default', autospec=True) as adc, mock.patch('google.cloud.dataplex_v1.services.catalog_service.transports.CatalogServiceTransport._prep_wrapped_messages') as Transport: - Transport.return_value = None - adc.return_value = (ga_credentials.AnonymousCredentials(), None) - transport = transports.CatalogServiceTransport() - adc.assert_called_once() - - -def test_catalog_service_auth_adc(): - # If no credentials are provided, we should use ADC credentials. - with mock.patch.object(google.auth, 'default', autospec=True) as adc: - adc.return_value = (ga_credentials.AnonymousCredentials(), None) - CatalogServiceClient() - adc.assert_called_once_with( - scopes=None, - default_scopes=( - 'https://www.googleapis.com/auth/cloud-platform', -), - quota_project_id=None, - ) - - -@pytest.mark.parametrize( - "transport_class", - [ - transports.CatalogServiceGrpcTransport, - transports.CatalogServiceGrpcAsyncIOTransport, - ], -) -def test_catalog_service_transport_auth_adc(transport_class): - # If credentials and host are not provided, the transport class should use - # ADC credentials. 
- with mock.patch.object(google.auth, 'default', autospec=True) as adc: - adc.return_value = (ga_credentials.AnonymousCredentials(), None) - transport_class(quota_project_id="octopus", scopes=["1", "2"]) - adc.assert_called_once_with( - scopes=["1", "2"], - default_scopes=( 'https://www.googleapis.com/auth/cloud-platform',), - quota_project_id="octopus", - ) - - -@pytest.mark.parametrize( - "transport_class", - [ - transports.CatalogServiceGrpcTransport, - transports.CatalogServiceGrpcAsyncIOTransport, - ], -) -def test_catalog_service_transport_auth_gdch_credentials(transport_class): - host = 'https://language.com' - api_audience_tests = [None, 'https://language2.com'] - api_audience_expect = [host, 'https://language2.com'] - for t, e in zip(api_audience_tests, api_audience_expect): - with mock.patch.object(google.auth, 'default', autospec=True) as adc: - gdch_mock = mock.MagicMock() - type(gdch_mock).with_gdch_audience = mock.PropertyMock(return_value=gdch_mock) - adc.return_value = (gdch_mock, None) - transport_class(host=host, api_audience=t) - gdch_mock.with_gdch_audience.assert_called_once_with( - e - ) - - -@pytest.mark.parametrize( - "transport_class,grpc_helpers", - [ - (transports.CatalogServiceGrpcTransport, grpc_helpers), - (transports.CatalogServiceGrpcAsyncIOTransport, grpc_helpers_async) - ], -) -def test_catalog_service_transport_create_channel(transport_class, grpc_helpers): - # If credentials and host are not provided, the transport class should use - # ADC credentials. - with mock.patch.object(google.auth, "default", autospec=True) as adc, mock.patch.object( - grpc_helpers, "create_channel", autospec=True - ) as create_channel: - creds = ga_credentials.AnonymousCredentials() - adc.return_value = (creds, None) - transport_class( - quota_project_id="octopus", - scopes=["1", "2"] - ) - - create_channel.assert_called_with( - "dataplex.googleapis.com:443", - credentials=creds, - credentials_file=None, - quota_project_id="octopus", - default_scopes=( - 'https://www.googleapis.com/auth/cloud-platform', -), - scopes=["1", "2"], - default_host="dataplex.googleapis.com", - ssl_credentials=None, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ], - ) - - -@pytest.mark.parametrize("transport_class", [transports.CatalogServiceGrpcTransport, transports.CatalogServiceGrpcAsyncIOTransport]) -def test_catalog_service_grpc_transport_client_cert_source_for_mtls( - transport_class -): - cred = ga_credentials.AnonymousCredentials() - - # Check ssl_channel_credentials is used if provided. - with mock.patch.object(transport_class, "create_channel") as mock_create_channel: - mock_ssl_channel_creds = mock.Mock() - transport_class( - host="squid.clam.whelk", - credentials=cred, - ssl_channel_credentials=mock_ssl_channel_creds - ) - mock_create_channel.assert_called_once_with( - "squid.clam.whelk:443", - credentials=cred, - credentials_file=None, - scopes=None, - ssl_credentials=mock_ssl_channel_creds, - quota_project_id=None, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ], - ) - - # Check if ssl_channel_credentials is not provided, then client_cert_source_for_mtls - # is used. 
-    with mock.patch.object(transport_class, "create_channel", return_value=mock.Mock()):
-        with mock.patch("grpc.ssl_channel_credentials") as mock_ssl_cred:
-            transport_class(
-                credentials=cred,
-                client_cert_source_for_mtls=client_cert_source_callback
-            )
-            expected_cert, expected_key = client_cert_source_callback()
-            mock_ssl_cred.assert_called_once_with(
-                certificate_chain=expected_cert,
-                private_key=expected_key
-            )
-
-
-@pytest.mark.parametrize("transport_name", [
-    "grpc",
-    "grpc_asyncio",
-])
-def test_catalog_service_host_no_port(transport_name):
-    client = CatalogServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        client_options=client_options.ClientOptions(api_endpoint='dataplex.googleapis.com'),
-        transport=transport_name,
-    )
-    assert client.transport._host == (
-        'dataplex.googleapis.com:443'
-    )
-
-@pytest.mark.parametrize("transport_name", [
-    "grpc",
-    "grpc_asyncio",
-])
-def test_catalog_service_host_with_port(transport_name):
-    client = CatalogServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        client_options=client_options.ClientOptions(api_endpoint='dataplex.googleapis.com:8000'),
-        transport=transport_name,
-    )
-    assert client.transport._host == (
-        'dataplex.googleapis.com:8000'
-    )
-
-def test_catalog_service_grpc_transport_channel():
-    channel = grpc.secure_channel('http://localhost/', grpc.local_channel_credentials())
-
-    # Check that channel is used if provided.
-    transport = transports.CatalogServiceGrpcTransport(
-        host="squid.clam.whelk",
-        channel=channel,
-    )
-    assert transport.grpc_channel == channel
-    assert transport._host == "squid.clam.whelk:443"
-    assert transport._ssl_channel_credentials is None
-
-
-def test_catalog_service_grpc_asyncio_transport_channel():
-    channel = aio.secure_channel('http://localhost/', grpc.local_channel_credentials())
-
-    # Check that channel is used if provided.
-    transport = transports.CatalogServiceGrpcAsyncIOTransport(
-        host="squid.clam.whelk",
-        channel=channel,
-    )
-    assert transport.grpc_channel == channel
-    assert transport._host == "squid.clam.whelk:443"
-    assert transport._ssl_channel_credentials is None
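The two host tests above pin down a convention: an api_endpoint supplied without a port
is normalized to the default gRPC port 443, while an explicit port is preserved. A
minimal sketch of that convention (an illustration only, not the library's actual
implementation):

    DEFAULT_PORT = 443

    def normalize_host(host: str) -> str:
        # Append the default port only when the endpoint does not name one.
        return host if ":" in host else f"{host}:{DEFAULT_PORT}"

    assert normalize_host("dataplex.googleapis.com") == "dataplex.googleapis.com:443"
    assert normalize_host("dataplex.googleapis.com:8000") == "dataplex.googleapis.com:8000"

-# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are
-# removed from grpc/grpc_asyncio transport constructor.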
-@pytest.mark.parametrize("transport_class", [transports.CatalogServiceGrpcTransport, transports.CatalogServiceGrpcAsyncIOTransport])
-def test_catalog_service_transport_channel_mtls_with_client_cert_source(
-    transport_class
-):
-    with mock.patch("grpc.ssl_channel_credentials", autospec=True) as grpc_ssl_channel_cred:
-        with mock.patch.object(transport_class, "create_channel") as grpc_create_channel:
-            mock_ssl_cred = mock.Mock()
-            grpc_ssl_channel_cred.return_value = mock_ssl_cred
-
-            mock_grpc_channel = mock.Mock()
-            grpc_create_channel.return_value = mock_grpc_channel
-
-            cred = ga_credentials.AnonymousCredentials()
-            with pytest.warns(DeprecationWarning):
-                with mock.patch.object(google.auth, 'default') as adc:
-                    adc.return_value = (cred, None)
-                    transport = transport_class(
-                        host="squid.clam.whelk",
-                        api_mtls_endpoint="mtls.squid.clam.whelk",
-                        client_cert_source=client_cert_source_callback,
-                    )
-                    adc.assert_called_once()
-
-            grpc_ssl_channel_cred.assert_called_once_with(
-                certificate_chain=b"cert bytes", private_key=b"key bytes"
-            )
-            grpc_create_channel.assert_called_once_with(
-                "mtls.squid.clam.whelk:443",
-                credentials=cred,
-                credentials_file=None,
-                scopes=None,
-                ssl_credentials=mock_ssl_cred,
-                quota_project_id=None,
-                options=[
-                    ("grpc.max_send_message_length", -1),
-                    ("grpc.max_receive_message_length", -1),
-                ],
-            )
-            assert transport.grpc_channel == mock_grpc_channel
-            assert transport._ssl_channel_credentials == mock_ssl_cred
-
-
-# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are
-# removed from grpc/grpc_asyncio transport constructor.
-@pytest.mark.parametrize("transport_class", [transports.CatalogServiceGrpcTransport, transports.CatalogServiceGrpcAsyncIOTransport])
-def test_catalog_service_transport_channel_mtls_with_adc(
-    transport_class
-):
-    mock_ssl_cred = mock.Mock()
-    with mock.patch.multiple(
-        "google.auth.transport.grpc.SslCredentials",
-        __init__=mock.Mock(return_value=None),
-        ssl_credentials=mock.PropertyMock(return_value=mock_ssl_cred),
-    ):
-        with mock.patch.object(transport_class, "create_channel") as grpc_create_channel:
-            mock_grpc_channel = mock.Mock()
-            grpc_create_channel.return_value = mock_grpc_channel
-            mock_cred = mock.Mock()
-
-            with pytest.warns(DeprecationWarning):
-                transport = transport_class(
-                    host="squid.clam.whelk",
-                    credentials=mock_cred,
-                    api_mtls_endpoint="mtls.squid.clam.whelk",
-                    client_cert_source=None,
-                )
-
-            grpc_create_channel.assert_called_once_with(
-                "mtls.squid.clam.whelk:443",
-                credentials=mock_cred,
-                credentials_file=None,
-                scopes=None,
-                ssl_credentials=mock_ssl_cred,
-                quota_project_id=None,
-                options=[
-                    ("grpc.max_send_message_length", -1),
-                    ("grpc.max_receive_message_length", -1),
-                ],
-            )
-            assert transport.grpc_channel == mock_grpc_channel
-
-
-def test_catalog_service_grpc_lro_client():
-    client = CatalogServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport='grpc',
-    )
-    transport = client.transport
-
-    # Ensure that we have an api-core operations client.
-    assert isinstance(
-        transport.operations_client,
-        operations_v1.OperationsClient,
-    )
-
-    # Ensure that subsequent calls to the property send the exact same object.
-    assert transport.operations_client is transport.operations_client
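The identity assertion at the end of the test above is a memoization check: repeated
reads of transport.operations_client must return the same object rather than build a
new LRO client on every access. A minimal sketch of that caching pattern (illustrative
names under assumed behavior, not the transport's actual code):

    class FakeTransport:
        def __init__(self):
            self._operations_client = None

        @property
        def operations_client(self):
            # Build the LRO client lazily on first access, then reuse it.
            if self._operations_client is None:
                self._operations_client = object()  # stands in for OperationsClient
            return self._operations_client

    transport = FakeTransport()
    assert transport.operations_client is transport.operations_client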
-
-
-def test_catalog_service_grpc_lro_async_client():
-    client = CatalogServiceAsyncClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport='grpc_asyncio',
-    )
-    transport = client.transport
-
-    # Ensure that we have an api-core operations client.
-    assert isinstance(
-        transport.operations_client,
-        operations_v1.OperationsAsyncClient,
-    )
-
-    # Ensure that subsequent calls to the property send the exact same object.
-    assert transport.operations_client is transport.operations_client
-
-
-def test_aspect_type_path():
-    project = "squid"
-    location = "clam"
-    aspect_type = "whelk"
-    expected = "projects/{project}/locations/{location}/aspectTypes/{aspect_type}".format(project=project, location=location, aspect_type=aspect_type, )
-    actual = CatalogServiceClient.aspect_type_path(project, location, aspect_type)
-    assert expected == actual
-
-
-def test_parse_aspect_type_path():
-    expected = {
-        "project": "octopus",
-        "location": "oyster",
-        "aspect_type": "nudibranch",
-    }
-    path = CatalogServiceClient.aspect_type_path(**expected)
-
-    # Check that the path construction is reversible.
-    actual = CatalogServiceClient.parse_aspect_type_path(path)
-    assert expected == actual
-
-def test_entry_path():
-    project = "cuttlefish"
-    location = "mussel"
-    entry_group = "winkle"
-    entry = "nautilus"
-    expected = "projects/{project}/locations/{location}/entryGroups/{entry_group}/entries/{entry}".format(project=project, location=location, entry_group=entry_group, entry=entry, )
-    actual = CatalogServiceClient.entry_path(project, location, entry_group, entry)
-    assert expected == actual
-
-
-def test_parse_entry_path():
-    expected = {
-        "project": "scallop",
-        "location": "abalone",
-        "entry_group": "squid",
-        "entry": "clam",
-    }
-    path = CatalogServiceClient.entry_path(**expected)
-
-    # Check that the path construction is reversible.
-    actual = CatalogServiceClient.parse_entry_path(path)
-    assert expected == actual
-
-def test_entry_group_path():
-    project = "whelk"
-    location = "octopus"
-    entry_group = "oyster"
-    expected = "projects/{project}/locations/{location}/entryGroups/{entry_group}".format(project=project, location=location, entry_group=entry_group, )
-    actual = CatalogServiceClient.entry_group_path(project, location, entry_group)
-    assert expected == actual
-
-
-def test_parse_entry_group_path():
-    expected = {
-        "project": "nudibranch",
-        "location": "cuttlefish",
-        "entry_group": "mussel",
-    }
-    path = CatalogServiceClient.entry_group_path(**expected)
-
-    # Check that the path construction is reversible.
-    actual = CatalogServiceClient.parse_entry_group_path(path)
-    assert expected == actual
-
-def test_entry_type_path():
-    project = "winkle"
-    location = "nautilus"
-    entry_type = "scallop"
-    expected = "projects/{project}/locations/{location}/entryTypes/{entry_type}".format(project=project, location=location, entry_type=entry_type, )
-    actual = CatalogServiceClient.entry_type_path(project, location, entry_type)
-    assert expected == actual
-
-
-def test_parse_entry_type_path():
-    expected = {
-        "project": "abalone",
-        "location": "squid",
-        "entry_type": "clam",
-    }
-    path = CatalogServiceClient.entry_type_path(**expected)
-
-    # Check that the path construction is reversible.
- actual = CatalogServiceClient.parse_entry_type_path(path) - assert expected == actual - -def test_metadata_job_path(): - project = "whelk" - location = "octopus" - metadataJob = "oyster" - expected = "projects/{project}/locations/{location}/metadataJobs/{metadataJob}".format(project=project, location=location, metadataJob=metadataJob, ) - actual = CatalogServiceClient.metadata_job_path(project, location, metadataJob) - assert expected == actual - - -def test_parse_metadata_job_path(): - expected = { - "project": "nudibranch", - "location": "cuttlefish", - "metadataJob": "mussel", - } - path = CatalogServiceClient.metadata_job_path(**expected) - - # Check that the path construction is reversible. - actual = CatalogServiceClient.parse_metadata_job_path(path) - assert expected == actual - -def test_common_billing_account_path(): - billing_account = "winkle" - expected = "billingAccounts/{billing_account}".format(billing_account=billing_account, ) - actual = CatalogServiceClient.common_billing_account_path(billing_account) - assert expected == actual - - -def test_parse_common_billing_account_path(): - expected = { - "billing_account": "nautilus", - } - path = CatalogServiceClient.common_billing_account_path(**expected) - - # Check that the path construction is reversible. - actual = CatalogServiceClient.parse_common_billing_account_path(path) - assert expected == actual - -def test_common_folder_path(): - folder = "scallop" - expected = "folders/{folder}".format(folder=folder, ) - actual = CatalogServiceClient.common_folder_path(folder) - assert expected == actual - - -def test_parse_common_folder_path(): - expected = { - "folder": "abalone", - } - path = CatalogServiceClient.common_folder_path(**expected) - - # Check that the path construction is reversible. - actual = CatalogServiceClient.parse_common_folder_path(path) - assert expected == actual - -def test_common_organization_path(): - organization = "squid" - expected = "organizations/{organization}".format(organization=organization, ) - actual = CatalogServiceClient.common_organization_path(organization) - assert expected == actual - - -def test_parse_common_organization_path(): - expected = { - "organization": "clam", - } - path = CatalogServiceClient.common_organization_path(**expected) - - # Check that the path construction is reversible. - actual = CatalogServiceClient.parse_common_organization_path(path) - assert expected == actual - -def test_common_project_path(): - project = "whelk" - expected = "projects/{project}".format(project=project, ) - actual = CatalogServiceClient.common_project_path(project) - assert expected == actual - - -def test_parse_common_project_path(): - expected = { - "project": "octopus", - } - path = CatalogServiceClient.common_project_path(**expected) - - # Check that the path construction is reversible. - actual = CatalogServiceClient.parse_common_project_path(path) - assert expected == actual - -def test_common_location_path(): - project = "oyster" - location = "nudibranch" - expected = "projects/{project}/locations/{location}".format(project=project, location=location, ) - actual = CatalogServiceClient.common_location_path(project, location) - assert expected == actual - - -def test_parse_common_location_path(): - expected = { - "project": "cuttlefish", - "location": "mussel", - } - path = CatalogServiceClient.common_location_path(**expected) - - # Check that the path construction is reversible. 
- actual = CatalogServiceClient.parse_common_location_path(path) - assert expected == actual - - -def test_client_with_default_client_info(): - client_info = gapic_v1.client_info.ClientInfo() - - with mock.patch.object(transports.CatalogServiceTransport, '_prep_wrapped_messages') as prep: - client = CatalogServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - client_info=client_info, - ) - prep.assert_called_once_with(client_info) - - with mock.patch.object(transports.CatalogServiceTransport, '_prep_wrapped_messages') as prep: - transport_class = CatalogServiceClient.get_transport_class() - transport = transport_class( - credentials=ga_credentials.AnonymousCredentials(), - client_info=client_info, - ) - prep.assert_called_once_with(client_info) - - -def test_delete_operation(transport: str = "grpc"): - client = CatalogServiceClient( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = operations_pb2.DeleteOperationRequest() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.delete_operation), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = None - response = client.delete_operation(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the response is the type that we expect. - assert response is None -@pytest.mark.asyncio -async def test_delete_operation_async(transport: str = "grpc_asyncio"): - client = CatalogServiceAsyncClient( - credentials=async_anonymous_credentials(), transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = operations_pb2.DeleteOperationRequest() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.delete_operation), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - None - ) - response = await client.delete_operation(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the response is the type that we expect. - assert response is None - -def test_delete_operation_field_headers(): - client = CatalogServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = operations_pb2.DeleteOperationRequest() - request.name = "locations" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.delete_operation), "__call__") as call: - call.return_value = None - - client.delete_operation(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. 
- _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "name=locations",) in kw["metadata"] -@pytest.mark.asyncio -async def test_delete_operation_field_headers_async(): - client = CatalogServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = operations_pb2.DeleteOperationRequest() - request.name = "locations" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.delete_operation), "__call__") as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - None - ) - await client.delete_operation(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "name=locations",) in kw["metadata"] - -def test_delete_operation_from_dict(): - client = CatalogServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.delete_operation), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = None - - response = client.delete_operation( - request={ - "name": "locations", - } - ) - call.assert_called() -@pytest.mark.asyncio -async def test_delete_operation_from_dict_async(): - client = CatalogServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.delete_operation), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - None - ) - response = await client.delete_operation( - request={ - "name": "locations", - } - ) - call.assert_called() - - -def test_cancel_operation(transport: str = "grpc"): - client = CatalogServiceClient( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = operations_pb2.CancelOperationRequest() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = None - response = client.cancel_operation(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the response is the type that we expect. - assert response is None -@pytest.mark.asyncio -async def test_cancel_operation_async(transport: str = "grpc_asyncio"): - client = CatalogServiceAsyncClient( - credentials=async_anonymous_credentials(), transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = operations_pb2.CancelOperationRequest() - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - None - ) - response = await client.cancel_operation(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the response is the type that we expect. - assert response is None - -def test_cancel_operation_field_headers(): - client = CatalogServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = operations_pb2.CancelOperationRequest() - request.name = "locations" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: - call.return_value = None - - client.cancel_operation(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "name=locations",) in kw["metadata"] -@pytest.mark.asyncio -async def test_cancel_operation_field_headers_async(): - client = CatalogServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = operations_pb2.CancelOperationRequest() - request.name = "locations" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - None - ) - await client.cancel_operation(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "name=locations",) in kw["metadata"] - -def test_cancel_operation_from_dict(): - client = CatalogServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = None - - response = client.cancel_operation( - request={ - "name": "locations", - } - ) - call.assert_called() -@pytest.mark.asyncio -async def test_cancel_operation_from_dict_async(): - client = CatalogServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: - # Designate an appropriate return value for the call. 
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - None - ) - response = await client.cancel_operation( - request={ - "name": "locations", - } - ) - call.assert_called() - - -def test_get_operation(transport: str = "grpc"): - client = CatalogServiceClient( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = operations_pb2.GetOperationRequest() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_operation), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = operations_pb2.Operation() - response = client.get_operation(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, operations_pb2.Operation) -@pytest.mark.asyncio -async def test_get_operation_async(transport: str = "grpc_asyncio"): - client = CatalogServiceAsyncClient( - credentials=async_anonymous_credentials(), transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = operations_pb2.GetOperationRequest() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_operation), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation() - ) - response = await client.get_operation(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, operations_pb2.Operation) - -def test_get_operation_field_headers(): - client = CatalogServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = operations_pb2.GetOperationRequest() - request.name = "locations" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_operation), "__call__") as call: - call.return_value = operations_pb2.Operation() - - client.get_operation(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "name=locations",) in kw["metadata"] -@pytest.mark.asyncio -async def test_get_operation_field_headers_async(): - client = CatalogServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = operations_pb2.GetOperationRequest() - request.name = "locations" - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object(type(client.transport.get_operation), "__call__") as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation() - ) - await client.get_operation(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "name=locations",) in kw["metadata"] - -def test_get_operation_from_dict(): - client = CatalogServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_operation), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = operations_pb2.Operation() - - response = client.get_operation( - request={ - "name": "locations", - } - ) - call.assert_called() -@pytest.mark.asyncio -async def test_get_operation_from_dict_async(): - client = CatalogServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_operation), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation() - ) - response = await client.get_operation( - request={ - "name": "locations", - } - ) - call.assert_called() - - -def test_list_operations(transport: str = "grpc"): - client = CatalogServiceClient( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = operations_pb2.ListOperationsRequest() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_operations), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = operations_pb2.ListOperationsResponse() - response = client.list_operations(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, operations_pb2.ListOperationsResponse) -@pytest.mark.asyncio -async def test_list_operations_async(transport: str = "grpc_asyncio"): - client = CatalogServiceAsyncClient( - credentials=async_anonymous_credentials(), transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = operations_pb2.ListOperationsRequest() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_operations), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.ListOperationsResponse() - ) - response = await client.list_operations(request) - # Establish that the underlying gRPC stub method was called. 
- assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, operations_pb2.ListOperationsResponse) - -def test_list_operations_field_headers(): - client = CatalogServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = operations_pb2.ListOperationsRequest() - request.name = "locations" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_operations), "__call__") as call: - call.return_value = operations_pb2.ListOperationsResponse() - - client.list_operations(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "name=locations",) in kw["metadata"] -@pytest.mark.asyncio -async def test_list_operations_field_headers_async(): - client = CatalogServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = operations_pb2.ListOperationsRequest() - request.name = "locations" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_operations), "__call__") as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.ListOperationsResponse() - ) - await client.list_operations(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "name=locations",) in kw["metadata"] - -def test_list_operations_from_dict(): - client = CatalogServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_operations), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = operations_pb2.ListOperationsResponse() - - response = client.list_operations( - request={ - "name": "locations", - } - ) - call.assert_called() -@pytest.mark.asyncio -async def test_list_operations_from_dict_async(): - client = CatalogServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_operations), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.ListOperationsResponse() - ) - response = await client.list_operations( - request={ - "name": "locations", - } - ) - call.assert_called() - - -def test_list_locations(transport: str = "grpc"): - client = CatalogServiceClient( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. 
- request = locations_pb2.ListLocationsRequest() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_locations), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = locations_pb2.ListLocationsResponse() - response = client.list_locations(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, locations_pb2.ListLocationsResponse) -@pytest.mark.asyncio -async def test_list_locations_async(transport: str = "grpc_asyncio"): - client = CatalogServiceAsyncClient( - credentials=async_anonymous_credentials(), transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = locations_pb2.ListLocationsRequest() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_locations), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - locations_pb2.ListLocationsResponse() - ) - response = await client.list_locations(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, locations_pb2.ListLocationsResponse) - -def test_list_locations_field_headers(): - client = CatalogServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = locations_pb2.ListLocationsRequest() - request.name = "locations" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_locations), "__call__") as call: - call.return_value = locations_pb2.ListLocationsResponse() - - client.list_locations(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "name=locations",) in kw["metadata"] -@pytest.mark.asyncio -async def test_list_locations_field_headers_async(): - client = CatalogServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = locations_pb2.ListLocationsRequest() - request.name = "locations" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_locations), "__call__") as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - locations_pb2.ListLocationsResponse() - ) - await client.list_locations(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. 
- _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "name=locations",) in kw["metadata"] - -def test_list_locations_from_dict(): - client = CatalogServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_locations), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = locations_pb2.ListLocationsResponse() - - response = client.list_locations( - request={ - "name": "locations", - } - ) - call.assert_called() -@pytest.mark.asyncio -async def test_list_locations_from_dict_async(): - client = CatalogServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_locations), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - locations_pb2.ListLocationsResponse() - ) - response = await client.list_locations( - request={ - "name": "locations", - } - ) - call.assert_called() - - -def test_get_location(transport: str = "grpc"): - client = CatalogServiceClient( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = locations_pb2.GetLocationRequest() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_location), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = locations_pb2.Location() - response = client.get_location(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, locations_pb2.Location) -@pytest.mark.asyncio -async def test_get_location_async(transport: str = "grpc_asyncio"): - client = CatalogServiceAsyncClient( - credentials=async_anonymous_credentials(), transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = locations_pb2.GetLocationRequest() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_location), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - locations_pb2.Location() - ) - response = await client.get_location(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, locations_pb2.Location) - -def test_get_location_field_headers(): - client = CatalogServiceClient( - credentials=ga_credentials.AnonymousCredentials()) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = locations_pb2.GetLocationRequest() - request.name = "locations/abc" - - # Mock the actual call within the gRPC stub, and fake the request. 
-    with mock.patch.object(type(client.transport.get_location), "__call__") as call:
-        call.return_value = locations_pb2.Location()
-
-        client.get_location(request)
-        # Establish that the underlying gRPC stub method was called.
-        assert len(call.mock_calls) == 1
-        _, args, _ = call.mock_calls[0]
-        assert args[0] == request
-
-        # Establish that the field header was sent.
-        _, _, kw = call.mock_calls[0]
-        assert ("x-goog-request-params", "name=locations/abc",) in kw["metadata"]
-@pytest.mark.asyncio
-async def test_get_location_field_headers_async():
-    client = CatalogServiceAsyncClient(
-        credentials=async_anonymous_credentials()
-    )
-
-    # Any value that is part of the HTTP/1.1 URI should be sent as
-    # a field header. Set these to a non-empty value.
-    request = locations_pb2.GetLocationRequest()
-    request.name = "locations/abc"
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(type(client.transport.get_location), "__call__") as call:
-        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
-            locations_pb2.Location()
-        )
-        await client.get_location(request)
-        # Establish that the underlying gRPC stub method was called.
-        assert len(call.mock_calls) == 1
-        _, args, _ = call.mock_calls[0]
-        assert args[0] == request
-
-        # Establish that the field header was sent.
-        _, _, kw = call.mock_calls[0]
-        assert ("x-goog-request-params", "name=locations/abc",) in kw["metadata"]
-
-def test_get_location_from_dict():
-    client = CatalogServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-    )
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(type(client.transport.get_location), "__call__") as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = locations_pb2.Location()
-
-        response = client.get_location(
-            request={
-                "name": "locations/abc",
-            }
-        )
-        call.assert_called()
-@pytest.mark.asyncio
-async def test_get_location_from_dict_async():
-    client = CatalogServiceAsyncClient(
-        credentials=async_anonymous_credentials(),
-    )
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(type(client.transport.get_location), "__call__") as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
-            locations_pb2.Location()
-        )
-        response = await client.get_location(
-            request={
-                "name": "locations/abc",
-            }
-        )
-        call.assert_called()
-
-
-def test_transport_close_grpc():
-    client = CatalogServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport="grpc"
-    )
-    with mock.patch.object(type(getattr(client.transport, "_grpc_channel")), "close") as close:
-        with client:
-            close.assert_not_called()
-        close.assert_called_once()
-
-
-@pytest.mark.asyncio
-async def test_transport_close_grpc_asyncio():
-    client = CatalogServiceAsyncClient(
-        credentials=async_anonymous_credentials(),
-        transport="grpc_asyncio"
-    )
-    with mock.patch.object(type(getattr(client.transport, "_grpc_channel")), "close") as close:
-        async with client:
-            close.assert_not_called()
-        close.assert_called_once()
-
-
-def test_client_ctx():
-    transports = [
-        'grpc',
-    ]
-    for transport in transports:
-        client = CatalogServiceClient(
-            credentials=ga_credentials.AnonymousCredentials(),
-            transport=transport
-        )
-        # Test client calls underlying transport.
-        with mock.patch.object(type(client.transport), "close") as close:
-            close.assert_not_called()
-            with client:
-                pass
-            close.assert_called()
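The three tests above pin down the client's context-manager contract: entering the
with-block must not close anything, and exiting must close the underlying transport
exactly once. A minimal sketch of that contract (hypothetical classes, not the
generated client's actual code):

    class FakeTransport:
        def __init__(self):
            self.close_calls = 0

        def close(self):
            self.close_calls += 1

    class Client:
        def __init__(self, transport):
            self.transport = transport

        def __enter__(self):
            return self

        def __exit__(self, exc_type, exc, tb):
            self.transport.close()

    transport = FakeTransport()
    with Client(transport):
        assert transport.close_calls == 0  # still open inside the block
    assert transport.close_calls == 1      # closed exactly once on exit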
-
-@pytest.mark.parametrize("client_class,transport_class", [
-    (CatalogServiceClient, transports.CatalogServiceGrpcTransport),
-    (CatalogServiceAsyncClient, transports.CatalogServiceGrpcAsyncIOTransport),
-])
-def test_api_key_credentials(client_class, transport_class):
-    with mock.patch.object(
-        google.auth._default, "get_api_key_credentials", create=True
-    ) as get_api_key_credentials:
-        mock_cred = mock.Mock()
-        get_api_key_credentials.return_value = mock_cred
-        options = client_options.ClientOptions()
-        options.api_key = "api_key"
-        with mock.patch.object(transport_class, "__init__") as patched:
-            patched.return_value = None
-            client = client_class(client_options=options)
-            patched.assert_called_once_with(
-                credentials=mock_cred,
-                credentials_file=None,
-                host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE),
-                scopes=None,
-                client_cert_source_for_mtls=None,
-                quota_project_id=None,
-                client_info=transports.base.DEFAULT_CLIENT_INFO,
-                always_use_jwt_access=True,
-                api_audience=None,
-            )
diff --git a/owl-bot-staging/google-cloud-dataplex/v1/tests/unit/gapic/dataplex_v1/test_content_service.py b/owl-bot-staging/google-cloud-dataplex/v1/tests/unit/gapic/dataplex_v1/test_content_service.py
deleted file mode 100644
index 5d28ebbdf192..000000000000
--- a/owl-bot-staging/google-cloud-dataplex/v1/tests/unit/gapic/dataplex_v1/test_content_service.py
+++ /dev/null
@@ -1,5202 +0,0 @@
-# -*- coding: utf-8 -*-
-# Copyright 2024 Google LLC
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-# -import os -# try/except added for compatibility with python < 3.8 -try: - from unittest import mock - from unittest.mock import AsyncMock # pragma: NO COVER -except ImportError: # pragma: NO COVER - import mock - -import grpc -from grpc.experimental import aio -import math -import pytest -from google.api_core import api_core_version -from proto.marshal.rules.dates import DurationRule, TimestampRule -from proto.marshal.rules import wrappers - -try: - from google.auth.aio import credentials as ga_credentials_async - HAS_GOOGLE_AUTH_AIO = True -except ImportError: # pragma: NO COVER - HAS_GOOGLE_AUTH_AIO = False - -from google.api_core import client_options -from google.api_core import exceptions as core_exceptions -from google.api_core import gapic_v1 -from google.api_core import grpc_helpers -from google.api_core import grpc_helpers_async -from google.api_core import path_template -from google.api_core import retry as retries -from google.auth import credentials as ga_credentials -from google.auth.exceptions import MutualTLSChannelError -from google.cloud.dataplex_v1.services.content_service import ContentServiceAsyncClient -from google.cloud.dataplex_v1.services.content_service import ContentServiceClient -from google.cloud.dataplex_v1.services.content_service import pagers -from google.cloud.dataplex_v1.services.content_service import transports -from google.cloud.dataplex_v1.types import analyze -from google.cloud.dataplex_v1.types import content -from google.cloud.dataplex_v1.types import content as gcd_content -from google.cloud.location import locations_pb2 -from google.iam.v1 import iam_policy_pb2 # type: ignore -from google.iam.v1 import options_pb2 # type: ignore -from google.iam.v1 import policy_pb2 # type: ignore -from google.longrunning import operations_pb2 # type: ignore -from google.oauth2 import service_account -from google.protobuf import field_mask_pb2 # type: ignore -from google.protobuf import timestamp_pb2 # type: ignore -from google.type import expr_pb2 # type: ignore -import google.auth - - -async def mock_async_gen(data, chunk_size=1): - for i in range(0, len(data)): # pragma: NO COVER - chunk = data[i : i + chunk_size] - yield chunk.encode("utf-8") - -def client_cert_source_callback(): - return b"cert bytes", b"key bytes" - -# TODO: use async auth anon credentials by default once the minimum version of google-auth is upgraded. -# See related issue: https://github.com/googleapis/gapic-generator-python/issues/2107. -def async_anonymous_credentials(): - if HAS_GOOGLE_AUTH_AIO: - return ga_credentials_async.AnonymousCredentials() - return ga_credentials.AnonymousCredentials() - -# If default endpoint is localhost, then default mtls endpoint will be the same. -# This method modifies the default endpoint so the client can produce a different -# mtls endpoint for endpoint testing purposes. -def modify_default_endpoint(client): - return "foo.googleapis.com" if ("localhost" in client.DEFAULT_ENDPOINT) else client.DEFAULT_ENDPOINT - -# If default endpoint template is localhost, then default mtls endpoint will be the same. -# This method modifies the default endpoint template so the client can produce a different -# mtls endpoint for endpoint testing purposes. 
-def modify_default_endpoint_template(client): - return "test.{UNIVERSE_DOMAIN}" if ("localhost" in client._DEFAULT_ENDPOINT_TEMPLATE) else client._DEFAULT_ENDPOINT_TEMPLATE - - -def test__get_default_mtls_endpoint(): - api_endpoint = "example.googleapis.com" - api_mtls_endpoint = "example.mtls.googleapis.com" - sandbox_endpoint = "example.sandbox.googleapis.com" - sandbox_mtls_endpoint = "example.mtls.sandbox.googleapis.com" - non_googleapi = "api.example.com" - - assert ContentServiceClient._get_default_mtls_endpoint(None) is None - assert ContentServiceClient._get_default_mtls_endpoint(api_endpoint) == api_mtls_endpoint - assert ContentServiceClient._get_default_mtls_endpoint(api_mtls_endpoint) == api_mtls_endpoint - assert ContentServiceClient._get_default_mtls_endpoint(sandbox_endpoint) == sandbox_mtls_endpoint - assert ContentServiceClient._get_default_mtls_endpoint(sandbox_mtls_endpoint) == sandbox_mtls_endpoint - assert ContentServiceClient._get_default_mtls_endpoint(non_googleapi) == non_googleapi - -def test__read_environment_variables(): - assert ContentServiceClient._read_environment_variables() == (False, "auto", None) - - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): - assert ContentServiceClient._read_environment_variables() == (True, "auto", None) - - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "false"}): - assert ContentServiceClient._read_environment_variables() == (False, "auto", None) - - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"}): - with pytest.raises(ValueError) as excinfo: - ContentServiceClient._read_environment_variables() - assert str(excinfo.value) == "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" - - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): - assert ContentServiceClient._read_environment_variables() == (False, "never", None) - - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): - assert ContentServiceClient._read_environment_variables() == (False, "always", None) - - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"}): - assert ContentServiceClient._read_environment_variables() == (False, "auto", None) - - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): - with pytest.raises(MutualTLSChannelError) as excinfo: - ContentServiceClient._read_environment_variables() - assert str(excinfo.value) == "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" - - with mock.patch.dict(os.environ, {"GOOGLE_CLOUD_UNIVERSE_DOMAIN": "foo.com"}): - assert ContentServiceClient._read_environment_variables() == (False, "auto", "foo.com") - -def test__get_client_cert_source(): - mock_provided_cert_source = mock.Mock() - mock_default_cert_source = mock.Mock() - - assert ContentServiceClient._get_client_cert_source(None, False) is None - assert ContentServiceClient._get_client_cert_source(mock_provided_cert_source, False) is None - assert ContentServiceClient._get_client_cert_source(mock_provided_cert_source, True) == mock_provided_cert_source - - with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=True): - with mock.patch('google.auth.transport.mtls.default_client_cert_source', return_value=mock_default_cert_source): - assert ContentServiceClient._get_client_cert_source(None, True) is mock_default_cert_source - assert 
ContentServiceClient._get_client_cert_source(mock_provided_cert_source, "true") is mock_provided_cert_source - -@mock.patch.object(ContentServiceClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(ContentServiceClient)) -@mock.patch.object(ContentServiceAsyncClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(ContentServiceAsyncClient)) -def test__get_api_endpoint(): - api_override = "foo.com" - mock_client_cert_source = mock.Mock() - default_universe = ContentServiceClient._DEFAULT_UNIVERSE - default_endpoint = ContentServiceClient._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=default_universe) - mock_universe = "bar.com" - mock_endpoint = ContentServiceClient._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=mock_universe) - - assert ContentServiceClient._get_api_endpoint(api_override, mock_client_cert_source, default_universe, "always") == api_override - assert ContentServiceClient._get_api_endpoint(None, mock_client_cert_source, default_universe, "auto") == ContentServiceClient.DEFAULT_MTLS_ENDPOINT - assert ContentServiceClient._get_api_endpoint(None, None, default_universe, "auto") == default_endpoint - assert ContentServiceClient._get_api_endpoint(None, None, default_universe, "always") == ContentServiceClient.DEFAULT_MTLS_ENDPOINT - assert ContentServiceClient._get_api_endpoint(None, mock_client_cert_source, default_universe, "always") == ContentServiceClient.DEFAULT_MTLS_ENDPOINT - assert ContentServiceClient._get_api_endpoint(None, None, mock_universe, "never") == mock_endpoint - assert ContentServiceClient._get_api_endpoint(None, None, default_universe, "never") == default_endpoint - - with pytest.raises(MutualTLSChannelError) as excinfo: - ContentServiceClient._get_api_endpoint(None, mock_client_cert_source, mock_universe, "auto") - assert str(excinfo.value) == "mTLS is not supported in any universe other than googleapis.com." - - -def test__get_universe_domain(): - client_universe_domain = "foo.com" - universe_domain_env = "bar.com" - - assert ContentServiceClient._get_universe_domain(client_universe_domain, universe_domain_env) == client_universe_domain - assert ContentServiceClient._get_universe_domain(None, universe_domain_env) == universe_domain_env - assert ContentServiceClient._get_universe_domain(None, None) == ContentServiceClient._DEFAULT_UNIVERSE - - with pytest.raises(ValueError) as excinfo: - ContentServiceClient._get_universe_domain("", None) - assert str(excinfo.value) == "Universe Domain cannot be an empty string." 
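
test__get_universe_domain above pins a three-level precedence: an explicit client option wins over the GOOGLE_CLOUD_UNIVERSE_DOMAIN variable, which wins over the googleapis.com default. Condensed into a runnable sketch against the same private helpers the tests exercise (domains are illustrative):

import os
from unittest import mock

from google.cloud.dataplex_v1.services.content_service import ContentServiceClient

# The environment supplies the third element of _read_environment_variables().
with mock.patch.dict(os.environ, {"GOOGLE_CLOUD_UNIVERSE_DOMAIN": "bar.com"}):
    _, _, env_universe = ContentServiceClient._read_environment_variables()

assert ContentServiceClient._get_universe_domain("foo.com", env_universe) == "foo.com"
assert ContentServiceClient._get_universe_domain(None, env_universe) == "bar.com"
assert ContentServiceClient._get_universe_domain(None, None) == ContentServiceClient._DEFAULT_UNIVERSE
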
- - -@pytest.mark.parametrize("client_class,transport_name", [ - (ContentServiceClient, "grpc"), - (ContentServiceAsyncClient, "grpc_asyncio"), -]) -def test_content_service_client_from_service_account_info(client_class, transport_name): - creds = ga_credentials.AnonymousCredentials() - with mock.patch.object(service_account.Credentials, 'from_service_account_info') as factory: - factory.return_value = creds - info = {"valid": True} - client = client_class.from_service_account_info(info, transport=transport_name) - assert client.transport._credentials == creds - assert isinstance(client, client_class) - - assert client.transport._host == ( - 'dataplex.googleapis.com:443' - ) - - -@pytest.mark.parametrize("transport_class,transport_name", [ - (transports.ContentServiceGrpcTransport, "grpc"), - (transports.ContentServiceGrpcAsyncIOTransport, "grpc_asyncio"), -]) -def test_content_service_client_service_account_always_use_jwt(transport_class, transport_name): - with mock.patch.object(service_account.Credentials, 'with_always_use_jwt_access', create=True) as use_jwt: - creds = service_account.Credentials(None, None, None) - transport = transport_class(credentials=creds, always_use_jwt_access=True) - use_jwt.assert_called_once_with(True) - - with mock.patch.object(service_account.Credentials, 'with_always_use_jwt_access', create=True) as use_jwt: - creds = service_account.Credentials(None, None, None) - transport = transport_class(credentials=creds, always_use_jwt_access=False) - use_jwt.assert_not_called() - - -@pytest.mark.parametrize("client_class,transport_name", [ - (ContentServiceClient, "grpc"), - (ContentServiceAsyncClient, "grpc_asyncio"), -]) -def test_content_service_client_from_service_account_file(client_class, transport_name): - creds = ga_credentials.AnonymousCredentials() - with mock.patch.object(service_account.Credentials, 'from_service_account_file') as factory: - factory.return_value = creds - client = client_class.from_service_account_file("dummy/file/path.json", transport=transport_name) - assert client.transport._credentials == creds - assert isinstance(client, client_class) - - client = client_class.from_service_account_json("dummy/file/path.json", transport=transport_name) - assert client.transport._credentials == creds - assert isinstance(client, client_class) - - assert client.transport._host == ( - 'dataplex.googleapis.com:443' - ) - - -def test_content_service_client_get_transport_class(): - transport = ContentServiceClient.get_transport_class() - available_transports = [ - transports.ContentServiceGrpcTransport, - ] - assert transport in available_transports - - transport = ContentServiceClient.get_transport_class("grpc") - assert transport == transports.ContentServiceGrpcTransport - - -@pytest.mark.parametrize("client_class,transport_class,transport_name", [ - (ContentServiceClient, transports.ContentServiceGrpcTransport, "grpc"), - (ContentServiceAsyncClient, transports.ContentServiceGrpcAsyncIOTransport, "grpc_asyncio"), -]) -@mock.patch.object(ContentServiceClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(ContentServiceClient)) -@mock.patch.object(ContentServiceAsyncClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(ContentServiceAsyncClient)) -def test_content_service_client_client_options(client_class, transport_class, transport_name): - # Check that if channel is provided we won't create a new one. 
- with mock.patch.object(ContentServiceClient, 'get_transport_class') as gtc: - transport = transport_class( - credentials=ga_credentials.AnonymousCredentials() - ) - client = client_class(transport=transport) - gtc.assert_not_called() - - # Check that if channel is provided via str we will create a new one. - with mock.patch.object(ContentServiceClient, 'get_transport_class') as gtc: - client = client_class(transport=transport_name) - gtc.assert_called() - - # Check the case api_endpoint is provided. - options = client_options.ClientOptions(api_endpoint="squid.clam.whelk") - with mock.patch.object(transport_class, '__init__') as patched: - patched.return_value = None - client = client_class(transport=transport_name, client_options=options) - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host="squid.clam.whelk", - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - - # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is - # "never". - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): - with mock.patch.object(transport_class, '__init__') as patched: - patched.return_value = None - client = client_class(transport=transport_name) - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE), - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - - # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is - # "always". - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): - with mock.patch.object(transport_class, '__init__') as patched: - patched.return_value = None - client = client_class(transport=transport_name) - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host=client.DEFAULT_MTLS_ENDPOINT, - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - - # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has - # unsupported value. - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): - with pytest.raises(MutualTLSChannelError) as excinfo: - client = client_class(transport=transport_name) - assert str(excinfo.value) == "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" - - # Check the case GOOGLE_API_USE_CLIENT_CERTIFICATE has unsupported value. 
- with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"}): - with pytest.raises(ValueError) as excinfo: - client = client_class(transport=transport_name) - assert str(excinfo.value) == "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" - - # Check the case quota_project_id is provided - options = client_options.ClientOptions(quota_project_id="octopus") - with mock.patch.object(transport_class, '__init__') as patched: - patched.return_value = None - client = client_class(client_options=options, transport=transport_name) - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE), - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id="octopus", - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - # Check the case api_endpoint is provided - options = client_options.ClientOptions(api_audience="https://language.googleapis.com") - with mock.patch.object(transport_class, '__init__') as patched: - patched.return_value = None - client = client_class(client_options=options, transport=transport_name) - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE), - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience="https://language.googleapis.com" - ) - -@pytest.mark.parametrize("client_class,transport_class,transport_name,use_client_cert_env", [ - (ContentServiceClient, transports.ContentServiceGrpcTransport, "grpc", "true"), - (ContentServiceAsyncClient, transports.ContentServiceGrpcAsyncIOTransport, "grpc_asyncio", "true"), - (ContentServiceClient, transports.ContentServiceGrpcTransport, "grpc", "false"), - (ContentServiceAsyncClient, transports.ContentServiceGrpcAsyncIOTransport, "grpc_asyncio", "false"), -]) -@mock.patch.object(ContentServiceClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(ContentServiceClient)) -@mock.patch.object(ContentServiceAsyncClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(ContentServiceAsyncClient)) -@mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"}) -def test_content_service_client_mtls_env_auto(client_class, transport_class, transport_name, use_client_cert_env): - # This tests the endpoint autoswitch behavior. Endpoint is autoswitched to the default - # mtls endpoint, if GOOGLE_API_USE_CLIENT_CERTIFICATE is "true" and client cert exists. - - # Check the case client_cert_source is provided. Whether client cert is used depends on - # GOOGLE_API_USE_CLIENT_CERTIFICATE value. 
- with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}): - options = client_options.ClientOptions(client_cert_source=client_cert_source_callback) - with mock.patch.object(transport_class, '__init__') as patched: - patched.return_value = None - client = client_class(client_options=options, transport=transport_name) - - if use_client_cert_env == "false": - expected_client_cert_source = None - expected_host = client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE) - else: - expected_client_cert_source = client_cert_source_callback - expected_host = client.DEFAULT_MTLS_ENDPOINT - - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host=expected_host, - scopes=None, - client_cert_source_for_mtls=expected_client_cert_source, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - - # Check the case ADC client cert is provided. Whether client cert is used depends on - # GOOGLE_API_USE_CLIENT_CERTIFICATE value. - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}): - with mock.patch.object(transport_class, '__init__') as patched: - with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=True): - with mock.patch('google.auth.transport.mtls.default_client_cert_source', return_value=client_cert_source_callback): - if use_client_cert_env == "false": - expected_host = client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE) - expected_client_cert_source = None - else: - expected_host = client.DEFAULT_MTLS_ENDPOINT - expected_client_cert_source = client_cert_source_callback - - patched.return_value = None - client = client_class(transport=transport_name) - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host=expected_host, - scopes=None, - client_cert_source_for_mtls=expected_client_cert_source, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - - # Check the case client_cert_source and ADC client cert are not provided. - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}): - with mock.patch.object(transport_class, '__init__') as patched: - with mock.patch("google.auth.transport.mtls.has_default_client_cert_source", return_value=False): - patched.return_value = None - client = client_class(transport=transport_name) - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE), - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - - -@pytest.mark.parametrize("client_class", [ - ContentServiceClient, ContentServiceAsyncClient -]) -@mock.patch.object(ContentServiceClient, "DEFAULT_ENDPOINT", modify_default_endpoint(ContentServiceClient)) -@mock.patch.object(ContentServiceAsyncClient, "DEFAULT_ENDPOINT", modify_default_endpoint(ContentServiceAsyncClient)) -def test_content_service_client_get_mtls_endpoint_and_cert_source(client_class): - mock_client_cert_source = mock.Mock() - - # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "true". 
- with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): - mock_api_endpoint = "foo" - options = client_options.ClientOptions(client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint) - api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source(options) - assert api_endpoint == mock_api_endpoint - assert cert_source == mock_client_cert_source - - # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "false". - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "false"}): - mock_client_cert_source = mock.Mock() - mock_api_endpoint = "foo" - options = client_options.ClientOptions(client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint) - api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source(options) - assert api_endpoint == mock_api_endpoint - assert cert_source is None - - # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "never". - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): - api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() - assert api_endpoint == client_class.DEFAULT_ENDPOINT - assert cert_source is None - - # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "always". - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): - api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() - assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT - assert cert_source is None - - # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert doesn't exist. - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): - with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=False): - api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() - assert api_endpoint == client_class.DEFAULT_ENDPOINT - assert cert_source is None - - # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert exists. - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): - with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=True): - with mock.patch('google.auth.transport.mtls.default_client_cert_source', return_value=mock_client_cert_source): - api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() - assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT - assert cert_source == mock_client_cert_source - - # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has - # unsupported value. - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): - with pytest.raises(MutualTLSChannelError) as excinfo: - client_class.get_mtls_endpoint_and_cert_source() - - assert str(excinfo.value) == "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" - - # Check the case GOOGLE_API_USE_CLIENT_CERTIFICATE has unsupported value. 
- with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"}): - with pytest.raises(ValueError) as excinfo: - client_class.get_mtls_endpoint_and_cert_source() - - assert str(excinfo.value) == "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" - -@pytest.mark.parametrize("client_class", [ - ContentServiceClient, ContentServiceAsyncClient -]) -@mock.patch.object(ContentServiceClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(ContentServiceClient)) -@mock.patch.object(ContentServiceAsyncClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(ContentServiceAsyncClient)) -def test_content_service_client_client_api_endpoint(client_class): - mock_client_cert_source = client_cert_source_callback - api_override = "foo.com" - default_universe = ContentServiceClient._DEFAULT_UNIVERSE - default_endpoint = ContentServiceClient._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=default_universe) - mock_universe = "bar.com" - mock_endpoint = ContentServiceClient._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=mock_universe) - - # If ClientOptions.api_endpoint is set and GOOGLE_API_USE_CLIENT_CERTIFICATE="true", - # use ClientOptions.api_endpoint as the api endpoint regardless. - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): - with mock.patch("google.auth.transport.requests.AuthorizedSession.configure_mtls_channel"): - options = client_options.ClientOptions(client_cert_source=mock_client_cert_source, api_endpoint=api_override) - client = client_class(client_options=options, credentials=ga_credentials.AnonymousCredentials()) - assert client.api_endpoint == api_override - - # If ClientOptions.api_endpoint is not set and GOOGLE_API_USE_MTLS_ENDPOINT="never", - # use the _DEFAULT_ENDPOINT_TEMPLATE populated with GDU as the api endpoint. - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): - client = client_class(credentials=ga_credentials.AnonymousCredentials()) - assert client.api_endpoint == default_endpoint - - # If ClientOptions.api_endpoint is not set and GOOGLE_API_USE_MTLS_ENDPOINT="always", - # use the DEFAULT_MTLS_ENDPOINT as the api endpoint. - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): - client = client_class(credentials=ga_credentials.AnonymousCredentials()) - assert client.api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT - - # If ClientOptions.api_endpoint is not set, GOOGLE_API_USE_MTLS_ENDPOINT="auto" (default), - # GOOGLE_API_USE_CLIENT_CERTIFICATE="false" (default), default cert source doesn't exist, - # and ClientOptions.universe_domain="bar.com", - # use the _DEFAULT_ENDPOINT_TEMPLATE populated with universe domain as the api endpoint. - options = client_options.ClientOptions() - universe_exists = hasattr(options, "universe_domain") - if universe_exists: - options = client_options.ClientOptions(universe_domain=mock_universe) - client = client_class(client_options=options, credentials=ga_credentials.AnonymousCredentials()) - else: - client = client_class(client_options=options, credentials=ga_credentials.AnonymousCredentials()) - assert client.api_endpoint == (mock_endpoint if universe_exists else default_endpoint) - assert client.universe_domain == (mock_universe if universe_exists else default_universe) - - # If ClientOptions does not have a universe domain attribute and GOOGLE_API_USE_MTLS_ENDPOINT="never", - # use the _DEFAULT_ENDPOINT_TEMPLATE populated with GDU as the api endpoint. 
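
As the first case of test_content_service_client_client_api_endpoint asserts, an explicit ClientOptions.api_endpoint wins over every mTLS and universe-domain rule. A usage sketch with anonymous credentials and an illustrative override:

from google.api_core import client_options as client_options_lib
from google.auth.credentials import AnonymousCredentials
from google.cloud import dataplex_v1

options = client_options_lib.ClientOptions(api_endpoint="foo.com")  # illustrative override
client = dataplex_v1.ContentServiceClient(
    client_options=options,
    credentials=AnonymousCredentials(),
)
assert client.api_endpoint == "foo.com"
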
- options = client_options.ClientOptions() - if hasattr(options, "universe_domain"): - delattr(options, "universe_domain") - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): - client = client_class(client_options=options, credentials=ga_credentials.AnonymousCredentials()) - assert client.api_endpoint == default_endpoint - - -@pytest.mark.parametrize("client_class,transport_class,transport_name", [ - (ContentServiceClient, transports.ContentServiceGrpcTransport, "grpc"), - (ContentServiceAsyncClient, transports.ContentServiceGrpcAsyncIOTransport, "grpc_asyncio"), -]) -def test_content_service_client_client_options_scopes(client_class, transport_class, transport_name): - # Check the case scopes are provided. - options = client_options.ClientOptions( - scopes=["1", "2"], - ) - with mock.patch.object(transport_class, '__init__') as patched: - patched.return_value = None - client = client_class(client_options=options, transport=transport_name) - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE), - scopes=["1", "2"], - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - -@pytest.mark.parametrize("client_class,transport_class,transport_name,grpc_helpers", [ - (ContentServiceClient, transports.ContentServiceGrpcTransport, "grpc", grpc_helpers), - (ContentServiceAsyncClient, transports.ContentServiceGrpcAsyncIOTransport, "grpc_asyncio", grpc_helpers_async), -]) -def test_content_service_client_client_options_credentials_file(client_class, transport_class, transport_name, grpc_helpers): - # Check the case credentials file is provided. - options = client_options.ClientOptions( - credentials_file="credentials.json" - ) - - with mock.patch.object(transport_class, '__init__') as patched: - patched.return_value = None - client = client_class(client_options=options, transport=transport_name) - patched.assert_called_once_with( - credentials=None, - credentials_file="credentials.json", - host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE), - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - -def test_content_service_client_client_options_from_dict(): - with mock.patch('google.cloud.dataplex_v1.services.content_service.transports.ContentServiceGrpcTransport.__init__') as grpc_transport: - grpc_transport.return_value = None - client = ContentServiceClient( - client_options={'api_endpoint': 'squid.clam.whelk'} - ) - grpc_transport.assert_called_once_with( - credentials=None, - credentials_file=None, - host="squid.clam.whelk", - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - - -@pytest.mark.parametrize("client_class,transport_class,transport_name,grpc_helpers", [ - (ContentServiceClient, transports.ContentServiceGrpcTransport, "grpc", grpc_helpers), - (ContentServiceAsyncClient, transports.ContentServiceGrpcAsyncIOTransport, "grpc_asyncio", grpc_helpers_async), -]) -def test_content_service_client_create_channel_credentials_file(client_class, transport_class, transport_name, grpc_helpers): - # Check the case credentials file is provided. 
- options = client_options.ClientOptions( - credentials_file="credentials.json" - ) - - with mock.patch.object(transport_class, '__init__') as patched: - patched.return_value = None - client = client_class(client_options=options, transport=transport_name) - patched.assert_called_once_with( - credentials=None, - credentials_file="credentials.json", - host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE), - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - - # test that the credentials from file are saved and used as the credentials. - with mock.patch.object( - google.auth, "load_credentials_from_file", autospec=True - ) as load_creds, mock.patch.object( - google.auth, "default", autospec=True - ) as adc, mock.patch.object( - grpc_helpers, "create_channel" - ) as create_channel: - creds = ga_credentials.AnonymousCredentials() - file_creds = ga_credentials.AnonymousCredentials() - load_creds.return_value = (file_creds, None) - adc.return_value = (creds, None) - client = client_class(client_options=options, transport=transport_name) - create_channel.assert_called_with( - "dataplex.googleapis.com:443", - credentials=file_creds, - credentials_file=None, - quota_project_id=None, - default_scopes=( - 'https://www.googleapis.com/auth/cloud-platform', -), - scopes=None, - default_host="dataplex.googleapis.com", - ssl_credentials=None, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ], - ) - - -@pytest.mark.parametrize("request_type", [ - gcd_content.CreateContentRequest, - dict, -]) -def test_create_content(request_type, transport: str = 'grpc'): - client = ContentServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_content), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = analyze.Content( - name='name_value', - uid='uid_value', - path='path_value', - description='description_value', - data_text='data_text_value', - ) - response = client.create_content(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - request = gcd_content.CreateContentRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, analyze.Content) - assert response.name == 'name_value' - assert response.uid == 'uid_value' - assert response.path == 'path_value' - assert response.description == 'description_value' - - -def test_create_content_non_empty_request_with_auto_populated_field(): - # This test is a coverage failsafe to make sure that UUID4 fields are - # automatically populated, according to AIP-4235, with non-empty requests. - client = ContentServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. 
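
Farther up, test_content_service_client_create_channel_credentials_file pins the transport's channel defaults: credentials loaded from the file are handed to grpc_helpers.create_channel with the cloud-platform scope and unlimited gRPC message sizes. The user-facing side is a single ClientOptions field; a sketch with a placeholder path, left unconstructed so nothing tries to read the file:

from google.api_core import client_options as client_options_lib

options = client_options_lib.ClientOptions(credentials_file="credentials.json")  # placeholder path
# client = dataplex_v1.ContentServiceClient(client_options=options)
# The transport would then dial dataplex.googleapis.com:443 with
# grpc.max_send_message_length and grpc.max_receive_message_length set to -1.
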
- request = gcd_content.CreateContentRequest( - parent='parent_value', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_content), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client.create_content(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == gcd_content.CreateContentRequest( - parent='parent_value', - ) - -def test_create_content_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = ContentServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.create_content in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.create_content] = mock_rpc - request = {} - client.create_content(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - client.create_content(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_create_content_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: - client = ContentServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._client._transport.create_content in client._client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.AsyncMock() - mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[client._client._transport.create_content] = mock_rpc - - request = {} - await client.create_content(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - await client.create_content(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_create_content_async(transport: str = 'grpc_asyncio', request_type=gcd_content.CreateContentRequest): - client = ContentServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_content), - '__call__') as call: - # Designate an appropriate return value for the call. 
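
The use_cached_wrapped_rpc pair above encodes a performance contract: _prep_wrapped_messages wraps each RPC once at transport construction, attaching default retry, timeout, and metadata, and caches the result in _wrapped_methods, so no wrapper is rebuilt per call. The lookup those tests perform, sketched with anonymous credentials:

from google.auth.credentials import AnonymousCredentials
from google.cloud import dataplex_v1

client = dataplex_v1.ContentServiceClient(credentials=AnonymousCredentials())
# Each transport method maps to its pre-wrapped callable.
wrapped = client._transport._wrapped_methods[client._transport.create_content]
assert callable(wrapped)
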
- call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(analyze.Content( - name='name_value', - uid='uid_value', - path='path_value', - description='description_value', - )) - response = await client.create_content(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - request = gcd_content.CreateContentRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, analyze.Content) - assert response.name == 'name_value' - assert response.uid == 'uid_value' - assert response.path == 'path_value' - assert response.description == 'description_value' - - -@pytest.mark.asyncio -async def test_create_content_async_from_dict(): - await test_create_content_async(request_type=dict) - -def test_create_content_field_headers(): - client = ContentServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = gcd_content.CreateContentRequest() - - request.parent = 'parent_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_content), - '__call__') as call: - call.return_value = analyze.Content() - client.create_content(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'parent=parent_value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_create_content_field_headers_async(): - client = ContentServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = gcd_content.CreateContentRequest() - - request.parent = 'parent_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_content), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(analyze.Content()) - await client.create_content(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'parent=parent_value', - ) in kw['metadata'] - - -def test_create_content_flattened(): - client = ContentServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_content), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = analyze.Content() - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.create_content( - parent='parent_value', - content=analyze.Content(name='name_value'), - ) - - # Establish that the underlying call was made with the expected - # request object values. 
- assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - arg = args[0].parent - mock_val = 'parent_value' - assert arg == mock_val - arg = args[0].content - mock_val = analyze.Content(name='name_value') - assert arg == mock_val - - -def test_create_content_flattened_error(): - client = ContentServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.create_content( - gcd_content.CreateContentRequest(), - parent='parent_value', - content=analyze.Content(name='name_value'), - ) - -@pytest.mark.asyncio -async def test_create_content_flattened_async(): - client = ContentServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_content), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = analyze.Content() - - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(analyze.Content()) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.create_content( - parent='parent_value', - content=analyze.Content(name='name_value'), - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].parent - mock_val = 'parent_value' - assert arg == mock_val - arg = args[0].content - mock_val = analyze.Content(name='name_value') - assert arg == mock_val - -@pytest.mark.asyncio -async def test_create_content_flattened_error_async(): - client = ContentServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - await client.create_content( - gcd_content.CreateContentRequest(), - parent='parent_value', - content=analyze.Content(name='name_value'), - ) - - -@pytest.mark.parametrize("request_type", [ - gcd_content.UpdateContentRequest, - dict, -]) -def test_update_content(request_type, transport: str = 'grpc'): - client = ContentServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_content), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = analyze.Content( - name='name_value', - uid='uid_value', - path='path_value', - description='description_value', - data_text='data_text_value', - ) - response = client.update_content(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - request = gcd_content.UpdateContentRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. 
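
The flattened-call tests around here encode the GAPIC calling convention: a method accepts either a request object or flattened keyword fields, and mixing the two fails fast with ValueError before any RPC is attempted. A runnable sketch with anonymous credentials and illustrative values:

import pytest
from google.auth.credentials import AnonymousCredentials
from google.cloud import dataplex_v1

client = dataplex_v1.ContentServiceClient(credentials=AnonymousCredentials())
with pytest.raises(ValueError):
    client.create_content(
        dataplex_v1.CreateContentRequest(),              # request object...
        parent="parent_value",                           # ...plus flattened fields
        content=dataplex_v1.Content(name="name_value"),  # raises before any network call
    )
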
- assert isinstance(response, analyze.Content) - assert response.name == 'name_value' - assert response.uid == 'uid_value' - assert response.path == 'path_value' - assert response.description == 'description_value' - - -def test_update_content_non_empty_request_with_auto_populated_field(): - # This test is a coverage failsafe to make sure that UUID4 fields are - # automatically populated, according to AIP-4235, with non-empty requests. - client = ContentServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. - request = gcd_content.UpdateContentRequest( - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_content), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client.update_content(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == gcd_content.UpdateContentRequest( - ) - -def test_update_content_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = ContentServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.update_content in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.update_content] = mock_rpc - request = {} - client.update_content(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - client.update_content(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_update_content_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: - client = ContentServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._client._transport.update_content in client._client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.AsyncMock() - mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[client._client._transport.update_content] = mock_rpc - - request = {} - await client.update_content(request) - - # Establish that the underlying gRPC stub method was called. 
- assert mock_rpc.call_count == 1 - - await client.update_content(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_update_content_async(transport: str = 'grpc_asyncio', request_type=gcd_content.UpdateContentRequest): - client = ContentServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_content), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(analyze.Content( - name='name_value', - uid='uid_value', - path='path_value', - description='description_value', - )) - response = await client.update_content(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - request = gcd_content.UpdateContentRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, analyze.Content) - assert response.name == 'name_value' - assert response.uid == 'uid_value' - assert response.path == 'path_value' - assert response.description == 'description_value' - - -@pytest.mark.asyncio -async def test_update_content_async_from_dict(): - await test_update_content_async(request_type=dict) - -def test_update_content_field_headers(): - client = ContentServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = gcd_content.UpdateContentRequest() - - request.content.name = 'name_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_content), - '__call__') as call: - call.return_value = analyze.Content() - client.update_content(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'content.name=name_value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_update_content_field_headers_async(): - client = ContentServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = gcd_content.UpdateContentRequest() - - request.content.name = 'name_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_content), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(analyze.Content()) - await client.update_content(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. 
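
The field-headers tests assert implicit routing: the resource name on the request is folded into an x-goog-request-params metadata entry so the service can route the call. The header can be reproduced with api-core's routing_header helper (field and value are illustrative):

from google.api_core.gapic_v1 import routing_header

metadata = routing_header.to_grpc_metadata([("content.name", "name_value")])
assert metadata == ("x-goog-request-params", "content.name=name_value")
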
- _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'content.name=name_value', - ) in kw['metadata'] - - -def test_update_content_flattened(): - client = ContentServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_content), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = analyze.Content() - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.update_content( - content=analyze.Content(name='name_value'), - update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - arg = args[0].content - mock_val = analyze.Content(name='name_value') - assert arg == mock_val - arg = args[0].update_mask - mock_val = field_mask_pb2.FieldMask(paths=['paths_value']) - assert arg == mock_val - - -def test_update_content_flattened_error(): - client = ContentServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.update_content( - gcd_content.UpdateContentRequest(), - content=analyze.Content(name='name_value'), - update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), - ) - -@pytest.mark.asyncio -async def test_update_content_flattened_async(): - client = ContentServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_content), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = analyze.Content() - - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(analyze.Content()) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.update_content( - content=analyze.Content(name='name_value'), - update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].content - mock_val = analyze.Content(name='name_value') - assert arg == mock_val - arg = args[0].update_mask - mock_val = field_mask_pb2.FieldMask(paths=['paths_value']) - assert arg == mock_val - -@pytest.mark.asyncio -async def test_update_content_flattened_error_async(): - client = ContentServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. 
- with pytest.raises(ValueError): - await client.update_content( - gcd_content.UpdateContentRequest(), - content=analyze.Content(name='name_value'), - update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), - ) - - -@pytest.mark.parametrize("request_type", [ - content.DeleteContentRequest, - dict, -]) -def test_delete_content(request_type, transport: str = 'grpc'): - client = ContentServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_content), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = None - response = client.delete_content(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - request = content.DeleteContentRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert response is None - - -def test_delete_content_non_empty_request_with_auto_populated_field(): - # This test is a coverage failsafe to make sure that UUID4 fields are - # automatically populated, according to AIP-4235, with non-empty requests. - client = ContentServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. - request = content.DeleteContentRequest( - name='name_value', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_content), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client.delete_content(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == content.DeleteContentRequest( - name='name_value', - ) - -def test_delete_content_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = ContentServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.delete_content in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.delete_content] = mock_rpc - request = {} - client.delete_content(request) - - # Establish that the underlying gRPC stub method was called. 
- assert mock_rpc.call_count == 1 - - client.delete_content(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_delete_content_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: - client = ContentServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._client._transport.delete_content in client._client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.AsyncMock() - mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[client._client._transport.delete_content] = mock_rpc - - request = {} - await client.delete_content(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - await client.delete_content(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_delete_content_async(transport: str = 'grpc_asyncio', request_type=content.DeleteContentRequest): - client = ContentServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_content), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - response = await client.delete_content(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - request = content.DeleteContentRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert response is None - - -@pytest.mark.asyncio -async def test_delete_content_async_from_dict(): - await test_delete_content_async(request_type=dict) - -def test_delete_content_field_headers(): - client = ContentServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = content.DeleteContentRequest() - - request.name = 'name_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_content), - '__call__') as call: - call.return_value = None - client.delete_content(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. 
- _, _, kw = call.mock_calls[0]
- assert (
- 'x-goog-request-params',
- 'name=name_value',
- ) in kw['metadata']
-
-
-@pytest.mark.asyncio
-async def test_delete_content_field_headers_async():
- client = ContentServiceAsyncClient(
- credentials=async_anonymous_credentials(),
- )
-
- # Any value that is part of the HTTP/1.1 URI should be sent as
- # a field header. Set these to a non-empty value.
- request = content.DeleteContentRequest()
-
- request.name = 'name_value'
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client.transport.delete_content),
- '__call__') as call:
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None)
- await client.delete_content(request)
-
- # Establish that the underlying gRPC stub method was called.
- assert len(call.mock_calls)
- _, args, _ = call.mock_calls[0]
- assert args[0] == request
-
- # Establish that the field header was sent.
- _, _, kw = call.mock_calls[0]
- assert (
- 'x-goog-request-params',
- 'name=name_value',
- ) in kw['metadata']
-
-
-def test_delete_content_flattened():
- client = ContentServiceClient(
- credentials=ga_credentials.AnonymousCredentials(),
- )
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client.transport.delete_content),
- '__call__') as call:
- # Designate an appropriate return value for the call.
- call.return_value = None
- # Call the method with a truthy value for each flattened field,
- # using the keyword arguments to the method.
- client.delete_content(
- name='name_value',
- )
-
- # Establish that the underlying call was made with the expected
- # request object values.
- assert len(call.mock_calls) == 1
- _, args, _ = call.mock_calls[0]
- arg = args[0].name
- mock_val = 'name_value'
- assert arg == mock_val
-
-
-def test_delete_content_flattened_error():
- client = ContentServiceClient(
- credentials=ga_credentials.AnonymousCredentials(),
- )
-
- # Attempting to call a method with both a request object and flattened
- # fields is an error.
- with pytest.raises(ValueError):
- client.delete_content(
- content.DeleteContentRequest(),
- name='name_value',
- )
-
-@pytest.mark.asyncio
-async def test_delete_content_flattened_async():
- client = ContentServiceAsyncClient(
- credentials=async_anonymous_credentials(),
- )
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client.transport.delete_content),
- '__call__') as call:
- # Designate an appropriate return value for the call.
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None)
- # Call the method with a truthy value for each flattened field,
- # using the keyword arguments to the method.
- response = await client.delete_content(
- name='name_value',
- )
-
- # Establish that the underlying call was made with the expected
- # request object values.
- assert len(call.mock_calls)
- _, args, _ = call.mock_calls[0]
- arg = args[0].name
- mock_val = 'name_value'
- assert arg == mock_val
-
-@pytest.mark.asyncio
-async def test_delete_content_flattened_error_async():
- client = ContentServiceAsyncClient(
- credentials=async_anonymous_credentials(),
- )
-
- # Attempting to call a method with both a request object and flattened
- # fields is an error.
- with pytest.raises(ValueError): - await client.delete_content( - content.DeleteContentRequest(), - name='name_value', - ) - - -@pytest.mark.parametrize("request_type", [ - content.GetContentRequest, - dict, -]) -def test_get_content(request_type, transport: str = 'grpc'): - client = ContentServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_content), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = analyze.Content( - name='name_value', - uid='uid_value', - path='path_value', - description='description_value', - data_text='data_text_value', - ) - response = client.get_content(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - request = content.GetContentRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, analyze.Content) - assert response.name == 'name_value' - assert response.uid == 'uid_value' - assert response.path == 'path_value' - assert response.description == 'description_value' - - -def test_get_content_non_empty_request_with_auto_populated_field(): - # This test is a coverage failsafe to make sure that UUID4 fields are - # automatically populated, according to AIP-4235, with non-empty requests. - client = ContentServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. - request = content.GetContentRequest( - name='name_value', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_content), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client.get_content(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == content.GetContentRequest( - name='name_value', - ) - -def test_get_content_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = ContentServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.get_content in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.get_content] = mock_rpc - request = {} - client.get_content(request) - - # Establish that the underlying gRPC stub method was called. 
- assert mock_rpc.call_count == 1
-
- client.get_content(request)
-
- # Establish that a new wrapper was not created for this call
- assert wrapper_fn.call_count == 0
- assert mock_rpc.call_count == 2
-
-@pytest.mark.asyncio
-async def test_get_content_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"):
- # Clients should use _prep_wrapped_messages to create cached wrapped rpcs,
- # instead of constructing them on each call
- with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn:
- client = ContentServiceAsyncClient(
- credentials=async_anonymous_credentials(),
- transport=transport,
- )
-
- # Should wrap all calls on client creation
- assert wrapper_fn.call_count > 0
- wrapper_fn.reset_mock()
-
- # Ensure method has been cached
- assert client._client._transport.get_content in client._client._transport._wrapped_methods
-
- # Replace cached wrapped function with mock
- mock_rpc = mock.AsyncMock()
- mock_rpc.return_value = mock.Mock()
- client._client._transport._wrapped_methods[client._client._transport.get_content] = mock_rpc
-
- request = {}
- await client.get_content(request)
-
- # Establish that the underlying gRPC stub method was called.
- assert mock_rpc.call_count == 1
-
- await client.get_content(request)
-
- # Establish that a new wrapper was not created for this call
- assert wrapper_fn.call_count == 0
- assert mock_rpc.call_count == 2
-
-@pytest.mark.asyncio
-async def test_get_content_async(transport: str = 'grpc_asyncio', request_type=content.GetContentRequest):
- client = ContentServiceAsyncClient(
- credentials=async_anonymous_credentials(),
- transport=transport,
- )
-
- # Everything is optional in proto3 as far as the runtime is concerned,
- # and we are mocking out the actual API, so just send an empty request.
- request = request_type()
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client.transport.get_content),
- '__call__') as call:
- # Designate an appropriate return value for the call.
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(analyze.Content(
- name='name_value',
- uid='uid_value',
- path='path_value',
- description='description_value',
- ))
- response = await client.get_content(request)
-
- # Establish that the underlying gRPC stub method was called.
- assert len(call.mock_calls)
- _, args, _ = call.mock_calls[0]
- request = content.GetContentRequest()
- assert args[0] == request
-
- # Establish that the response is the type that we expect.
- assert isinstance(response, analyze.Content)
- assert response.name == 'name_value'
- assert response.uid == 'uid_value'
- assert response.path == 'path_value'
- assert response.description == 'description_value'
-
-
-@pytest.mark.asyncio
-async def test_get_content_async_from_dict():
- await test_get_content_async(request_type=dict)
-
-def test_get_content_field_headers():
- client = ContentServiceClient(
- credentials=ga_credentials.AnonymousCredentials(),
- )
-
- # Any value that is part of the HTTP/1.1 URI should be sent as
- # a field header. Set these to a non-empty value.
- request = content.GetContentRequest()
-
- request.name = 'name_value'
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client.transport.get_content),
- '__call__') as call:
- call.return_value = analyze.Content()
- client.get_content(request)
-
- # Establish that the underlying gRPC stub method was called.
- assert len(call.mock_calls) == 1
- _, args, _ = call.mock_calls[0]
- assert args[0] == request
-
- # Establish that the field header was sent.
- _, _, kw = call.mock_calls[0]
- assert (
- 'x-goog-request-params',
- 'name=name_value',
- ) in kw['metadata']
-
-
-@pytest.mark.asyncio
-async def test_get_content_field_headers_async():
- client = ContentServiceAsyncClient(
- credentials=async_anonymous_credentials(),
- )
-
- # Any value that is part of the HTTP/1.1 URI should be sent as
- # a field header. Set these to a non-empty value.
- request = content.GetContentRequest()
-
- request.name = 'name_value'
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client.transport.get_content),
- '__call__') as call:
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(analyze.Content())
- await client.get_content(request)
-
- # Establish that the underlying gRPC stub method was called.
- assert len(call.mock_calls)
- _, args, _ = call.mock_calls[0]
- assert args[0] == request
-
- # Establish that the field header was sent.
- _, _, kw = call.mock_calls[0]
- assert (
- 'x-goog-request-params',
- 'name=name_value',
- ) in kw['metadata']
-
-
-def test_get_content_flattened():
- client = ContentServiceClient(
- credentials=ga_credentials.AnonymousCredentials(),
- )
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client.transport.get_content),
- '__call__') as call:
- # Designate an appropriate return value for the call.
- call.return_value = analyze.Content()
- # Call the method with a truthy value for each flattened field,
- # using the keyword arguments to the method.
- client.get_content(
- name='name_value',
- )
-
- # Establish that the underlying call was made with the expected
- # request object values.
- assert len(call.mock_calls) == 1
- _, args, _ = call.mock_calls[0]
- arg = args[0].name
- mock_val = 'name_value'
- assert arg == mock_val
-
-
-def test_get_content_flattened_error():
- client = ContentServiceClient(
- credentials=ga_credentials.AnonymousCredentials(),
- )
-
- # Attempting to call a method with both a request object and flattened
- # fields is an error.
- with pytest.raises(ValueError):
- client.get_content(
- content.GetContentRequest(),
- name='name_value',
- )
-
-@pytest.mark.asyncio
-async def test_get_content_flattened_async():
- client = ContentServiceAsyncClient(
- credentials=async_anonymous_credentials(),
- )
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client.transport.get_content),
- '__call__') as call:
- # Designate an appropriate return value for the call.
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(analyze.Content())
- # Call the method with a truthy value for each flattened field,
- # using the keyword arguments to the method.
- response = await client.get_content(
- name='name_value',
- )
-
- # Establish that the underlying call was made with the expected
- # request object values.
- assert len(call.mock_calls)
- _, args, _ = call.mock_calls[0]
- arg = args[0].name
- mock_val = 'name_value'
- assert arg == mock_val
-
-@pytest.mark.asyncio
-async def test_get_content_flattened_error_async():
- client = ContentServiceAsyncClient(
- credentials=async_anonymous_credentials(),
- )
-
- # Attempting to call a method with both a request object and flattened
- # fields is an error.
- with pytest.raises(ValueError): - await client.get_content( - content.GetContentRequest(), - name='name_value', - ) - - -@pytest.mark.parametrize("request_type", [ - iam_policy_pb2.GetIamPolicyRequest, - dict, -]) -def test_get_iam_policy(request_type, transport: str = 'grpc'): - client = ContentServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_iam_policy), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = policy_pb2.Policy( - version=774, - etag=b'etag_blob', - ) - response = client.get_iam_policy(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - request = iam_policy_pb2.GetIamPolicyRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, policy_pb2.Policy) - assert response.version == 774 - assert response.etag == b'etag_blob' - - -def test_get_iam_policy_non_empty_request_with_auto_populated_field(): - # This test is a coverage failsafe to make sure that UUID4 fields are - # automatically populated, according to AIP-4235, with non-empty requests. - client = ContentServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. - request = iam_policy_pb2.GetIamPolicyRequest( - resource='resource_value', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_iam_policy), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client.get_iam_policy(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == iam_policy_pb2.GetIamPolicyRequest( - resource='resource_value', - ) - -def test_get_iam_policy_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = ContentServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.get_iam_policy in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.get_iam_policy] = mock_rpc - request = {} - client.get_iam_policy(request) - - # Establish that the underlying gRPC stub method was called. 
- assert mock_rpc.call_count == 1
-
- client.get_iam_policy(request)
-
- # Establish that a new wrapper was not created for this call
- assert wrapper_fn.call_count == 0
- assert mock_rpc.call_count == 2
-
-@pytest.mark.asyncio
-async def test_get_iam_policy_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"):
- # Clients should use _prep_wrapped_messages to create cached wrapped rpcs,
- # instead of constructing them on each call
- with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn:
- client = ContentServiceAsyncClient(
- credentials=async_anonymous_credentials(),
- transport=transport,
- )
-
- # Should wrap all calls on client creation
- assert wrapper_fn.call_count > 0
- wrapper_fn.reset_mock()
-
- # Ensure method has been cached
- assert client._client._transport.get_iam_policy in client._client._transport._wrapped_methods
-
- # Replace cached wrapped function with mock
- mock_rpc = mock.AsyncMock()
- mock_rpc.return_value = mock.Mock()
- client._client._transport._wrapped_methods[client._client._transport.get_iam_policy] = mock_rpc
-
- request = {}
- await client.get_iam_policy(request)
-
- # Establish that the underlying gRPC stub method was called.
- assert mock_rpc.call_count == 1
-
- await client.get_iam_policy(request)
-
- # Establish that a new wrapper was not created for this call
- assert wrapper_fn.call_count == 0
- assert mock_rpc.call_count == 2
-
-@pytest.mark.asyncio
-async def test_get_iam_policy_async(transport: str = 'grpc_asyncio', request_type=iam_policy_pb2.GetIamPolicyRequest):
- client = ContentServiceAsyncClient(
- credentials=async_anonymous_credentials(),
- transport=transport,
- )
-
- # Everything is optional in proto3 as far as the runtime is concerned,
- # and we are mocking out the actual API, so just send an empty request.
- request = request_type()
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client.transport.get_iam_policy),
- '__call__') as call:
- # Designate an appropriate return value for the call.
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(policy_pb2.Policy(
- version=774,
- etag=b'etag_blob',
- ))
- response = await client.get_iam_policy(request)
-
- # Establish that the underlying gRPC stub method was called.
- assert len(call.mock_calls)
- _, args, _ = call.mock_calls[0]
- request = iam_policy_pb2.GetIamPolicyRequest()
- assert args[0] == request
-
- # Establish that the response is the type that we expect.
- assert isinstance(response, policy_pb2.Policy)
- assert response.version == 774
- assert response.etag == b'etag_blob'
-
-
-@pytest.mark.asyncio
-async def test_get_iam_policy_async_from_dict():
- await test_get_iam_policy_async(request_type=dict)
-
-def test_get_iam_policy_field_headers():
- client = ContentServiceClient(
- credentials=ga_credentials.AnonymousCredentials(),
- )
-
- # Any value that is part of the HTTP/1.1 URI should be sent as
- # a field header. Set these to a non-empty value.
- request = iam_policy_pb2.GetIamPolicyRequest()
-
- request.resource = 'resource_value'
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client.transport.get_iam_policy),
- '__call__') as call:
- call.return_value = policy_pb2.Policy()
- client.get_iam_policy(request)
-
- # Establish that the underlying gRPC stub method was called.
- assert len(call.mock_calls) == 1
- _, args, _ = call.mock_calls[0]
- assert args[0] == request
-
- # Establish that the field header was sent.
- _, _, kw = call.mock_calls[0]
- assert (
- 'x-goog-request-params',
- 'resource=resource_value',
- ) in kw['metadata']
-
-
-@pytest.mark.asyncio
-async def test_get_iam_policy_field_headers_async():
- client = ContentServiceAsyncClient(
- credentials=async_anonymous_credentials(),
- )
-
- # Any value that is part of the HTTP/1.1 URI should be sent as
- # a field header. Set these to a non-empty value.
- request = iam_policy_pb2.GetIamPolicyRequest()
-
- request.resource = 'resource_value'
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client.transport.get_iam_policy),
- '__call__') as call:
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(policy_pb2.Policy())
- await client.get_iam_policy(request)
-
- # Establish that the underlying gRPC stub method was called.
- assert len(call.mock_calls)
- _, args, _ = call.mock_calls[0]
- assert args[0] == request
-
- # Establish that the field header was sent.
- _, _, kw = call.mock_calls[0]
- assert (
- 'x-goog-request-params',
- 'resource=resource_value',
- ) in kw['metadata']
-
-def test_get_iam_policy_from_dict_foreign():
- client = ContentServiceClient(
- credentials=ga_credentials.AnonymousCredentials(),
- )
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client.transport.get_iam_policy),
- '__call__') as call:
- # Designate an appropriate return value for the call.
- call.return_value = policy_pb2.Policy()
- response = client.get_iam_policy(request={
- 'resource': 'resource_value',
- 'options': options_pb2.GetPolicyOptions(requested_policy_version=2598),
- }
- )
- call.assert_called()
-
-
-def test_get_iam_policy_flattened():
- client = ContentServiceClient(
- credentials=ga_credentials.AnonymousCredentials(),
- )
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client.transport.get_iam_policy),
- '__call__') as call:
- # Designate an appropriate return value for the call.
- call.return_value = policy_pb2.Policy()
- # Call the method with a truthy value for each flattened field,
- # using the keyword arguments to the method.
- client.get_iam_policy(
- resource='resource_value',
- )
-
- # Establish that the underlying call was made with the expected
- # request object values.
- assert len(call.mock_calls) == 1
- _, args, _ = call.mock_calls[0]
- arg = args[0].resource
- mock_val = 'resource_value'
- assert arg == mock_val
-
-
-def test_get_iam_policy_flattened_error():
- client = ContentServiceClient(
- credentials=ga_credentials.AnonymousCredentials(),
- )
-
- # Attempting to call a method with both a request object and flattened
- # fields is an error.
- with pytest.raises(ValueError):
- client.get_iam_policy(
- iam_policy_pb2.GetIamPolicyRequest(),
- resource='resource_value',
- )
-
-@pytest.mark.asyncio
-async def test_get_iam_policy_flattened_async():
- client = ContentServiceAsyncClient(
- credentials=async_anonymous_credentials(),
- )
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client.transport.get_iam_policy),
- '__call__') as call:
- # Designate an appropriate return value for the call.
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(policy_pb2.Policy())
- # Call the method with a truthy value for each flattened field,
- # using the keyword arguments to the method.
- response = await client.get_iam_policy( - resource='resource_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].resource - mock_val = 'resource_value' - assert arg == mock_val - -@pytest.mark.asyncio -async def test_get_iam_policy_flattened_error_async(): - client = ContentServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - await client.get_iam_policy( - iam_policy_pb2.GetIamPolicyRequest(), - resource='resource_value', - ) - - -@pytest.mark.parametrize("request_type", [ - iam_policy_pb2.SetIamPolicyRequest, - dict, -]) -def test_set_iam_policy(request_type, transport: str = 'grpc'): - client = ContentServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.set_iam_policy), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = policy_pb2.Policy( - version=774, - etag=b'etag_blob', - ) - response = client.set_iam_policy(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - request = iam_policy_pb2.SetIamPolicyRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, policy_pb2.Policy) - assert response.version == 774 - assert response.etag == b'etag_blob' - - -def test_set_iam_policy_non_empty_request_with_auto_populated_field(): - # This test is a coverage failsafe to make sure that UUID4 fields are - # automatically populated, according to AIP-4235, with non-empty requests. - client = ContentServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. - request = iam_policy_pb2.SetIamPolicyRequest( - resource='resource_value', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.set_iam_policy), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. 
- client.set_iam_policy(request=request)
- call.assert_called()
- _, args, _ = call.mock_calls[0]
- assert args[0] == iam_policy_pb2.SetIamPolicyRequest(
- resource='resource_value',
- )
-
-def test_set_iam_policy_use_cached_wrapped_rpc():
- # Clients should use _prep_wrapped_messages to create cached wrapped rpcs,
- # instead of constructing them on each call
- with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn:
- client = ContentServiceClient(
- credentials=ga_credentials.AnonymousCredentials(),
- transport="grpc",
- )
-
- # Should wrap all calls on client creation
- assert wrapper_fn.call_count > 0
- wrapper_fn.reset_mock()
-
- # Ensure method has been cached
- assert client._transport.set_iam_policy in client._transport._wrapped_methods
-
- # Replace cached wrapped function with mock
- mock_rpc = mock.Mock()
- mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string.
- client._transport._wrapped_methods[client._transport.set_iam_policy] = mock_rpc
- request = {}
- client.set_iam_policy(request)
-
- # Establish that the underlying gRPC stub method was called.
- assert mock_rpc.call_count == 1
-
- client.set_iam_policy(request)
-
- # Establish that a new wrapper was not created for this call
- assert wrapper_fn.call_count == 0
- assert mock_rpc.call_count == 2
-
-@pytest.mark.asyncio
-async def test_set_iam_policy_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"):
- # Clients should use _prep_wrapped_messages to create cached wrapped rpcs,
- # instead of constructing them on each call
- with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn:
- client = ContentServiceAsyncClient(
- credentials=async_anonymous_credentials(),
- transport=transport,
- )
-
- # Should wrap all calls on client creation
- assert wrapper_fn.call_count > 0
- wrapper_fn.reset_mock()
-
- # Ensure method has been cached
- assert client._client._transport.set_iam_policy in client._client._transport._wrapped_methods
-
- # Replace cached wrapped function with mock
- mock_rpc = mock.AsyncMock()
- mock_rpc.return_value = mock.Mock()
- client._client._transport._wrapped_methods[client._client._transport.set_iam_policy] = mock_rpc
-
- request = {}
- await client.set_iam_policy(request)
-
- # Establish that the underlying gRPC stub method was called.
- assert mock_rpc.call_count == 1
-
- await client.set_iam_policy(request)
-
- # Establish that a new wrapper was not created for this call
- assert wrapper_fn.call_count == 0
- assert mock_rpc.call_count == 2
-
-@pytest.mark.asyncio
-async def test_set_iam_policy_async(transport: str = 'grpc_asyncio', request_type=iam_policy_pb2.SetIamPolicyRequest):
- client = ContentServiceAsyncClient(
- credentials=async_anonymous_credentials(),
- transport=transport,
- )
-
- # Everything is optional in proto3 as far as the runtime is concerned,
- # and we are mocking out the actual API, so just send an empty request.
- request = request_type()
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client.transport.set_iam_policy),
- '__call__') as call:
- # Designate an appropriate return value for the call.
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(policy_pb2.Policy(
- version=774,
- etag=b'etag_blob',
- ))
- response = await client.set_iam_policy(request)
-
- # Establish that the underlying gRPC stub method was called.
- assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - request = iam_policy_pb2.SetIamPolicyRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, policy_pb2.Policy) - assert response.version == 774 - assert response.etag == b'etag_blob' - - -@pytest.mark.asyncio -async def test_set_iam_policy_async_from_dict(): - await test_set_iam_policy_async(request_type=dict) - -def test_set_iam_policy_field_headers(): - client = ContentServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = iam_policy_pb2.SetIamPolicyRequest() - - request.resource = 'resource_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.set_iam_policy), - '__call__') as call: - call.return_value = policy_pb2.Policy() - client.set_iam_policy(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'resource=resource_value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_set_iam_policy_field_headers_async(): - client = ContentServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = iam_policy_pb2.SetIamPolicyRequest() - - request.resource = 'resource_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.set_iam_policy), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(policy_pb2.Policy()) - await client.set_iam_policy(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'resource=resource_value', - ) in kw['metadata'] - -def test_set_iam_policy_from_dict_foreign(): - client = ContentServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.set_iam_policy), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = policy_pb2.Policy() - response = client.set_iam_policy(request={ - 'resource': 'resource_value', - 'policy': policy_pb2.Policy(version=774), - 'update_mask': field_mask_pb2.FieldMask(paths=['paths_value']), - } - ) - call.assert_called() - - -@pytest.mark.parametrize("request_type", [ - iam_policy_pb2.TestIamPermissionsRequest, - dict, -]) -def test_test_iam_permissions(request_type, transport: str = 'grpc'): - client = ContentServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.test_iam_permissions), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = iam_policy_pb2.TestIamPermissionsResponse( - permissions=['permissions_value'], - ) - response = client.test_iam_permissions(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - request = iam_policy_pb2.TestIamPermissionsRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, iam_policy_pb2.TestIamPermissionsResponse) - assert response.permissions == ['permissions_value'] - - -def test_test_iam_permissions_non_empty_request_with_auto_populated_field(): - # This test is a coverage failsafe to make sure that UUID4 fields are - # automatically populated, according to AIP-4235, with non-empty requests. - client = ContentServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. - request = iam_policy_pb2.TestIamPermissionsRequest( - resource='resource_value', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.test_iam_permissions), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client.test_iam_permissions(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == iam_policy_pb2.TestIamPermissionsRequest( - resource='resource_value', - ) - -def test_test_iam_permissions_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = ContentServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.test_iam_permissions in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.test_iam_permissions] = mock_rpc - request = {} - client.test_iam_permissions(request) - - # Establish that the underlying gRPC stub method was called. 
- assert mock_rpc.call_count == 1
-
- client.test_iam_permissions(request)
-
- # Establish that a new wrapper was not created for this call
- assert wrapper_fn.call_count == 0
- assert mock_rpc.call_count == 2
-
-@pytest.mark.asyncio
-async def test_test_iam_permissions_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"):
- # Clients should use _prep_wrapped_messages to create cached wrapped rpcs,
- # instead of constructing them on each call
- with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn:
- client = ContentServiceAsyncClient(
- credentials=async_anonymous_credentials(),
- transport=transport,
- )
-
- # Should wrap all calls on client creation
- assert wrapper_fn.call_count > 0
- wrapper_fn.reset_mock()
-
- # Ensure method has been cached
- assert client._client._transport.test_iam_permissions in client._client._transport._wrapped_methods
-
- # Replace cached wrapped function with mock
- mock_rpc = mock.AsyncMock()
- mock_rpc.return_value = mock.Mock()
- client._client._transport._wrapped_methods[client._client._transport.test_iam_permissions] = mock_rpc
-
- request = {}
- await client.test_iam_permissions(request)
-
- # Establish that the underlying gRPC stub method was called.
- assert mock_rpc.call_count == 1
-
- await client.test_iam_permissions(request)
-
- # Establish that a new wrapper was not created for this call
- assert wrapper_fn.call_count == 0
- assert mock_rpc.call_count == 2
-
-@pytest.mark.asyncio
-async def test_test_iam_permissions_async(transport: str = 'grpc_asyncio', request_type=iam_policy_pb2.TestIamPermissionsRequest):
- client = ContentServiceAsyncClient(
- credentials=async_anonymous_credentials(),
- transport=transport,
- )
-
- # Everything is optional in proto3 as far as the runtime is concerned,
- # and we are mocking out the actual API, so just send an empty request.
- request = request_type()
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client.transport.test_iam_permissions),
- '__call__') as call:
- # Designate an appropriate return value for the call.
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(iam_policy_pb2.TestIamPermissionsResponse(
- permissions=['permissions_value'],
- ))
- response = await client.test_iam_permissions(request)
-
- # Establish that the underlying gRPC stub method was called.
- assert len(call.mock_calls)
- _, args, _ = call.mock_calls[0]
- request = iam_policy_pb2.TestIamPermissionsRequest()
- assert args[0] == request
-
- # Establish that the response is the type that we expect.
- assert isinstance(response, iam_policy_pb2.TestIamPermissionsResponse)
- assert response.permissions == ['permissions_value']
-
-
-@pytest.mark.asyncio
-async def test_test_iam_permissions_async_from_dict():
- await test_test_iam_permissions_async(request_type=dict)
-
-def test_test_iam_permissions_field_headers():
- client = ContentServiceClient(
- credentials=ga_credentials.AnonymousCredentials(),
- )
-
- # Any value that is part of the HTTP/1.1 URI should be sent as
- # a field header. Set these to a non-empty value.
- request = iam_policy_pb2.TestIamPermissionsRequest()
-
- request.resource = 'resource_value'
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object( - type(client.transport.test_iam_permissions), - '__call__') as call: - call.return_value = iam_policy_pb2.TestIamPermissionsResponse() - client.test_iam_permissions(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'resource=resource_value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_test_iam_permissions_field_headers_async(): - client = ContentServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = iam_policy_pb2.TestIamPermissionsRequest() - - request.resource = 'resource_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.test_iam_permissions), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(iam_policy_pb2.TestIamPermissionsResponse()) - await client.test_iam_permissions(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'resource=resource_value', - ) in kw['metadata'] - -def test_test_iam_permissions_from_dict_foreign(): - client = ContentServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.test_iam_permissions), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = iam_policy_pb2.TestIamPermissionsResponse() - response = client.test_iam_permissions(request={ - 'resource': 'resource_value', - 'permissions': ['permissions_value'], - } - ) - call.assert_called() - - -@pytest.mark.parametrize("request_type", [ - content.ListContentRequest, - dict, -]) -def test_list_content(request_type, transport: str = 'grpc'): - client = ContentServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_content), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = content.ListContentResponse( - next_page_token='next_page_token_value', - ) - response = client.list_content(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - request = content.ListContentRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, pagers.ListContentPager) - assert response.next_page_token == 'next_page_token_value' - - -def test_list_content_non_empty_request_with_auto_populated_field(): - # This test is a coverage failsafe to make sure that UUID4 fields are - # automatically populated, according to AIP-4235, with non-empty requests. 
- client = ContentServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. - request = content.ListContentRequest( - parent='parent_value', - page_token='page_token_value', - filter='filter_value', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_content), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client.list_content(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == content.ListContentRequest( - parent='parent_value', - page_token='page_token_value', - filter='filter_value', - ) - -def test_list_content_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = ContentServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.list_content in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.list_content] = mock_rpc - request = {} - client.list_content(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - client.list_content(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_list_content_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: - client = ContentServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._client._transport.list_content in client._client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.AsyncMock() - mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[client._client._transport.list_content] = mock_rpc - - request = {} - await client.list_content(request) - - # Establish that the underlying gRPC stub method was called. 
- assert mock_rpc.call_count == 1
-
- await client.list_content(request)
-
- # Establish that a new wrapper was not created for this call
- assert wrapper_fn.call_count == 0
- assert mock_rpc.call_count == 2
-
-@pytest.mark.asyncio
-async def test_list_content_async(transport: str = 'grpc_asyncio', request_type=content.ListContentRequest):
- client = ContentServiceAsyncClient(
- credentials=async_anonymous_credentials(),
- transport=transport,
- )
-
- # Everything is optional in proto3 as far as the runtime is concerned,
- # and we are mocking out the actual API, so just send an empty request.
- request = request_type()
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client.transport.list_content),
- '__call__') as call:
- # Designate an appropriate return value for the call.
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(content.ListContentResponse(
- next_page_token='next_page_token_value',
- ))
- response = await client.list_content(request)
-
- # Establish that the underlying gRPC stub method was called.
- assert len(call.mock_calls)
- _, args, _ = call.mock_calls[0]
- request = content.ListContentRequest()
- assert args[0] == request
-
- # Establish that the response is the type that we expect.
- assert isinstance(response, pagers.ListContentAsyncPager)
- assert response.next_page_token == 'next_page_token_value'
-
-
-@pytest.mark.asyncio
-async def test_list_content_async_from_dict():
- await test_list_content_async(request_type=dict)
-
-def test_list_content_field_headers():
- client = ContentServiceClient(
- credentials=ga_credentials.AnonymousCredentials(),
- )
-
- # Any value that is part of the HTTP/1.1 URI should be sent as
- # a field header. Set these to a non-empty value.
- request = content.ListContentRequest()
-
- request.parent = 'parent_value'
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client.transport.list_content),
- '__call__') as call:
- call.return_value = content.ListContentResponse()
- client.list_content(request)
-
- # Establish that the underlying gRPC stub method was called.
- assert len(call.mock_calls) == 1
- _, args, _ = call.mock_calls[0]
- assert args[0] == request
-
- # Establish that the field header was sent.
- _, _, kw = call.mock_calls[0]
- assert (
- 'x-goog-request-params',
- 'parent=parent_value',
- ) in kw['metadata']
-
-
-@pytest.mark.asyncio
-async def test_list_content_field_headers_async():
- client = ContentServiceAsyncClient(
- credentials=async_anonymous_credentials(),
- )
-
- # Any value that is part of the HTTP/1.1 URI should be sent as
- # a field header. Set these to a non-empty value.
- request = content.ListContentRequest()
-
- request.parent = 'parent_value'
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client.transport.list_content),
- '__call__') as call:
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(content.ListContentResponse())
- await client.list_content(request)
-
- # Establish that the underlying gRPC stub method was called.
- assert len(call.mock_calls)
- _, args, _ = call.mock_calls[0]
- assert args[0] == request
-
- # Establish that the field header was sent.
- _, _, kw = call.mock_calls[0]
- assert (
- 'x-goog-request-params',
- 'parent=parent_value',
- ) in kw['metadata']
-
-
-def test_list_content_flattened():
- client = ContentServiceClient(
- credentials=ga_credentials.AnonymousCredentials(),
- )
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client.transport.list_content),
- '__call__') as call:
- # Designate an appropriate return value for the call.
- call.return_value = content.ListContentResponse()
- # Call the method with a truthy value for each flattened field,
- # using the keyword arguments to the method.
- client.list_content(
- parent='parent_value',
- )
-
- # Establish that the underlying call was made with the expected
- # request object values.
- assert len(call.mock_calls) == 1
- _, args, _ = call.mock_calls[0]
- arg = args[0].parent
- mock_val = 'parent_value'
- assert arg == mock_val
-
-
-def test_list_content_flattened_error():
- client = ContentServiceClient(
- credentials=ga_credentials.AnonymousCredentials(),
- )
-
- # Attempting to call a method with both a request object and flattened
- # fields is an error.
- with pytest.raises(ValueError):
- client.list_content(
- content.ListContentRequest(),
- parent='parent_value',
- )
-
-@pytest.mark.asyncio
-async def test_list_content_flattened_async():
- client = ContentServiceAsyncClient(
- credentials=async_anonymous_credentials(),
- )
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client.transport.list_content),
- '__call__') as call:
- # Designate an appropriate return value for the call.
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(content.ListContentResponse())
- # Call the method with a truthy value for each flattened field,
- # using the keyword arguments to the method.
- response = await client.list_content(
- parent='parent_value',
- )
-
- # Establish that the underlying call was made with the expected
- # request object values.
- assert len(call.mock_calls)
- _, args, _ = call.mock_calls[0]
- arg = args[0].parent
- mock_val = 'parent_value'
- assert arg == mock_val
-
-@pytest.mark.asyncio
-async def test_list_content_flattened_error_async():
- client = ContentServiceAsyncClient(
- credentials=async_anonymous_credentials(),
- )
-
- # Attempting to call a method with both a request object and flattened
- # fields is an error.
- with pytest.raises(ValueError):
- await client.list_content(
- content.ListContentRequest(),
- parent='parent_value',
- )
-
-
-def test_list_content_pager(transport_name: str = "grpc"):
- client = ContentServiceClient(
- credentials=ga_credentials.AnonymousCredentials(),
- transport=transport_name,
- )
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client.transport.list_content),
- '__call__') as call:
- # Set the response to a series of pages.
- call.side_effect = ( - content.ListContentResponse( - content=[ - analyze.Content(), - analyze.Content(), - analyze.Content(), - ], - next_page_token='abc', - ), - content.ListContentResponse( - content=[], - next_page_token='def', - ), - content.ListContentResponse( - content=[ - analyze.Content(), - ], - next_page_token='ghi', - ), - content.ListContentResponse( - content=[ - analyze.Content(), - analyze.Content(), - ], - ), - RuntimeError, - ) - - expected_metadata = () - retry = retries.Retry() - timeout = 5 - expected_metadata = tuple(expected_metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ('parent', ''), - )), - ) - pager = client.list_content(request={}, retry=retry, timeout=timeout) - - assert pager._metadata == expected_metadata - assert pager._retry == retry - assert pager._timeout == timeout - - results = list(pager) - assert len(results) == 6 - assert all(isinstance(i, analyze.Content) - for i in results) -def test_list_content_pages(transport_name: str = "grpc"): - client = ContentServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport_name, - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_content), - '__call__') as call: - # Set the response to a series of pages. - call.side_effect = ( - content.ListContentResponse( - content=[ - analyze.Content(), - analyze.Content(), - analyze.Content(), - ], - next_page_token='abc', - ), - content.ListContentResponse( - content=[], - next_page_token='def', - ), - content.ListContentResponse( - content=[ - analyze.Content(), - ], - next_page_token='ghi', - ), - content.ListContentResponse( - content=[ - analyze.Content(), - analyze.Content(), - ], - ), - RuntimeError, - ) - pages = list(client.list_content(request={}).pages) - for page_, token in zip(pages, ['abc','def','ghi', '']): - assert page_.raw_page.next_page_token == token - -@pytest.mark.asyncio -async def test_list_content_async_pager(): - client = ContentServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_content), - '__call__', new_callable=mock.AsyncMock) as call: - # Set the response to a series of pages. - call.side_effect = ( - content.ListContentResponse( - content=[ - analyze.Content(), - analyze.Content(), - analyze.Content(), - ], - next_page_token='abc', - ), - content.ListContentResponse( - content=[], - next_page_token='def', - ), - content.ListContentResponse( - content=[ - analyze.Content(), - ], - next_page_token='ghi', - ), - content.ListContentResponse( - content=[ - analyze.Content(), - analyze.Content(), - ], - ), - RuntimeError, - ) - async_pager = await client.list_content(request={},) - assert async_pager.next_page_token == 'abc' - responses = [] - async for response in async_pager: # pragma: no branch - responses.append(response) - - assert len(responses) == 6 - assert all(isinstance(i, analyze.Content) - for i in responses) - - -@pytest.mark.asyncio -async def test_list_content_async_pages(): - client = ContentServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_content), - '__call__', new_callable=mock.AsyncMock) as call: - # Set the response to a series of pages. 
- call.side_effect = ( - content.ListContentResponse( - content=[ - analyze.Content(), - analyze.Content(), - analyze.Content(), - ], - next_page_token='abc', - ), - content.ListContentResponse( - content=[], - next_page_token='def', - ), - content.ListContentResponse( - content=[ - analyze.Content(), - ], - next_page_token='ghi', - ), - content.ListContentResponse( - content=[ - analyze.Content(), - analyze.Content(), - ], - ), - RuntimeError, - ) - pages = [] - # Workaround issue in python 3.9 related to code coverage by adding `# pragma: no branch` - # See https://github.com/googleapis/gapic-generator-python/pull/1174#issuecomment-1025132372 - async for page_ in ( # pragma: no branch - await client.list_content(request={}) - ).pages: - pages.append(page_) - for page_, token in zip(pages, ['abc','def','ghi', '']): - assert page_.raw_page.next_page_token == token - - -def test_credentials_transport_error(): - # It is an error to provide credentials and a transport instance. - transport = transports.ContentServiceGrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - with pytest.raises(ValueError): - client = ContentServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # It is an error to provide a credentials file and a transport instance. - transport = transports.ContentServiceGrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - with pytest.raises(ValueError): - client = ContentServiceClient( - client_options={"credentials_file": "credentials.json"}, - transport=transport, - ) - - # It is an error to provide an api_key and a transport instance. - transport = transports.ContentServiceGrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - options = client_options.ClientOptions() - options.api_key = "api_key" - with pytest.raises(ValueError): - client = ContentServiceClient( - client_options=options, - transport=transport, - ) - - # It is an error to provide an api_key and a credential. - options = client_options.ClientOptions() - options.api_key = "api_key" - with pytest.raises(ValueError): - client = ContentServiceClient( - client_options=options, - credentials=ga_credentials.AnonymousCredentials() - ) - - # It is an error to provide scopes and a transport instance. - transport = transports.ContentServiceGrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - with pytest.raises(ValueError): - client = ContentServiceClient( - client_options={"scopes": ["1", "2"]}, - transport=transport, - ) - - -def test_transport_instance(): - # A client may be instantiated with a custom transport instance. - transport = transports.ContentServiceGrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - client = ContentServiceClient(transport=transport) - assert client.transport is transport - -def test_transport_get_channel(): - # A client may be instantiated with a custom transport instance. - transport = transports.ContentServiceGrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - channel = transport.grpc_channel - assert channel - - transport = transports.ContentServiceGrpcAsyncIOTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - channel = transport.grpc_channel - assert channel - -@pytest.mark.parametrize("transport_class", [ - transports.ContentServiceGrpcTransport, - transports.ContentServiceGrpcAsyncIOTransport, -]) -def test_transport_adc(transport_class): - # Test default credentials are used if not provided. 
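-    # google.auth.default() resolves Application Default Credentials from the
-    # environment (GOOGLE_APPLICATION_CREDENTIALS, gcloud user credentials, or
-    # the metadata server on GCE), so it is mocked here to keep the test
-    # hermetic.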
- with mock.patch.object(google.auth, 'default') as adc: - adc.return_value = (ga_credentials.AnonymousCredentials(), None) - transport_class() - adc.assert_called_once() - -def test_transport_kind_grpc(): - transport = ContentServiceClient.get_transport_class("grpc")( - credentials=ga_credentials.AnonymousCredentials() - ) - assert transport.kind == "grpc" - - -def test_initialize_client_w_grpc(): - client = ContentServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc" - ) - assert client is not None - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_create_content_empty_call_grpc(): - client = ContentServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.create_content), - '__call__') as call: - call.return_value = analyze.Content() - client.create_content(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = gcd_content.CreateContentRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_update_content_empty_call_grpc(): - client = ContentServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.update_content), - '__call__') as call: - call.return_value = analyze.Content() - client.update_content(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = gcd_content.UpdateContentRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_delete_content_empty_call_grpc(): - client = ContentServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.delete_content), - '__call__') as call: - call.return_value = None - client.delete_content(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = content.DeleteContentRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_get_content_empty_call_grpc(): - client = ContentServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.get_content), - '__call__') as call: - call.return_value = analyze.Content() - client.get_content(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = content.GetContentRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. 
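-# The IAM mixin checks below (get_iam_policy, set_iam_policy,
-# test_iam_permissions) reuse request and response types from the
-# google.iam.v1 protos (iam_policy_pb2, policy_pb2), so the expected default
-# request messages come from that package rather than from dataplex_v1.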
-def test_get_iam_policy_empty_call_grpc(): - client = ContentServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.get_iam_policy), - '__call__') as call: - call.return_value = policy_pb2.Policy() - client.get_iam_policy(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = iam_policy_pb2.GetIamPolicyRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_set_iam_policy_empty_call_grpc(): - client = ContentServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.set_iam_policy), - '__call__') as call: - call.return_value = policy_pb2.Policy() - client.set_iam_policy(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = iam_policy_pb2.SetIamPolicyRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_test_iam_permissions_empty_call_grpc(): - client = ContentServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.test_iam_permissions), - '__call__') as call: - call.return_value = iam_policy_pb2.TestIamPermissionsResponse() - client.test_iam_permissions(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = iam_policy_pb2.TestIamPermissionsRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_list_content_empty_call_grpc(): - client = ContentServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.list_content), - '__call__') as call: - call.return_value = content.ListContentResponse() - client.list_content(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = content.ListContentRequest() - - assert args[0] == request_msg - - -def test_transport_kind_grpc_asyncio(): - transport = ContentServiceAsyncClient.get_transport_class("grpc_asyncio")( - credentials=async_anonymous_credentials() - ) - assert transport.kind == "grpc_asyncio" - - -def test_initialize_client_w_grpc_asyncio(): - client = ContentServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio" - ) - assert client is not None - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. 
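-# The async variants that follow differ from the sync tests above only in the
-# transport ("grpc_asyncio") and in that each mocked stub must return an
-# awaitable, so return values are wrapped in grpc_helpers_async.FakeUnaryUnaryCall.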
-@pytest.mark.asyncio -async def test_create_content_empty_call_grpc_asyncio(): - client = ContentServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.create_content), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(analyze.Content( - name='name_value', - uid='uid_value', - path='path_value', - description='description_value', - )) - await client.create_content(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = gcd_content.CreateContentRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -@pytest.mark.asyncio -async def test_update_content_empty_call_grpc_asyncio(): - client = ContentServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.update_content), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(analyze.Content( - name='name_value', - uid='uid_value', - path='path_value', - description='description_value', - )) - await client.update_content(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = gcd_content.UpdateContentRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -@pytest.mark.asyncio -async def test_delete_content_empty_call_grpc_asyncio(): - client = ContentServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.delete_content), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - await client.delete_content(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = content.DeleteContentRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -@pytest.mark.asyncio -async def test_get_content_empty_call_grpc_asyncio(): - client = ContentServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.get_content), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(analyze.Content( - name='name_value', - uid='uid_value', - path='path_value', - description='description_value', - )) - await client.get_content(request=None) - - # Establish that the underlying stub method was called. 
- call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = content.GetContentRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -@pytest.mark.asyncio -async def test_get_iam_policy_empty_call_grpc_asyncio(): - client = ContentServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.get_iam_policy), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(policy_pb2.Policy( - version=774, - etag=b'etag_blob', - )) - await client.get_iam_policy(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = iam_policy_pb2.GetIamPolicyRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -@pytest.mark.asyncio -async def test_set_iam_policy_empty_call_grpc_asyncio(): - client = ContentServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.set_iam_policy), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(policy_pb2.Policy( - version=774, - etag=b'etag_blob', - )) - await client.set_iam_policy(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = iam_policy_pb2.SetIamPolicyRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -@pytest.mark.asyncio -async def test_test_iam_permissions_empty_call_grpc_asyncio(): - client = ContentServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.test_iam_permissions), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(iam_policy_pb2.TestIamPermissionsResponse( - permissions=['permissions_value'], - )) - await client.test_iam_permissions(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = iam_policy_pb2.TestIamPermissionsRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -@pytest.mark.asyncio -async def test_list_content_empty_call_grpc_asyncio(): - client = ContentServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.list_content), - '__call__') as call: - # Designate an appropriate return value for the call. 
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(content.ListContentResponse( - next_page_token='next_page_token_value', - )) - await client.list_content(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = content.ListContentRequest() - - assert args[0] == request_msg - - -def test_transport_grpc_default(): - # A client should use the gRPC transport by default. - client = ContentServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - assert isinstance( - client.transport, - transports.ContentServiceGrpcTransport, - ) - -def test_content_service_base_transport_error(): - # Passing both a credentials object and credentials_file should raise an error - with pytest.raises(core_exceptions.DuplicateCredentialArgs): - transport = transports.ContentServiceTransport( - credentials=ga_credentials.AnonymousCredentials(), - credentials_file="credentials.json" - ) - - -def test_content_service_base_transport(): - # Instantiate the base transport. - with mock.patch('google.cloud.dataplex_v1.services.content_service.transports.ContentServiceTransport.__init__') as Transport: - Transport.return_value = None - transport = transports.ContentServiceTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Every method on the transport should just blindly - # raise NotImplementedError. - methods = ( - 'create_content', - 'update_content', - 'delete_content', - 'get_content', - 'get_iam_policy', - 'set_iam_policy', - 'test_iam_permissions', - 'list_content', - 'get_location', - 'list_locations', - 'get_operation', - 'cancel_operation', - 'delete_operation', - 'list_operations', - ) - for method in methods: - with pytest.raises(NotImplementedError): - getattr(transport, method)(request=object()) - - with pytest.raises(NotImplementedError): - transport.close() - - # Catch all for all remaining methods and properties - remainder = [ - 'kind', - ] - for r in remainder: - with pytest.raises(NotImplementedError): - getattr(transport, r)() - - -def test_content_service_base_transport_with_credentials_file(): - # Instantiate the base transport with a credentials file - with mock.patch.object(google.auth, 'load_credentials_from_file', autospec=True) as load_creds, mock.patch('google.cloud.dataplex_v1.services.content_service.transports.ContentServiceTransport._prep_wrapped_messages') as Transport: - Transport.return_value = None - load_creds.return_value = (ga_credentials.AnonymousCredentials(), None) - transport = transports.ContentServiceTransport( - credentials_file="credentials.json", - quota_project_id="octopus", - ) - load_creds.assert_called_once_with("credentials.json", - scopes=None, - default_scopes=( - 'https://www.googleapis.com/auth/cloud-platform', -), - quota_project_id="octopus", - ) - - -def test_content_service_base_transport_with_adc(): - # Test the default credentials are used if credentials and credentials_file are None. - with mock.patch.object(google.auth, 'default', autospec=True) as adc, mock.patch('google.cloud.dataplex_v1.services.content_service.transports.ContentServiceTransport._prep_wrapped_messages') as Transport: - Transport.return_value = None - adc.return_value = (ga_credentials.AnonymousCredentials(), None) - transport = transports.ContentServiceTransport() - adc.assert_called_once() - - -def test_content_service_auth_adc(): - # If no credentials are provided, we should use ADC credentials. 
- with mock.patch.object(google.auth, 'default', autospec=True) as adc: - adc.return_value = (ga_credentials.AnonymousCredentials(), None) - ContentServiceClient() - adc.assert_called_once_with( - scopes=None, - default_scopes=( - 'https://www.googleapis.com/auth/cloud-platform', -), - quota_project_id=None, - ) - - -@pytest.mark.parametrize( - "transport_class", - [ - transports.ContentServiceGrpcTransport, - transports.ContentServiceGrpcAsyncIOTransport, - ], -) -def test_content_service_transport_auth_adc(transport_class): - # If credentials and host are not provided, the transport class should use - # ADC credentials. - with mock.patch.object(google.auth, 'default', autospec=True) as adc: - adc.return_value = (ga_credentials.AnonymousCredentials(), None) - transport_class(quota_project_id="octopus", scopes=["1", "2"]) - adc.assert_called_once_with( - scopes=["1", "2"], - default_scopes=( 'https://www.googleapis.com/auth/cloud-platform',), - quota_project_id="octopus", - ) - - -@pytest.mark.parametrize( - "transport_class", - [ - transports.ContentServiceGrpcTransport, - transports.ContentServiceGrpcAsyncIOTransport, - ], -) -def test_content_service_transport_auth_gdch_credentials(transport_class): - host = 'https://language.com' - api_audience_tests = [None, 'https://language2.com'] - api_audience_expect = [host, 'https://language2.com'] - for t, e in zip(api_audience_tests, api_audience_expect): - with mock.patch.object(google.auth, 'default', autospec=True) as adc: - gdch_mock = mock.MagicMock() - type(gdch_mock).with_gdch_audience = mock.PropertyMock(return_value=gdch_mock) - adc.return_value = (gdch_mock, None) - transport_class(host=host, api_audience=t) - gdch_mock.with_gdch_audience.assert_called_once_with( - e - ) - - -@pytest.mark.parametrize( - "transport_class,grpc_helpers", - [ - (transports.ContentServiceGrpcTransport, grpc_helpers), - (transports.ContentServiceGrpcAsyncIOTransport, grpc_helpers_async) - ], -) -def test_content_service_transport_create_channel(transport_class, grpc_helpers): - # If credentials and host are not provided, the transport class should use - # ADC credentials. - with mock.patch.object(google.auth, "default", autospec=True) as adc, mock.patch.object( - grpc_helpers, "create_channel", autospec=True - ) as create_channel: - creds = ga_credentials.AnonymousCredentials() - adc.return_value = (creds, None) - transport_class( - quota_project_id="octopus", - scopes=["1", "2"] - ) - - create_channel.assert_called_with( - "dataplex.googleapis.com:443", - credentials=creds, - credentials_file=None, - quota_project_id="octopus", - default_scopes=( - 'https://www.googleapis.com/auth/cloud-platform', -), - scopes=["1", "2"], - default_host="dataplex.googleapis.com", - ssl_credentials=None, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ], - ) - - -@pytest.mark.parametrize("transport_class", [transports.ContentServiceGrpcTransport, transports.ContentServiceGrpcAsyncIOTransport]) -def test_content_service_grpc_transport_client_cert_source_for_mtls( - transport_class -): - cred = ga_credentials.AnonymousCredentials() - - # Check ssl_channel_credentials is used if provided. 
-    with mock.patch.object(transport_class, "create_channel") as mock_create_channel:
-        mock_ssl_channel_creds = mock.Mock()
-        transport_class(
-            host="squid.clam.whelk",
-            credentials=cred,
-            ssl_channel_credentials=mock_ssl_channel_creds
-        )
-        mock_create_channel.assert_called_once_with(
-            "squid.clam.whelk:443",
-            credentials=cred,
-            credentials_file=None,
-            scopes=None,
-            ssl_credentials=mock_ssl_channel_creds,
-            quota_project_id=None,
-            options=[
-                ("grpc.max_send_message_length", -1),
-                ("grpc.max_receive_message_length", -1),
-            ],
-        )
-
-    # Check that if ssl_channel_credentials is not provided, then
-    # client_cert_source_for_mtls is used.
-    with mock.patch.object(transport_class, "create_channel", return_value=mock.Mock()):
-        with mock.patch("grpc.ssl_channel_credentials") as mock_ssl_cred:
-            transport_class(
-                credentials=cred,
-                client_cert_source_for_mtls=client_cert_source_callback
-            )
-            expected_cert, expected_key = client_cert_source_callback()
-            mock_ssl_cred.assert_called_once_with(
-                certificate_chain=expected_cert,
-                private_key=expected_key
-            )
-
-
-@pytest.mark.parametrize("transport_name", [
-    "grpc",
-    "grpc_asyncio",
-])
-def test_content_service_host_no_port(transport_name):
-    client = ContentServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        client_options=client_options.ClientOptions(api_endpoint='dataplex.googleapis.com'),
-        transport=transport_name,
-    )
-    assert client.transport._host == (
-        'dataplex.googleapis.com:443'
-    )
-
-@pytest.mark.parametrize("transport_name", [
-    "grpc",
-    "grpc_asyncio",
-])
-def test_content_service_host_with_port(transport_name):
-    client = ContentServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        client_options=client_options.ClientOptions(api_endpoint='dataplex.googleapis.com:8000'),
-        transport=transport_name,
-    )
-    assert client.transport._host == (
-        'dataplex.googleapis.com:8000'
-    )
-
-def test_content_service_grpc_transport_channel():
-    channel = grpc.secure_channel('http://localhost/', grpc.local_channel_credentials())
-
-    # Check that channel is used if provided.
-    transport = transports.ContentServiceGrpcTransport(
-        host="squid.clam.whelk",
-        channel=channel,
-    )
-    assert transport.grpc_channel == channel
-    assert transport._host == "squid.clam.whelk:443"
-    assert transport._ssl_channel_credentials is None
-
-
-def test_content_service_grpc_asyncio_transport_channel():
-    channel = aio.secure_channel('http://localhost/', grpc.local_channel_credentials())
-
-    # Check that channel is used if provided.
-    transport = transports.ContentServiceGrpcAsyncIOTransport(
-        host="squid.clam.whelk",
-        channel=channel,
-    )
-    assert transport.grpc_channel == channel
-    assert transport._host == "squid.clam.whelk:443"
-    assert transport._ssl_channel_credentials is None
-
-
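-# A minimal sketch, for the sync transport only, of the non-deprecated mTLS
-# wiring: the certificate callback is passed as client_cert_source_for_mtls
-# instead of the legacy api_mtls_endpoint/client_cert_source pair. The test
-# name is illustrative; client_cert_source_callback is the module-level
-# helper the surrounding tests already use.
-def test_content_service_client_cert_source_for_mtls_sketch():
-    with mock.patch.object(transports.ContentServiceGrpcTransport, "create_channel", return_value=mock.Mock()):
-        with mock.patch("grpc.ssl_channel_credentials") as mock_ssl_cred:
-            transports.ContentServiceGrpcTransport(
-                credentials=ga_credentials.AnonymousCredentials(),
-                client_cert_source_for_mtls=client_cert_source_callback,
-            )
-            expected_cert, expected_key = client_cert_source_callback()
-            mock_ssl_cred.assert_called_once_with(
-                certificate_chain=expected_cert,
-                private_key=expected_key
-            )
-
-
-# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are
-# removed from grpc/grpc_asyncio transport constructor.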
-@pytest.mark.parametrize("transport_class", [transports.ContentServiceGrpcTransport, transports.ContentServiceGrpcAsyncIOTransport]) -def test_content_service_transport_channel_mtls_with_client_cert_source( - transport_class -): - with mock.patch("grpc.ssl_channel_credentials", autospec=True) as grpc_ssl_channel_cred: - with mock.patch.object(transport_class, "create_channel") as grpc_create_channel: - mock_ssl_cred = mock.Mock() - grpc_ssl_channel_cred.return_value = mock_ssl_cred - - mock_grpc_channel = mock.Mock() - grpc_create_channel.return_value = mock_grpc_channel - - cred = ga_credentials.AnonymousCredentials() - with pytest.warns(DeprecationWarning): - with mock.patch.object(google.auth, 'default') as adc: - adc.return_value = (cred, None) - transport = transport_class( - host="squid.clam.whelk", - api_mtls_endpoint="mtls.squid.clam.whelk", - client_cert_source=client_cert_source_callback, - ) - adc.assert_called_once() - - grpc_ssl_channel_cred.assert_called_once_with( - certificate_chain=b"cert bytes", private_key=b"key bytes" - ) - grpc_create_channel.assert_called_once_with( - "mtls.squid.clam.whelk:443", - credentials=cred, - credentials_file=None, - scopes=None, - ssl_credentials=mock_ssl_cred, - quota_project_id=None, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ], - ) - assert transport.grpc_channel == mock_grpc_channel - assert transport._ssl_channel_credentials == mock_ssl_cred - - -# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are -# removed from grpc/grpc_asyncio transport constructor. -@pytest.mark.parametrize("transport_class", [transports.ContentServiceGrpcTransport, transports.ContentServiceGrpcAsyncIOTransport]) -def test_content_service_transport_channel_mtls_with_adc( - transport_class -): - mock_ssl_cred = mock.Mock() - with mock.patch.multiple( - "google.auth.transport.grpc.SslCredentials", - __init__=mock.Mock(return_value=None), - ssl_credentials=mock.PropertyMock(return_value=mock_ssl_cred), - ): - with mock.patch.object(transport_class, "create_channel") as grpc_create_channel: - mock_grpc_channel = mock.Mock() - grpc_create_channel.return_value = mock_grpc_channel - mock_cred = mock.Mock() - - with pytest.warns(DeprecationWarning): - transport = transport_class( - host="squid.clam.whelk", - credentials=mock_cred, - api_mtls_endpoint="mtls.squid.clam.whelk", - client_cert_source=None, - ) - - grpc_create_channel.assert_called_once_with( - "mtls.squid.clam.whelk:443", - credentials=mock_cred, - credentials_file=None, - scopes=None, - ssl_credentials=mock_ssl_cred, - quota_project_id=None, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ], - ) - assert transport.grpc_channel == mock_grpc_channel - - -def test_content_path(): - project = "squid" - location = "clam" - lake = "whelk" - content = "octopus" - expected = "projects/{project}/locations/{location}/lakes/{lake}/content/{content}".format(project=project, location=location, lake=lake, content=content, ) - actual = ContentServiceClient.content_path(project, location, lake, content) - assert expected == actual - - -def test_parse_content_path(): - expected = { - "project": "oyster", - "location": "nudibranch", - "lake": "cuttlefish", - "content": "mussel", - } - path = ContentServiceClient.content_path(**expected) - - # Check that the path construction is reversible. 
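-    # parse_content_path() inverts the template used by content_path(),
-    # matching the formatted string with a regex and returning the captured
-    # segments as a dict.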
- actual = ContentServiceClient.parse_content_path(path) - assert expected == actual - -def test_lake_path(): - project = "winkle" - location = "nautilus" - lake = "scallop" - expected = "projects/{project}/locations/{location}/lakes/{lake}".format(project=project, location=location, lake=lake, ) - actual = ContentServiceClient.lake_path(project, location, lake) - assert expected == actual - - -def test_parse_lake_path(): - expected = { - "project": "abalone", - "location": "squid", - "lake": "clam", - } - path = ContentServiceClient.lake_path(**expected) - - # Check that the path construction is reversible. - actual = ContentServiceClient.parse_lake_path(path) - assert expected == actual - -def test_common_billing_account_path(): - billing_account = "whelk" - expected = "billingAccounts/{billing_account}".format(billing_account=billing_account, ) - actual = ContentServiceClient.common_billing_account_path(billing_account) - assert expected == actual - - -def test_parse_common_billing_account_path(): - expected = { - "billing_account": "octopus", - } - path = ContentServiceClient.common_billing_account_path(**expected) - - # Check that the path construction is reversible. - actual = ContentServiceClient.parse_common_billing_account_path(path) - assert expected == actual - -def test_common_folder_path(): - folder = "oyster" - expected = "folders/{folder}".format(folder=folder, ) - actual = ContentServiceClient.common_folder_path(folder) - assert expected == actual - - -def test_parse_common_folder_path(): - expected = { - "folder": "nudibranch", - } - path = ContentServiceClient.common_folder_path(**expected) - - # Check that the path construction is reversible. - actual = ContentServiceClient.parse_common_folder_path(path) - assert expected == actual - -def test_common_organization_path(): - organization = "cuttlefish" - expected = "organizations/{organization}".format(organization=organization, ) - actual = ContentServiceClient.common_organization_path(organization) - assert expected == actual - - -def test_parse_common_organization_path(): - expected = { - "organization": "mussel", - } - path = ContentServiceClient.common_organization_path(**expected) - - # Check that the path construction is reversible. - actual = ContentServiceClient.parse_common_organization_path(path) - assert expected == actual - -def test_common_project_path(): - project = "winkle" - expected = "projects/{project}".format(project=project, ) - actual = ContentServiceClient.common_project_path(project) - assert expected == actual - - -def test_parse_common_project_path(): - expected = { - "project": "nautilus", - } - path = ContentServiceClient.common_project_path(**expected) - - # Check that the path construction is reversible. - actual = ContentServiceClient.parse_common_project_path(path) - assert expected == actual - -def test_common_location_path(): - project = "scallop" - location = "abalone" - expected = "projects/{project}/locations/{location}".format(project=project, location=location, ) - actual = ContentServiceClient.common_location_path(project, location) - assert expected == actual - - -def test_parse_common_location_path(): - expected = { - "project": "squid", - "location": "clam", - } - path = ContentServiceClient.common_location_path(**expected) - - # Check that the path construction is reversible. 
- actual = ContentServiceClient.parse_common_location_path(path) - assert expected == actual - - -def test_client_with_default_client_info(): - client_info = gapic_v1.client_info.ClientInfo() - - with mock.patch.object(transports.ContentServiceTransport, '_prep_wrapped_messages') as prep: - client = ContentServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - client_info=client_info, - ) - prep.assert_called_once_with(client_info) - - with mock.patch.object(transports.ContentServiceTransport, '_prep_wrapped_messages') as prep: - transport_class = ContentServiceClient.get_transport_class() - transport = transport_class( - credentials=ga_credentials.AnonymousCredentials(), - client_info=client_info, - ) - prep.assert_called_once_with(client_info) - - -def test_delete_operation(transport: str = "grpc"): - client = ContentServiceClient( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = operations_pb2.DeleteOperationRequest() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.delete_operation), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = None - response = client.delete_operation(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the response is the type that we expect. - assert response is None -@pytest.mark.asyncio -async def test_delete_operation_async(transport: str = "grpc_asyncio"): - client = ContentServiceAsyncClient( - credentials=async_anonymous_credentials(), transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = operations_pb2.DeleteOperationRequest() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.delete_operation), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - None - ) - response = await client.delete_operation(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the response is the type that we expect. - assert response is None - -def test_delete_operation_field_headers(): - client = ContentServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = operations_pb2.DeleteOperationRequest() - request.name = "locations" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.delete_operation), "__call__") as call: - call.return_value = None - - client.delete_operation(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. 
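-    # The "x-goog-request-params" metadata entry is derived from the request's
-    # name field; the backend uses it to route the call, which is why these
-    # header tests assert on kw["metadata"] rather than on the request body.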
- _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "name=locations",) in kw["metadata"] -@pytest.mark.asyncio -async def test_delete_operation_field_headers_async(): - client = ContentServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = operations_pb2.DeleteOperationRequest() - request.name = "locations" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.delete_operation), "__call__") as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - None - ) - await client.delete_operation(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "name=locations",) in kw["metadata"] - -def test_delete_operation_from_dict(): - client = ContentServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.delete_operation), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = None - - response = client.delete_operation( - request={ - "name": "locations", - } - ) - call.assert_called() -@pytest.mark.asyncio -async def test_delete_operation_from_dict_async(): - client = ContentServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.delete_operation), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - None - ) - response = await client.delete_operation( - request={ - "name": "locations", - } - ) - call.assert_called() - - -def test_cancel_operation(transport: str = "grpc"): - client = ContentServiceClient( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = operations_pb2.CancelOperationRequest() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = None - response = client.cancel_operation(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the response is the type that we expect. - assert response is None -@pytest.mark.asyncio -async def test_cancel_operation_async(transport: str = "grpc_asyncio"): - client = ContentServiceAsyncClient( - credentials=async_anonymous_credentials(), transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = operations_pb2.CancelOperationRequest() - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - None - ) - response = await client.cancel_operation(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the response is the type that we expect. - assert response is None - -def test_cancel_operation_field_headers(): - client = ContentServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = operations_pb2.CancelOperationRequest() - request.name = "locations" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: - call.return_value = None - - client.cancel_operation(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "name=locations",) in kw["metadata"] -@pytest.mark.asyncio -async def test_cancel_operation_field_headers_async(): - client = ContentServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = operations_pb2.CancelOperationRequest() - request.name = "locations" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - None - ) - await client.cancel_operation(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "name=locations",) in kw["metadata"] - -def test_cancel_operation_from_dict(): - client = ContentServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = None - - response = client.cancel_operation( - request={ - "name": "locations", - } - ) - call.assert_called() -@pytest.mark.asyncio -async def test_cancel_operation_from_dict_async(): - client = ContentServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: - # Designate an appropriate return value for the call. 
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - None - ) - response = await client.cancel_operation( - request={ - "name": "locations", - } - ) - call.assert_called() - - -def test_get_operation(transport: str = "grpc"): - client = ContentServiceClient( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = operations_pb2.GetOperationRequest() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_operation), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = operations_pb2.Operation() - response = client.get_operation(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, operations_pb2.Operation) -@pytest.mark.asyncio -async def test_get_operation_async(transport: str = "grpc_asyncio"): - client = ContentServiceAsyncClient( - credentials=async_anonymous_credentials(), transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = operations_pb2.GetOperationRequest() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_operation), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation() - ) - response = await client.get_operation(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, operations_pb2.Operation) - -def test_get_operation_field_headers(): - client = ContentServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = operations_pb2.GetOperationRequest() - request.name = "locations" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_operation), "__call__") as call: - call.return_value = operations_pb2.Operation() - - client.get_operation(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "name=locations",) in kw["metadata"] -@pytest.mark.asyncio -async def test_get_operation_field_headers_async(): - client = ContentServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = operations_pb2.GetOperationRequest() - request.name = "locations" - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object(type(client.transport.get_operation), "__call__") as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation() - ) - await client.get_operation(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "name=locations",) in kw["metadata"] - -def test_get_operation_from_dict(): - client = ContentServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_operation), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = operations_pb2.Operation() - - response = client.get_operation( - request={ - "name": "locations", - } - ) - call.assert_called() -@pytest.mark.asyncio -async def test_get_operation_from_dict_async(): - client = ContentServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_operation), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation() - ) - response = await client.get_operation( - request={ - "name": "locations", - } - ) - call.assert_called() - - -def test_list_operations(transport: str = "grpc"): - client = ContentServiceClient( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = operations_pb2.ListOperationsRequest() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_operations), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = operations_pb2.ListOperationsResponse() - response = client.list_operations(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, operations_pb2.ListOperationsResponse) -@pytest.mark.asyncio -async def test_list_operations_async(transport: str = "grpc_asyncio"): - client = ContentServiceAsyncClient( - credentials=async_anonymous_credentials(), transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = operations_pb2.ListOperationsRequest() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_operations), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.ListOperationsResponse() - ) - response = await client.list_operations(request) - # Establish that the underlying gRPC stub method was called. 
- assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, operations_pb2.ListOperationsResponse) - -def test_list_operations_field_headers(): - client = ContentServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = operations_pb2.ListOperationsRequest() - request.name = "locations" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_operations), "__call__") as call: - call.return_value = operations_pb2.ListOperationsResponse() - - client.list_operations(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "name=locations",) in kw["metadata"] -@pytest.mark.asyncio -async def test_list_operations_field_headers_async(): - client = ContentServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = operations_pb2.ListOperationsRequest() - request.name = "locations" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_operations), "__call__") as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.ListOperationsResponse() - ) - await client.list_operations(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "name=locations",) in kw["metadata"] - -def test_list_operations_from_dict(): - client = ContentServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_operations), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = operations_pb2.ListOperationsResponse() - - response = client.list_operations( - request={ - "name": "locations", - } - ) - call.assert_called() -@pytest.mark.asyncio -async def test_list_operations_from_dict_async(): - client = ContentServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_operations), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.ListOperationsResponse() - ) - response = await client.list_operations( - request={ - "name": "locations", - } - ) - call.assert_called() - - -def test_list_locations(transport: str = "grpc"): - client = ContentServiceClient( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. 
- request = locations_pb2.ListLocationsRequest() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_locations), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = locations_pb2.ListLocationsResponse() - response = client.list_locations(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, locations_pb2.ListLocationsResponse) -@pytest.mark.asyncio -async def test_list_locations_async(transport: str = "grpc_asyncio"): - client = ContentServiceAsyncClient( - credentials=async_anonymous_credentials(), transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = locations_pb2.ListLocationsRequest() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_locations), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - locations_pb2.ListLocationsResponse() - ) - response = await client.list_locations(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, locations_pb2.ListLocationsResponse) - -def test_list_locations_field_headers(): - client = ContentServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = locations_pb2.ListLocationsRequest() - request.name = "locations" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_locations), "__call__") as call: - call.return_value = locations_pb2.ListLocationsResponse() - - client.list_locations(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "name=locations",) in kw["metadata"] -@pytest.mark.asyncio -async def test_list_locations_field_headers_async(): - client = ContentServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = locations_pb2.ListLocationsRequest() - request.name = "locations" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_locations), "__call__") as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - locations_pb2.ListLocationsResponse() - ) - await client.list_locations(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. 
- _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "name=locations",) in kw["metadata"] - -def test_list_locations_from_dict(): - client = ContentServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_locations), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = locations_pb2.ListLocationsResponse() - - response = client.list_locations( - request={ - "name": "locations", - } - ) - call.assert_called() -@pytest.mark.asyncio -async def test_list_locations_from_dict_async(): - client = ContentServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_locations), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - locations_pb2.ListLocationsResponse() - ) - response = await client.list_locations( - request={ - "name": "locations", - } - ) - call.assert_called() - - -def test_get_location(transport: str = "grpc"): - client = ContentServiceClient( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = locations_pb2.GetLocationRequest() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_location), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = locations_pb2.Location() - response = client.get_location(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, locations_pb2.Location) -@pytest.mark.asyncio -async def test_get_location_async(transport: str = "grpc_asyncio"): - client = ContentServiceAsyncClient( - credentials=async_anonymous_credentials(), transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = locations_pb2.GetLocationRequest() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_location), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - locations_pb2.Location() - ) - response = await client.get_location(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, locations_pb2.Location) - -def test_get_location_field_headers(): - client = ContentServiceClient( - credentials=ga_credentials.AnonymousCredentials()) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = locations_pb2.GetLocationRequest() - request.name = "locations/abc" - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object(type(client.transport.get_location), "__call__") as call:
- call.return_value = locations_pb2.Location()
-
- client.get_location(request)
- # Establish that the underlying gRPC stub method was called.
- assert len(call.mock_calls) == 1
- _, args, _ = call.mock_calls[0]
- assert args[0] == request
-
- # Establish that the field header was sent.
- _, _, kw = call.mock_calls[0]
- assert ("x-goog-request-params", "name=locations/abc",) in kw["metadata"]
-@pytest.mark.asyncio
-async def test_get_location_field_headers_async():
- client = ContentServiceAsyncClient(
- credentials=async_anonymous_credentials()
- )
-
- # Any value that is part of the HTTP/1.1 URI should be sent as
- # a field header. Set these to a non-empty value.
- request = locations_pb2.GetLocationRequest()
- request.name = "locations/abc"
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(type(client.transport.get_location), "__call__") as call:
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
- locations_pb2.Location()
- )
- await client.get_location(request)
- # Establish that the underlying gRPC stub method was called.
- assert len(call.mock_calls) == 1
- _, args, _ = call.mock_calls[0]
- assert args[0] == request
-
- # Establish that the field header was sent.
- _, _, kw = call.mock_calls[0]
- assert ("x-goog-request-params", "name=locations/abc",) in kw["metadata"]
-
-def test_get_location_from_dict():
- client = ContentServiceClient(
- credentials=ga_credentials.AnonymousCredentials(),
- )
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(type(client.transport.get_location), "__call__") as call:
- # Designate an appropriate return value for the call.
- call.return_value = locations_pb2.Location()
-
- response = client.get_location(
- request={
- "name": "locations/abc",
- }
- )
- call.assert_called()
-@pytest.mark.asyncio
-async def test_get_location_from_dict_async():
- client = ContentServiceAsyncClient(
- credentials=async_anonymous_credentials(),
- )
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(type(client.transport.get_location), "__call__") as call:
- # Designate an appropriate return value for the call.
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
- locations_pb2.Location()
- )
- response = await client.get_location(
- request={
- "name": "locations/abc",
- }
- )
- call.assert_called()
-
-
-def test_transport_close_grpc():
- client = ContentServiceClient(
- credentials=ga_credentials.AnonymousCredentials(),
- transport="grpc"
- )
- with mock.patch.object(type(getattr(client.transport, "_grpc_channel")), "close") as close:
- with client:
- close.assert_not_called()
- close.assert_called_once()
-
-
-@pytest.mark.asyncio
-async def test_transport_close_grpc_asyncio():
- client = ContentServiceAsyncClient(
- credentials=async_anonymous_credentials(),
- transport="grpc_asyncio"
- )
- with mock.patch.object(type(getattr(client.transport, "_grpc_channel")), "close") as close:
- async with client:
- close.assert_not_called()
- close.assert_called_once()
-
-
-def test_client_ctx():
- transports = [
- 'grpc',
- ]
- for transport in transports:
- client = ContentServiceClient(
- credentials=ga_credentials.AnonymousCredentials(),
- transport=transport
- )
- # Test client calls underlying transport.
- with mock.patch.object(type(client.transport), "close") as close: - close.assert_not_called() - with client: - pass - close.assert_called() - -@pytest.mark.parametrize("client_class,transport_class", [ - (ContentServiceClient, transports.ContentServiceGrpcTransport), - (ContentServiceAsyncClient, transports.ContentServiceGrpcAsyncIOTransport), -]) -def test_api_key_credentials(client_class, transport_class): - with mock.patch.object( - google.auth._default, "get_api_key_credentials", create=True - ) as get_api_key_credentials: - mock_cred = mock.Mock() - get_api_key_credentials.return_value = mock_cred - options = client_options.ClientOptions() - options.api_key = "api_key" - with mock.patch.object(transport_class, "__init__") as patched: - patched.return_value = None - client = client_class(client_options=options) - patched.assert_called_once_with( - credentials=mock_cred, - credentials_file=None, - host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE), - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) diff --git a/owl-bot-staging/google-cloud-dataplex/v1/tests/unit/gapic/dataplex_v1/test_data_scan_service.py b/owl-bot-staging/google-cloud-dataplex/v1/tests/unit/gapic/dataplex_v1/test_data_scan_service.py deleted file mode 100644 index 04fa20e609a4..000000000000 --- a/owl-bot-staging/google-cloud-dataplex/v1/tests/unit/gapic/dataplex_v1/test_data_scan_service.py +++ /dev/null @@ -1,6013 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# -import os -# try/except added for compatibility with python < 3.8 -try: - from unittest import mock - from unittest.mock import AsyncMock # pragma: NO COVER -except ImportError: # pragma: NO COVER - import mock - -import grpc -from grpc.experimental import aio -import math -import pytest -from google.api_core import api_core_version -from proto.marshal.rules.dates import DurationRule, TimestampRule -from proto.marshal.rules import wrappers - -try: - from google.auth.aio import credentials as ga_credentials_async - HAS_GOOGLE_AUTH_AIO = True -except ImportError: # pragma: NO COVER - HAS_GOOGLE_AUTH_AIO = False - -from google.api_core import client_options -from google.api_core import exceptions as core_exceptions -from google.api_core import future -from google.api_core import gapic_v1 -from google.api_core import grpc_helpers -from google.api_core import grpc_helpers_async -from google.api_core import operation -from google.api_core import operation_async # type: ignore -from google.api_core import operations_v1 -from google.api_core import path_template -from google.api_core import retry as retries -from google.auth import credentials as ga_credentials -from google.auth.exceptions import MutualTLSChannelError -from google.cloud.dataplex_v1.services.data_scan_service import DataScanServiceAsyncClient -from google.cloud.dataplex_v1.services.data_scan_service import DataScanServiceClient -from google.cloud.dataplex_v1.services.data_scan_service import pagers -from google.cloud.dataplex_v1.services.data_scan_service import transports -from google.cloud.dataplex_v1.types import data_discovery -from google.cloud.dataplex_v1.types import data_profile -from google.cloud.dataplex_v1.types import data_quality -from google.cloud.dataplex_v1.types import datascans -from google.cloud.dataplex_v1.types import processing -from google.cloud.dataplex_v1.types import resources -from google.cloud.dataplex_v1.types import service -from google.cloud.location import locations_pb2 -from google.iam.v1 import iam_policy_pb2 # type: ignore -from google.iam.v1 import options_pb2 # type: ignore -from google.iam.v1 import policy_pb2 # type: ignore -from google.longrunning import operations_pb2 # type: ignore -from google.oauth2 import service_account -from google.protobuf import empty_pb2 # type: ignore -from google.protobuf import field_mask_pb2 # type: ignore -from google.protobuf import timestamp_pb2 # type: ignore -import google.auth - - -async def mock_async_gen(data, chunk_size=1): - for i in range(0, len(data)): # pragma: NO COVER - chunk = data[i : i + chunk_size] - yield chunk.encode("utf-8") - -def client_cert_source_callback(): - return b"cert bytes", b"key bytes" - -# TODO: use async auth anon credentials by default once the minimum version of google-auth is upgraded. -# See related issue: https://github.com/googleapis/gapic-generator-python/issues/2107. -def async_anonymous_credentials(): - if HAS_GOOGLE_AUTH_AIO: - return ga_credentials_async.AnonymousCredentials() - return ga_credentials.AnonymousCredentials() - -# If default endpoint is localhost, then default mtls endpoint will be the same. -# This method modifies the default endpoint so the client can produce a different -# mtls endpoint for endpoint testing purposes. -def modify_default_endpoint(client): - return "foo.googleapis.com" if ("localhost" in client.DEFAULT_ENDPOINT) else client.DEFAULT_ENDPOINT - -# If default endpoint template is localhost, then default mtls endpoint will be the same. 
-# This method modifies the default endpoint template so the client can produce a different -# mtls endpoint for endpoint testing purposes. -def modify_default_endpoint_template(client): - return "test.{UNIVERSE_DOMAIN}" if ("localhost" in client._DEFAULT_ENDPOINT_TEMPLATE) else client._DEFAULT_ENDPOINT_TEMPLATE - - -def test__get_default_mtls_endpoint(): - api_endpoint = "example.googleapis.com" - api_mtls_endpoint = "example.mtls.googleapis.com" - sandbox_endpoint = "example.sandbox.googleapis.com" - sandbox_mtls_endpoint = "example.mtls.sandbox.googleapis.com" - non_googleapi = "api.example.com" - - assert DataScanServiceClient._get_default_mtls_endpoint(None) is None - assert DataScanServiceClient._get_default_mtls_endpoint(api_endpoint) == api_mtls_endpoint - assert DataScanServiceClient._get_default_mtls_endpoint(api_mtls_endpoint) == api_mtls_endpoint - assert DataScanServiceClient._get_default_mtls_endpoint(sandbox_endpoint) == sandbox_mtls_endpoint - assert DataScanServiceClient._get_default_mtls_endpoint(sandbox_mtls_endpoint) == sandbox_mtls_endpoint - assert DataScanServiceClient._get_default_mtls_endpoint(non_googleapi) == non_googleapi - -def test__read_environment_variables(): - assert DataScanServiceClient._read_environment_variables() == (False, "auto", None) - - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): - assert DataScanServiceClient._read_environment_variables() == (True, "auto", None) - - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "false"}): - assert DataScanServiceClient._read_environment_variables() == (False, "auto", None) - - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"}): - with pytest.raises(ValueError) as excinfo: - DataScanServiceClient._read_environment_variables() - assert str(excinfo.value) == "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" - - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): - assert DataScanServiceClient._read_environment_variables() == (False, "never", None) - - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): - assert DataScanServiceClient._read_environment_variables() == (False, "always", None) - - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"}): - assert DataScanServiceClient._read_environment_variables() == (False, "auto", None) - - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): - with pytest.raises(MutualTLSChannelError) as excinfo: - DataScanServiceClient._read_environment_variables() - assert str(excinfo.value) == "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" - - with mock.patch.dict(os.environ, {"GOOGLE_CLOUD_UNIVERSE_DOMAIN": "foo.com"}): - assert DataScanServiceClient._read_environment_variables() == (False, "auto", "foo.com") - -def test__get_client_cert_source(): - mock_provided_cert_source = mock.Mock() - mock_default_cert_source = mock.Mock() - - assert DataScanServiceClient._get_client_cert_source(None, False) is None - assert DataScanServiceClient._get_client_cert_source(mock_provided_cert_source, False) is None - assert DataScanServiceClient._get_client_cert_source(mock_provided_cert_source, True) == mock_provided_cert_source - - with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=True): - with mock.patch('google.auth.transport.mtls.default_client_cert_source', 
return_value=mock_default_cert_source): - assert DataScanServiceClient._get_client_cert_source(None, True) is mock_default_cert_source - assert DataScanServiceClient._get_client_cert_source(mock_provided_cert_source, "true") is mock_provided_cert_source - -@mock.patch.object(DataScanServiceClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(DataScanServiceClient)) -@mock.patch.object(DataScanServiceAsyncClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(DataScanServiceAsyncClient)) -def test__get_api_endpoint(): - api_override = "foo.com" - mock_client_cert_source = mock.Mock() - default_universe = DataScanServiceClient._DEFAULT_UNIVERSE - default_endpoint = DataScanServiceClient._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=default_universe) - mock_universe = "bar.com" - mock_endpoint = DataScanServiceClient._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=mock_universe) - - assert DataScanServiceClient._get_api_endpoint(api_override, mock_client_cert_source, default_universe, "always") == api_override - assert DataScanServiceClient._get_api_endpoint(None, mock_client_cert_source, default_universe, "auto") == DataScanServiceClient.DEFAULT_MTLS_ENDPOINT - assert DataScanServiceClient._get_api_endpoint(None, None, default_universe, "auto") == default_endpoint - assert DataScanServiceClient._get_api_endpoint(None, None, default_universe, "always") == DataScanServiceClient.DEFAULT_MTLS_ENDPOINT - assert DataScanServiceClient._get_api_endpoint(None, mock_client_cert_source, default_universe, "always") == DataScanServiceClient.DEFAULT_MTLS_ENDPOINT - assert DataScanServiceClient._get_api_endpoint(None, None, mock_universe, "never") == mock_endpoint - assert DataScanServiceClient._get_api_endpoint(None, None, default_universe, "never") == default_endpoint - - with pytest.raises(MutualTLSChannelError) as excinfo: - DataScanServiceClient._get_api_endpoint(None, mock_client_cert_source, mock_universe, "auto") - assert str(excinfo.value) == "mTLS is not supported in any universe other than googleapis.com." - - -def test__get_universe_domain(): - client_universe_domain = "foo.com" - universe_domain_env = "bar.com" - - assert DataScanServiceClient._get_universe_domain(client_universe_domain, universe_domain_env) == client_universe_domain - assert DataScanServiceClient._get_universe_domain(None, universe_domain_env) == universe_domain_env - assert DataScanServiceClient._get_universe_domain(None, None) == DataScanServiceClient._DEFAULT_UNIVERSE - - with pytest.raises(ValueError) as excinfo: - DataScanServiceClient._get_universe_domain("", None) - assert str(excinfo.value) == "Universe Domain cannot be an empty string." 
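-
-
-# A hedged, illustrative sketch (not part of the generated suite): the same
-# precedence checked above, seen end to end when a client is constructed --
-# an explicit ClientOptions.universe_domain is expected to win over the
-# GOOGLE_CLOUD_UNIVERSE_DOMAIN environment variable. Assumes a google-api-core
-# new enough to expose ClientOptions.universe_domain.
-def test__get_universe_domain_client_option_wins_sketch():
- with mock.patch.dict(os.environ, {"GOOGLE_CLOUD_UNIVERSE_DOMAIN": "env.example.com"}):
- options = client_options.ClientOptions(universe_domain="opt.example.com")
- client = DataScanServiceClient(client_options=options, credentials=ga_credentials.AnonymousCredentials())
- # The client option, not the environment variable, should decide the universe.
- assert client.universe_domain == "opt.example.com"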
- - -@pytest.mark.parametrize("client_class,transport_name", [ - (DataScanServiceClient, "grpc"), - (DataScanServiceAsyncClient, "grpc_asyncio"), -]) -def test_data_scan_service_client_from_service_account_info(client_class, transport_name): - creds = ga_credentials.AnonymousCredentials() - with mock.patch.object(service_account.Credentials, 'from_service_account_info') as factory: - factory.return_value = creds - info = {"valid": True} - client = client_class.from_service_account_info(info, transport=transport_name) - assert client.transport._credentials == creds - assert isinstance(client, client_class) - - assert client.transport._host == ( - 'dataplex.googleapis.com:443' - ) - - -@pytest.mark.parametrize("transport_class,transport_name", [ - (transports.DataScanServiceGrpcTransport, "grpc"), - (transports.DataScanServiceGrpcAsyncIOTransport, "grpc_asyncio"), -]) -def test_data_scan_service_client_service_account_always_use_jwt(transport_class, transport_name): - with mock.patch.object(service_account.Credentials, 'with_always_use_jwt_access', create=True) as use_jwt: - creds = service_account.Credentials(None, None, None) - transport = transport_class(credentials=creds, always_use_jwt_access=True) - use_jwt.assert_called_once_with(True) - - with mock.patch.object(service_account.Credentials, 'with_always_use_jwt_access', create=True) as use_jwt: - creds = service_account.Credentials(None, None, None) - transport = transport_class(credentials=creds, always_use_jwt_access=False) - use_jwt.assert_not_called() - - -@pytest.mark.parametrize("client_class,transport_name", [ - (DataScanServiceClient, "grpc"), - (DataScanServiceAsyncClient, "grpc_asyncio"), -]) -def test_data_scan_service_client_from_service_account_file(client_class, transport_name): - creds = ga_credentials.AnonymousCredentials() - with mock.patch.object(service_account.Credentials, 'from_service_account_file') as factory: - factory.return_value = creds - client = client_class.from_service_account_file("dummy/file/path.json", transport=transport_name) - assert client.transport._credentials == creds - assert isinstance(client, client_class) - - client = client_class.from_service_account_json("dummy/file/path.json", transport=transport_name) - assert client.transport._credentials == creds - assert isinstance(client, client_class) - - assert client.transport._host == ( - 'dataplex.googleapis.com:443' - ) - - -def test_data_scan_service_client_get_transport_class(): - transport = DataScanServiceClient.get_transport_class() - available_transports = [ - transports.DataScanServiceGrpcTransport, - ] - assert transport in available_transports - - transport = DataScanServiceClient.get_transport_class("grpc") - assert transport == transports.DataScanServiceGrpcTransport - - -@pytest.mark.parametrize("client_class,transport_class,transport_name", [ - (DataScanServiceClient, transports.DataScanServiceGrpcTransport, "grpc"), - (DataScanServiceAsyncClient, transports.DataScanServiceGrpcAsyncIOTransport, "grpc_asyncio"), -]) -@mock.patch.object(DataScanServiceClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(DataScanServiceClient)) -@mock.patch.object(DataScanServiceAsyncClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(DataScanServiceAsyncClient)) -def test_data_scan_service_client_client_options(client_class, transport_class, transport_name): - # Check that if channel is provided we won't create a new one. 
- with mock.patch.object(DataScanServiceClient, 'get_transport_class') as gtc: - transport = transport_class( - credentials=ga_credentials.AnonymousCredentials() - ) - client = client_class(transport=transport) - gtc.assert_not_called() - - # Check that if channel is provided via str we will create a new one. - with mock.patch.object(DataScanServiceClient, 'get_transport_class') as gtc: - client = client_class(transport=transport_name) - gtc.assert_called() - - # Check the case api_endpoint is provided. - options = client_options.ClientOptions(api_endpoint="squid.clam.whelk") - with mock.patch.object(transport_class, '__init__') as patched: - patched.return_value = None - client = client_class(transport=transport_name, client_options=options) - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host="squid.clam.whelk", - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - - # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is - # "never". - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): - with mock.patch.object(transport_class, '__init__') as patched: - patched.return_value = None - client = client_class(transport=transport_name) - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE), - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - - # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is - # "always". - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): - with mock.patch.object(transport_class, '__init__') as patched: - patched.return_value = None - client = client_class(transport=transport_name) - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host=client.DEFAULT_MTLS_ENDPOINT, - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - - # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has - # unsupported value. - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): - with pytest.raises(MutualTLSChannelError) as excinfo: - client = client_class(transport=transport_name) - assert str(excinfo.value) == "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" - - # Check the case GOOGLE_API_USE_CLIENT_CERTIFICATE has unsupported value. 
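- # (Only the lowercase strings "true" and "false" are accepted for this
- # variable; anything else is expected to surface as the ValueError asserted
- # below.)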
- with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"}):
- with pytest.raises(ValueError) as excinfo:
- client = client_class(transport=transport_name)
- assert str(excinfo.value) == "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`"
-
- # Check the case quota_project_id is provided
- options = client_options.ClientOptions(quota_project_id="octopus")
- with mock.patch.object(transport_class, '__init__') as patched:
- patched.return_value = None
- client = client_class(client_options=options, transport=transport_name)
- patched.assert_called_once_with(
- credentials=None,
- credentials_file=None,
- host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE),
- scopes=None,
- client_cert_source_for_mtls=None,
- quota_project_id="octopus",
- client_info=transports.base.DEFAULT_CLIENT_INFO,
- always_use_jwt_access=True,
- api_audience=None,
- )
- # Check the case api_audience is provided
- options = client_options.ClientOptions(api_audience="https://language.googleapis.com")
- with mock.patch.object(transport_class, '__init__') as patched:
- patched.return_value = None
- client = client_class(client_options=options, transport=transport_name)
- patched.assert_called_once_with(
- credentials=None,
- credentials_file=None,
- host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE),
- scopes=None,
- client_cert_source_for_mtls=None,
- quota_project_id=None,
- client_info=transports.base.DEFAULT_CLIENT_INFO,
- always_use_jwt_access=True,
- api_audience="https://language.googleapis.com"
- )
-
-@pytest.mark.parametrize("client_class,transport_class,transport_name,use_client_cert_env", [
- (DataScanServiceClient, transports.DataScanServiceGrpcTransport, "grpc", "true"),
- (DataScanServiceAsyncClient, transports.DataScanServiceGrpcAsyncIOTransport, "grpc_asyncio", "true"),
- (DataScanServiceClient, transports.DataScanServiceGrpcTransport, "grpc", "false"),
- (DataScanServiceAsyncClient, transports.DataScanServiceGrpcAsyncIOTransport, "grpc_asyncio", "false"),
-])
-@mock.patch.object(DataScanServiceClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(DataScanServiceClient))
-@mock.patch.object(DataScanServiceAsyncClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(DataScanServiceAsyncClient))
-@mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"})
-def test_data_scan_service_client_mtls_env_auto(client_class, transport_class, transport_name, use_client_cert_env):
- # This tests the endpoint autoswitch behavior. The endpoint is autoswitched to the
- # default mtls endpoint if GOOGLE_API_USE_CLIENT_CERTIFICATE is "true" and a client cert exists.
-
- # Check the case client_cert_source is provided. Whether client cert is used depends on
- # GOOGLE_API_USE_CLIENT_CERTIFICATE value.
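- # (Hedged summary of the expectation: with "true" the client should flip to
- # DEFAULT_MTLS_ENDPOINT and forward the callback as
- # client_cert_source_for_mtls; with "false" both fall back to the plain
- # defaults.)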
- with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}): - options = client_options.ClientOptions(client_cert_source=client_cert_source_callback) - with mock.patch.object(transport_class, '__init__') as patched: - patched.return_value = None - client = client_class(client_options=options, transport=transport_name) - - if use_client_cert_env == "false": - expected_client_cert_source = None - expected_host = client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE) - else: - expected_client_cert_source = client_cert_source_callback - expected_host = client.DEFAULT_MTLS_ENDPOINT - - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host=expected_host, - scopes=None, - client_cert_source_for_mtls=expected_client_cert_source, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - - # Check the case ADC client cert is provided. Whether client cert is used depends on - # GOOGLE_API_USE_CLIENT_CERTIFICATE value. - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}): - with mock.patch.object(transport_class, '__init__') as patched: - with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=True): - with mock.patch('google.auth.transport.mtls.default_client_cert_source', return_value=client_cert_source_callback): - if use_client_cert_env == "false": - expected_host = client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE) - expected_client_cert_source = None - else: - expected_host = client.DEFAULT_MTLS_ENDPOINT - expected_client_cert_source = client_cert_source_callback - - patched.return_value = None - client = client_class(transport=transport_name) - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host=expected_host, - scopes=None, - client_cert_source_for_mtls=expected_client_cert_source, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - - # Check the case client_cert_source and ADC client cert are not provided. - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}): - with mock.patch.object(transport_class, '__init__') as patched: - with mock.patch("google.auth.transport.mtls.has_default_client_cert_source", return_value=False): - patched.return_value = None - client = client_class(transport=transport_name) - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE), - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - - -@pytest.mark.parametrize("client_class", [ - DataScanServiceClient, DataScanServiceAsyncClient -]) -@mock.patch.object(DataScanServiceClient, "DEFAULT_ENDPOINT", modify_default_endpoint(DataScanServiceClient)) -@mock.patch.object(DataScanServiceAsyncClient, "DEFAULT_ENDPOINT", modify_default_endpoint(DataScanServiceAsyncClient)) -def test_data_scan_service_client_get_mtls_endpoint_and_cert_source(client_class): - mock_client_cert_source = mock.Mock() - - # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "true". 
- with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): - mock_api_endpoint = "foo" - options = client_options.ClientOptions(client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint) - api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source(options) - assert api_endpoint == mock_api_endpoint - assert cert_source == mock_client_cert_source - - # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "false". - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "false"}): - mock_client_cert_source = mock.Mock() - mock_api_endpoint = "foo" - options = client_options.ClientOptions(client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint) - api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source(options) - assert api_endpoint == mock_api_endpoint - assert cert_source is None - - # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "never". - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): - api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() - assert api_endpoint == client_class.DEFAULT_ENDPOINT - assert cert_source is None - - # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "always". - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): - api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() - assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT - assert cert_source is None - - # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert doesn't exist. - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): - with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=False): - api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() - assert api_endpoint == client_class.DEFAULT_ENDPOINT - assert cert_source is None - - # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert exists. - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): - with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=True): - with mock.patch('google.auth.transport.mtls.default_client_cert_source', return_value=mock_client_cert_source): - api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() - assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT - assert cert_source == mock_client_cert_source - - # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has - # unsupported value. - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): - with pytest.raises(MutualTLSChannelError) as excinfo: - client_class.get_mtls_endpoint_and_cert_source() - - assert str(excinfo.value) == "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" - - # Check the case GOOGLE_API_USE_CLIENT_CERTIFICATE has unsupported value. 
- with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"}): - with pytest.raises(ValueError) as excinfo: - client_class.get_mtls_endpoint_and_cert_source() - - assert str(excinfo.value) == "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" - -@pytest.mark.parametrize("client_class", [ - DataScanServiceClient, DataScanServiceAsyncClient -]) -@mock.patch.object(DataScanServiceClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(DataScanServiceClient)) -@mock.patch.object(DataScanServiceAsyncClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(DataScanServiceAsyncClient)) -def test_data_scan_service_client_client_api_endpoint(client_class): - mock_client_cert_source = client_cert_source_callback - api_override = "foo.com" - default_universe = DataScanServiceClient._DEFAULT_UNIVERSE - default_endpoint = DataScanServiceClient._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=default_universe) - mock_universe = "bar.com" - mock_endpoint = DataScanServiceClient._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=mock_universe) - - # If ClientOptions.api_endpoint is set and GOOGLE_API_USE_CLIENT_CERTIFICATE="true", - # use ClientOptions.api_endpoint as the api endpoint regardless. - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): - with mock.patch("google.auth.transport.requests.AuthorizedSession.configure_mtls_channel"): - options = client_options.ClientOptions(client_cert_source=mock_client_cert_source, api_endpoint=api_override) - client = client_class(client_options=options, credentials=ga_credentials.AnonymousCredentials()) - assert client.api_endpoint == api_override - - # If ClientOptions.api_endpoint is not set and GOOGLE_API_USE_MTLS_ENDPOINT="never", - # use the _DEFAULT_ENDPOINT_TEMPLATE populated with GDU as the api endpoint. - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): - client = client_class(credentials=ga_credentials.AnonymousCredentials()) - assert client.api_endpoint == default_endpoint - - # If ClientOptions.api_endpoint is not set and GOOGLE_API_USE_MTLS_ENDPOINT="always", - # use the DEFAULT_MTLS_ENDPOINT as the api endpoint. - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): - client = client_class(credentials=ga_credentials.AnonymousCredentials()) - assert client.api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT - - # If ClientOptions.api_endpoint is not set, GOOGLE_API_USE_MTLS_ENDPOINT="auto" (default), - # GOOGLE_API_USE_CLIENT_CERTIFICATE="false" (default), default cert source doesn't exist, - # and ClientOptions.universe_domain="bar.com", - # use the _DEFAULT_ENDPOINT_TEMPLATE populated with universe domain as the api endpoint. 
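- # (The hasattr() probe below is a compatibility hedge: older google-api-core
- # releases ship a ClientOptions without a universe_domain field, in which
- # case the default endpoint and universe are asserted instead.)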
- options = client_options.ClientOptions() - universe_exists = hasattr(options, "universe_domain") - if universe_exists: - options = client_options.ClientOptions(universe_domain=mock_universe) - client = client_class(client_options=options, credentials=ga_credentials.AnonymousCredentials()) - else: - client = client_class(client_options=options, credentials=ga_credentials.AnonymousCredentials()) - assert client.api_endpoint == (mock_endpoint if universe_exists else default_endpoint) - assert client.universe_domain == (mock_universe if universe_exists else default_universe) - - # If ClientOptions does not have a universe domain attribute and GOOGLE_API_USE_MTLS_ENDPOINT="never", - # use the _DEFAULT_ENDPOINT_TEMPLATE populated with GDU as the api endpoint. - options = client_options.ClientOptions() - if hasattr(options, "universe_domain"): - delattr(options, "universe_domain") - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): - client = client_class(client_options=options, credentials=ga_credentials.AnonymousCredentials()) - assert client.api_endpoint == default_endpoint - - -@pytest.mark.parametrize("client_class,transport_class,transport_name", [ - (DataScanServiceClient, transports.DataScanServiceGrpcTransport, "grpc"), - (DataScanServiceAsyncClient, transports.DataScanServiceGrpcAsyncIOTransport, "grpc_asyncio"), -]) -def test_data_scan_service_client_client_options_scopes(client_class, transport_class, transport_name): - # Check the case scopes are provided. - options = client_options.ClientOptions( - scopes=["1", "2"], - ) - with mock.patch.object(transport_class, '__init__') as patched: - patched.return_value = None - client = client_class(client_options=options, transport=transport_name) - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE), - scopes=["1", "2"], - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - -@pytest.mark.parametrize("client_class,transport_class,transport_name,grpc_helpers", [ - (DataScanServiceClient, transports.DataScanServiceGrpcTransport, "grpc", grpc_helpers), - (DataScanServiceAsyncClient, transports.DataScanServiceGrpcAsyncIOTransport, "grpc_asyncio", grpc_helpers_async), -]) -def test_data_scan_service_client_client_options_credentials_file(client_class, transport_class, transport_name, grpc_helpers): - # Check the case credentials file is provided. 
- options = client_options.ClientOptions( - credentials_file="credentials.json" - ) - - with mock.patch.object(transport_class, '__init__') as patched: - patched.return_value = None - client = client_class(client_options=options, transport=transport_name) - patched.assert_called_once_with( - credentials=None, - credentials_file="credentials.json", - host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE), - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - -def test_data_scan_service_client_client_options_from_dict(): - with mock.patch('google.cloud.dataplex_v1.services.data_scan_service.transports.DataScanServiceGrpcTransport.__init__') as grpc_transport: - grpc_transport.return_value = None - client = DataScanServiceClient( - client_options={'api_endpoint': 'squid.clam.whelk'} - ) - grpc_transport.assert_called_once_with( - credentials=None, - credentials_file=None, - host="squid.clam.whelk", - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - - -@pytest.mark.parametrize("client_class,transport_class,transport_name,grpc_helpers", [ - (DataScanServiceClient, transports.DataScanServiceGrpcTransport, "grpc", grpc_helpers), - (DataScanServiceAsyncClient, transports.DataScanServiceGrpcAsyncIOTransport, "grpc_asyncio", grpc_helpers_async), -]) -def test_data_scan_service_client_create_channel_credentials_file(client_class, transport_class, transport_name, grpc_helpers): - # Check the case credentials file is provided. - options = client_options.ClientOptions( - credentials_file="credentials.json" - ) - - with mock.patch.object(transport_class, '__init__') as patched: - patched.return_value = None - client = client_class(client_options=options, transport=transport_name) - patched.assert_called_once_with( - credentials=None, - credentials_file="credentials.json", - host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE), - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - - # test that the credentials from file are saved and used as the credentials. 
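- # (Concretely: grpc_helpers.create_channel should receive the credentials
- # returned by google.auth.load_credentials_from_file for "credentials.json",
- # not the ADC credentials that google.auth.default would hand back.)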
- with mock.patch.object( - google.auth, "load_credentials_from_file", autospec=True - ) as load_creds, mock.patch.object( - google.auth, "default", autospec=True - ) as adc, mock.patch.object( - grpc_helpers, "create_channel" - ) as create_channel: - creds = ga_credentials.AnonymousCredentials() - file_creds = ga_credentials.AnonymousCredentials() - load_creds.return_value = (file_creds, None) - adc.return_value = (creds, None) - client = client_class(client_options=options, transport=transport_name) - create_channel.assert_called_with( - "dataplex.googleapis.com:443", - credentials=file_creds, - credentials_file=None, - quota_project_id=None, - default_scopes=( - 'https://www.googleapis.com/auth/cloud-platform', -), - scopes=None, - default_host="dataplex.googleapis.com", - ssl_credentials=None, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ], - ) - - -@pytest.mark.parametrize("request_type", [ - datascans.CreateDataScanRequest, - dict, -]) -def test_create_data_scan(request_type, transport: str = 'grpc'): - client = DataScanServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_data_scan), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = operations_pb2.Operation(name='operations/spam') - response = client.create_data_scan(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - request = datascans.CreateDataScanRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, future.Future) - - -def test_create_data_scan_non_empty_request_with_auto_populated_field(): - # This test is a coverage failsafe to make sure that UUID4 fields are - # automatically populated, according to AIP-4235, with non-empty requests. - client = DataScanServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. - request = datascans.CreateDataScanRequest( - parent='parent_value', - data_scan_id='data_scan_id_value', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_data_scan), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. 
- client.create_data_scan(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == datascans.CreateDataScanRequest( - parent='parent_value', - data_scan_id='data_scan_id_value', - ) - -def test_create_data_scan_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = DataScanServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.create_data_scan in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.create_data_scan] = mock_rpc - request = {} - client.create_data_scan(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - # Operation methods call wrapper_fn to build a cached - # client._transport.operations_client instance on first rpc call. - # Subsequent calls should use the cached wrapper - wrapper_fn.reset_mock() - - client.create_data_scan(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_create_data_scan_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: - client = DataScanServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._client._transport.create_data_scan in client._client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.AsyncMock() - mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[client._client._transport.create_data_scan] = mock_rpc - - request = {} - await client.create_data_scan(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - # Operation methods call wrapper_fn to build a cached - # client._transport.operations_client instance on first rpc call. - # Subsequent calls should use the cached wrapper - wrapper_fn.reset_mock() - - await client.create_data_scan(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_create_data_scan_async(transport: str = 'grpc_asyncio', request_type=datascans.CreateDataScanRequest): - client = DataScanServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.create_data_scan), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name='operations/spam') - ) - response = await client.create_data_scan(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - request = datascans.CreateDataScanRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, future.Future) - - -@pytest.mark.asyncio -async def test_create_data_scan_async_from_dict(): - await test_create_data_scan_async(request_type=dict) - -def test_create_data_scan_field_headers(): - client = DataScanServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = datascans.CreateDataScanRequest() - - request.parent = 'parent_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_data_scan), - '__call__') as call: - call.return_value = operations_pb2.Operation(name='operations/op') - client.create_data_scan(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'parent=parent_value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_create_data_scan_field_headers_async(): - client = DataScanServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = datascans.CreateDataScanRequest() - - request.parent = 'parent_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_data_scan), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(operations_pb2.Operation(name='operations/op')) - await client.create_data_scan(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'parent=parent_value', - ) in kw['metadata'] - - -def test_create_data_scan_flattened(): - client = DataScanServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_data_scan), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = operations_pb2.Operation(name='operations/op') - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.create_data_scan( - parent='parent_value', - data_scan=datascans.DataScan(name='name_value'), - data_scan_id='data_scan_id_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. 
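- # (The flattened keyword arguments are expected to be folded into a single
- # CreateDataScanRequest; each field is read back off args[0] below to
- # confirm that mapping.)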
- assert len(call.mock_calls) == 1
- _, args, _ = call.mock_calls[0]
- arg = args[0].parent
- mock_val = 'parent_value'
- assert arg == mock_val
- arg = args[0].data_scan
- mock_val = datascans.DataScan(name='name_value')
- assert arg == mock_val
- arg = args[0].data_scan_id
- mock_val = 'data_scan_id_value'
- assert arg == mock_val
-
-
-def test_create_data_scan_flattened_error():
- client = DataScanServiceClient(
- credentials=ga_credentials.AnonymousCredentials(),
- )
-
- # Attempting to call a method with both a request object and flattened
- # fields is an error.
- with pytest.raises(ValueError):
- client.create_data_scan(
- datascans.CreateDataScanRequest(),
- parent='parent_value',
- data_scan=datascans.DataScan(name='name_value'),
- data_scan_id='data_scan_id_value',
- )
-
-@pytest.mark.asyncio
-async def test_create_data_scan_flattened_async():
- client = DataScanServiceAsyncClient(
- credentials=async_anonymous_credentials(),
- )
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client.transport.create_data_scan),
- '__call__') as call:
- # Designate an appropriate return value for the call.
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
- operations_pb2.Operation(name='operations/spam')
- )
- # Call the method with a truthy value for each flattened field,
- # using the keyword arguments to the method.
- response = await client.create_data_scan(
- parent='parent_value',
- data_scan=datascans.DataScan(name='name_value'),
- data_scan_id='data_scan_id_value',
- )
-
- # Establish that the underlying call was made with the expected
- # request object values.
- assert len(call.mock_calls)
- _, args, _ = call.mock_calls[0]
- arg = args[0].parent
- mock_val = 'parent_value'
- assert arg == mock_val
- arg = args[0].data_scan
- mock_val = datascans.DataScan(name='name_value')
- assert arg == mock_val
- arg = args[0].data_scan_id
- mock_val = 'data_scan_id_value'
- assert arg == mock_val
-
-@pytest.mark.asyncio
-async def test_create_data_scan_flattened_error_async():
- client = DataScanServiceAsyncClient(
- credentials=async_anonymous_credentials(),
- )
-
- # Attempting to call a method with both a request object and flattened
- # fields is an error.
- with pytest.raises(ValueError):
- await client.create_data_scan(
- datascans.CreateDataScanRequest(),
- parent='parent_value',
- data_scan=datascans.DataScan(name='name_value'),
- data_scan_id='data_scan_id_value',
- )
-
-
-@pytest.mark.parametrize("request_type", [
- datascans.UpdateDataScanRequest,
- dict,
-])
-def test_update_data_scan(request_type, transport: str = 'grpc'):
- client = DataScanServiceClient(
- credentials=ga_credentials.AnonymousCredentials(),
- transport=transport,
- )
-
- # Everything is optional in proto3 as far as the runtime is concerned,
- # and we are mocking out the actual API, so just send an empty request.
- request = request_type()
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client.transport.update_data_scan),
- '__call__') as call:
- # Designate an appropriate return value for the call.
- call.return_value = operations_pb2.Operation(name='operations/spam')
- response = client.update_data_scan(request)
-
- # Establish that the underlying gRPC stub method was called.
- assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - request = datascans.UpdateDataScanRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, future.Future) - - -def test_update_data_scan_non_empty_request_with_auto_populated_field(): - # This test is a coverage failsafe to make sure that UUID4 fields are - # automatically populated, according to AIP-4235, with non-empty requests. - client = DataScanServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. - request = datascans.UpdateDataScanRequest( - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_data_scan), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client.update_data_scan(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == datascans.UpdateDataScanRequest( - ) - -def test_update_data_scan_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = DataScanServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.update_data_scan in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.update_data_scan] = mock_rpc - request = {} - client.update_data_scan(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - # Operation methods call wrapper_fn to build a cached - # client._transport.operations_client instance on first rpc call. 
- # Subsequent calls should use the cached wrapper - wrapper_fn.reset_mock() - - client.update_data_scan(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_update_data_scan_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: - client = DataScanServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._client._transport.update_data_scan in client._client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.AsyncMock() - mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[client._client._transport.update_data_scan] = mock_rpc - - request = {} - await client.update_data_scan(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - # Operation methods call wrapper_fn to build a cached - # client._transport.operations_client instance on first rpc call. - # Subsequent calls should use the cached wrapper - wrapper_fn.reset_mock() - - await client.update_data_scan(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_update_data_scan_async(transport: str = 'grpc_asyncio', request_type=datascans.UpdateDataScanRequest): - client = DataScanServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_data_scan), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name='operations/spam') - ) - response = await client.update_data_scan(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - request = datascans.UpdateDataScanRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, future.Future) - - -@pytest.mark.asyncio -async def test_update_data_scan_async_from_dict(): - await test_update_data_scan_async(request_type=dict) - -def test_update_data_scan_field_headers(): - client = DataScanServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = datascans.UpdateDataScanRequest() - - request.data_scan.name = 'name_value' - - # Mock the actual call within the gRPC stub, and fake the request. 
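-    # The assertion below inspects the x-goog-request-params metadata entry,
-    # which is how GAPIC clients propagate URI-bound request fields (here
-    # data_scan.name) to the server for routing.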
- with mock.patch.object( - type(client.transport.update_data_scan), - '__call__') as call: - call.return_value = operations_pb2.Operation(name='operations/op') - client.update_data_scan(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'data_scan.name=name_value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_update_data_scan_field_headers_async(): - client = DataScanServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = datascans.UpdateDataScanRequest() - - request.data_scan.name = 'name_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_data_scan), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(operations_pb2.Operation(name='operations/op')) - await client.update_data_scan(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'data_scan.name=name_value', - ) in kw['metadata'] - - -def test_update_data_scan_flattened(): - client = DataScanServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_data_scan), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = operations_pb2.Operation(name='operations/op') - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.update_data_scan( - data_scan=datascans.DataScan(name='name_value'), - update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - arg = args[0].data_scan - mock_val = datascans.DataScan(name='name_value') - assert arg == mock_val - arg = args[0].update_mask - mock_val = field_mask_pb2.FieldMask(paths=['paths_value']) - assert arg == mock_val - - -def test_update_data_scan_flattened_error(): - client = DataScanServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.update_data_scan( - datascans.UpdateDataScanRequest(), - data_scan=datascans.DataScan(name='name_value'), - update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), - ) - -@pytest.mark.asyncio -async def test_update_data_scan_flattened_async(): - client = DataScanServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_data_scan), - '__call__') as call: - # Designate an appropriate return value for the call. 
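-        # grpc_helpers_async.FakeUnaryUnaryCall wraps the canned response so
-        # that awaiting the mocked stub yields it, standing in for a real
-        # async unary-unary gRPC call.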
-        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
-            operations_pb2.Operation(name='operations/spam')
-        )
-        # Call the method with a truthy value for each flattened field,
-        # using the keyword arguments to the method.
-        response = await client.update_data_scan(
-            data_scan=datascans.DataScan(name='name_value'),
-            update_mask=field_mask_pb2.FieldMask(paths=['paths_value']),
-        )
-
-        # Establish that the underlying call was made with the expected
-        # request object values.
-        assert len(call.mock_calls)
-        _, args, _ = call.mock_calls[0]
-        arg = args[0].data_scan
-        mock_val = datascans.DataScan(name='name_value')
-        assert arg == mock_val
-        arg = args[0].update_mask
-        mock_val = field_mask_pb2.FieldMask(paths=['paths_value'])
-        assert arg == mock_val
-
-@pytest.mark.asyncio
-async def test_update_data_scan_flattened_error_async():
-    client = DataScanServiceAsyncClient(
-        credentials=async_anonymous_credentials(),
-    )
-
-    # Attempting to call a method with both a request object and flattened
-    # fields is an error.
-    with pytest.raises(ValueError):
-        await client.update_data_scan(
-            datascans.UpdateDataScanRequest(),
-            data_scan=datascans.DataScan(name='name_value'),
-            update_mask=field_mask_pb2.FieldMask(paths=['paths_value']),
-        )
-
-
-@pytest.mark.parametrize("request_type", [
-    datascans.DeleteDataScanRequest,
-    dict,
-])
-def test_delete_data_scan(request_type, transport: str = 'grpc'):
-    client = DataScanServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport=transport,
-    )
-
-    # Everything is optional in proto3 as far as the runtime is concerned,
-    # and we are mocking out the actual API, so just send an empty request.
-    request = request_type()
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.delete_data_scan),
-            '__call__') as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = operations_pb2.Operation(name='operations/spam')
-        response = client.delete_data_scan(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert len(call.mock_calls) == 1
-        _, args, _ = call.mock_calls[0]
-        request = datascans.DeleteDataScanRequest()
-        assert args[0] == request
-
-        # Establish that the response is the type that we expect.
-        assert isinstance(response, future.Future)
-
-
-def test_delete_data_scan_non_empty_request_with_auto_populated_field():
-    # This test is a coverage failsafe to make sure that UUID4 fields are
-    # automatically populated, according to AIP-4235, with non-empty requests.
-    client = DataScanServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport='grpc',
-    )
-
-    # Populate all string fields in the request which are not UUID4
-    # since we want to check that UUID4 are populated automatically
-    # if they meet the requirements of AIP 4235.
-    request = datascans.DeleteDataScanRequest(
-        name='name_value',
-    )
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.delete_data_scan),
-            '__call__') as call:
-        call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string.
- client.delete_data_scan(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == datascans.DeleteDataScanRequest( - name='name_value', - ) - -def test_delete_data_scan_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = DataScanServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.delete_data_scan in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.delete_data_scan] = mock_rpc - request = {} - client.delete_data_scan(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - # Operation methods call wrapper_fn to build a cached - # client._transport.operations_client instance on first rpc call. - # Subsequent calls should use the cached wrapper - wrapper_fn.reset_mock() - - client.delete_data_scan(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_delete_data_scan_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: - client = DataScanServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._client._transport.delete_data_scan in client._client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.AsyncMock() - mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[client._client._transport.delete_data_scan] = mock_rpc - - request = {} - await client.delete_data_scan(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - # Operation methods call wrapper_fn to build a cached - # client._transport.operations_client instance on first rpc call. - # Subsequent calls should use the cached wrapper - wrapper_fn.reset_mock() - - await client.delete_data_scan(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_delete_data_scan_async(transport: str = 'grpc_asyncio', request_type=datascans.DeleteDataScanRequest): - client = DataScanServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.delete_data_scan), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name='operations/spam') - ) - response = await client.delete_data_scan(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - request = datascans.DeleteDataScanRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, future.Future) - - -@pytest.mark.asyncio -async def test_delete_data_scan_async_from_dict(): - await test_delete_data_scan_async(request_type=dict) - -def test_delete_data_scan_field_headers(): - client = DataScanServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = datascans.DeleteDataScanRequest() - - request.name = 'name_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_data_scan), - '__call__') as call: - call.return_value = operations_pb2.Operation(name='operations/op') - client.delete_data_scan(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'name=name_value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_delete_data_scan_field_headers_async(): - client = DataScanServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = datascans.DeleteDataScanRequest() - - request.name = 'name_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_data_scan), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(operations_pb2.Operation(name='operations/op')) - await client.delete_data_scan(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'name=name_value', - ) in kw['metadata'] - - -def test_delete_data_scan_flattened(): - client = DataScanServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_data_scan), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = operations_pb2.Operation(name='operations/op') - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.delete_data_scan( - name='name_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. 
-        assert len(call.mock_calls) == 1
-        _, args, _ = call.mock_calls[0]
-        arg = args[0].name
-        mock_val = 'name_value'
-        assert arg == mock_val
-
-
-def test_delete_data_scan_flattened_error():
-    client = DataScanServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-    )
-
-    # Attempting to call a method with both a request object and flattened
-    # fields is an error.
-    with pytest.raises(ValueError):
-        client.delete_data_scan(
-            datascans.DeleteDataScanRequest(),
-            name='name_value',
-        )
-
-@pytest.mark.asyncio
-async def test_delete_data_scan_flattened_async():
-    client = DataScanServiceAsyncClient(
-        credentials=async_anonymous_credentials(),
-    )
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.delete_data_scan),
-            '__call__') as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
-            operations_pb2.Operation(name='operations/spam')
-        )
-        # Call the method with a truthy value for each flattened field,
-        # using the keyword arguments to the method.
-        response = await client.delete_data_scan(
-            name='name_value',
-        )
-
-        # Establish that the underlying call was made with the expected
-        # request object values.
-        assert len(call.mock_calls)
-        _, args, _ = call.mock_calls[0]
-        arg = args[0].name
-        mock_val = 'name_value'
-        assert arg == mock_val
-
-@pytest.mark.asyncio
-async def test_delete_data_scan_flattened_error_async():
-    client = DataScanServiceAsyncClient(
-        credentials=async_anonymous_credentials(),
-    )
-
-    # Attempting to call a method with both a request object and flattened
-    # fields is an error.
-    with pytest.raises(ValueError):
-        await client.delete_data_scan(
-            datascans.DeleteDataScanRequest(),
-            name='name_value',
-        )
-
-
-@pytest.mark.parametrize("request_type", [
-    datascans.GetDataScanRequest,
-    dict,
-])
-def test_get_data_scan(request_type, transport: str = 'grpc'):
-    client = DataScanServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport=transport,
-    )
-
-    # Everything is optional in proto3 as far as the runtime is concerned,
-    # and we are mocking out the actual API, so just send an empty request.
-    request = request_type()
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.get_data_scan),
-            '__call__') as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = datascans.DataScan(
-            name='name_value',
-            uid='uid_value',
-            description='description_value',
-            display_name='display_name_value',
-            state=resources.State.ACTIVE,
-            type_=datascans.DataScanType.DATA_QUALITY,
-        )
-        response = client.get_data_scan(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert len(call.mock_calls) == 1
-        _, args, _ = call.mock_calls[0]
-        request = datascans.GetDataScanRequest()
-        assert args[0] == request
-
-        # Establish that the response is the type that we expect.
- assert isinstance(response, datascans.DataScan) - assert response.name == 'name_value' - assert response.uid == 'uid_value' - assert response.description == 'description_value' - assert response.display_name == 'display_name_value' - assert response.state == resources.State.ACTIVE - assert response.type_ == datascans.DataScanType.DATA_QUALITY - - -def test_get_data_scan_non_empty_request_with_auto_populated_field(): - # This test is a coverage failsafe to make sure that UUID4 fields are - # automatically populated, according to AIP-4235, with non-empty requests. - client = DataScanServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. - request = datascans.GetDataScanRequest( - name='name_value', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_data_scan), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client.get_data_scan(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == datascans.GetDataScanRequest( - name='name_value', - ) - -def test_get_data_scan_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = DataScanServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.get_data_scan in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.get_data_scan] = mock_rpc - request = {} - client.get_data_scan(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - client.get_data_scan(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_get_data_scan_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: - client = DataScanServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._client._transport.get_data_scan in client._client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.AsyncMock() - mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[client._client._transport.get_data_scan] = mock_rpc - - request = {} - await client.get_data_scan(request) - - # Establish that the underlying gRPC stub method was called. 
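-        # The transport builds one retry/timeout-aware wrapper per RPC at
-        # construction time and caches it in _wrapped_methods; counting calls
-        # on the swapped-in mock checks that the cached entry is what runs.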
-        assert mock_rpc.call_count == 1
-
-        await client.get_data_scan(request)
-
-        # Establish that a new wrapper was not created for this call
-        assert wrapper_fn.call_count == 0
-        assert mock_rpc.call_count == 2
-
-@pytest.mark.asyncio
-async def test_get_data_scan_async(transport: str = 'grpc_asyncio', request_type=datascans.GetDataScanRequest):
-    client = DataScanServiceAsyncClient(
-        credentials=async_anonymous_credentials(),
-        transport=transport,
-    )
-
-    # Everything is optional in proto3 as far as the runtime is concerned,
-    # and we are mocking out the actual API, so just send an empty request.
-    request = request_type()
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.get_data_scan),
-            '__call__') as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(datascans.DataScan(
-            name='name_value',
-            uid='uid_value',
-            description='description_value',
-            display_name='display_name_value',
-            state=resources.State.ACTIVE,
-            type_=datascans.DataScanType.DATA_QUALITY,
-        ))
-        response = await client.get_data_scan(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert len(call.mock_calls)
-        _, args, _ = call.mock_calls[0]
-        request = datascans.GetDataScanRequest()
-        assert args[0] == request
-
-        # Establish that the response is the type that we expect.
-        assert isinstance(response, datascans.DataScan)
-        assert response.name == 'name_value'
-        assert response.uid == 'uid_value'
-        assert response.description == 'description_value'
-        assert response.display_name == 'display_name_value'
-        assert response.state == resources.State.ACTIVE
-        assert response.type_ == datascans.DataScanType.DATA_QUALITY
-
-
-@pytest.mark.asyncio
-async def test_get_data_scan_async_from_dict():
-    await test_get_data_scan_async(request_type=dict)
-
-def test_get_data_scan_field_headers():
-    client = DataScanServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-    )
-
-    # Any value that is part of the HTTP/1.1 URI should be sent as
-    # a field header. Set these to a non-empty value.
-    request = datascans.GetDataScanRequest()
-
-    request.name = 'name_value'
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.get_data_scan),
-            '__call__') as call:
-        call.return_value = datascans.DataScan()
-        client.get_data_scan(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert len(call.mock_calls) == 1
-        _, args, _ = call.mock_calls[0]
-        assert args[0] == request
-
-        # Establish that the field header was sent.
-        _, _, kw = call.mock_calls[0]
-        assert (
-            'x-goog-request-params',
-            'name=name_value',
-        ) in kw['metadata']
-
-
-@pytest.mark.asyncio
-async def test_get_data_scan_field_headers_async():
-    client = DataScanServiceAsyncClient(
-        credentials=async_anonymous_credentials(),
-    )
-
-    # Any value that is part of the HTTP/1.1 URI should be sent as
-    # a field header. Set these to a non-empty value.
-    request = datascans.GetDataScanRequest()
-
-    request.name = 'name_value'
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.get_data_scan),
-            '__call__') as call:
-        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(datascans.DataScan())
-        await client.get_data_scan(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert len(call.mock_calls)
-        _, args, _ = call.mock_calls[0]
-        assert args[0] == request
-
-        # Establish that the field header was sent.
-        _, _, kw = call.mock_calls[0]
-        assert (
-            'x-goog-request-params',
-            'name=name_value',
-        ) in kw['metadata']
-
-
-def test_get_data_scan_flattened():
-    client = DataScanServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-    )
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.get_data_scan),
-            '__call__') as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = datascans.DataScan()
-        # Call the method with a truthy value for each flattened field,
-        # using the keyword arguments to the method.
-        client.get_data_scan(
-            name='name_value',
-        )
-
-        # Establish that the underlying call was made with the expected
-        # request object values.
-        assert len(call.mock_calls) == 1
-        _, args, _ = call.mock_calls[0]
-        arg = args[0].name
-        mock_val = 'name_value'
-        assert arg == mock_val
-
-
-def test_get_data_scan_flattened_error():
-    client = DataScanServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-    )
-
-    # Attempting to call a method with both a request object and flattened
-    # fields is an error.
-    with pytest.raises(ValueError):
-        client.get_data_scan(
-            datascans.GetDataScanRequest(),
-            name='name_value',
-        )
-
-@pytest.mark.asyncio
-async def test_get_data_scan_flattened_async():
-    client = DataScanServiceAsyncClient(
-        credentials=async_anonymous_credentials(),
-    )
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.get_data_scan),
-            '__call__') as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(datascans.DataScan())
-        # Call the method with a truthy value for each flattened field,
-        # using the keyword arguments to the method.
-        response = await client.get_data_scan(
-            name='name_value',
-        )
-
-        # Establish that the underlying call was made with the expected
-        # request object values.
-        assert len(call.mock_calls)
-        _, args, _ = call.mock_calls[0]
-        arg = args[0].name
-        mock_val = 'name_value'
-        assert arg == mock_val
-
-@pytest.mark.asyncio
-async def test_get_data_scan_flattened_error_async():
-    client = DataScanServiceAsyncClient(
-        credentials=async_anonymous_credentials(),
-    )
-
-    # Attempting to call a method with both a request object and flattened
-    # fields is an error.
-    with pytest.raises(ValueError):
-        await client.get_data_scan(
-            datascans.GetDataScanRequest(),
-            name='name_value',
-        )
-
-
-@pytest.mark.parametrize("request_type", [
-    datascans.ListDataScansRequest,
-    dict,
-])
-def test_list_data_scans(request_type, transport: str = 'grpc'):
-    client = DataScanServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport=transport,
-    )
-
-    # Everything is optional in proto3 as far as the runtime is concerned,
-    # and we are mocking out the actual API, so just send an empty request.
-    request = request_type()
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.list_data_scans),
-            '__call__') as call:
-        # Designate an appropriate return value for the call.
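-        # next_page_token and unreachable are the paging and partial-failure
-        # fields of the List response; the assertions below verify the pager
-        # surfaces both unchanged.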
- call.return_value = datascans.ListDataScansResponse( - next_page_token='next_page_token_value', - unreachable=['unreachable_value'], - ) - response = client.list_data_scans(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - request = datascans.ListDataScansRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, pagers.ListDataScansPager) - assert response.next_page_token == 'next_page_token_value' - assert response.unreachable == ['unreachable_value'] - - -def test_list_data_scans_non_empty_request_with_auto_populated_field(): - # This test is a coverage failsafe to make sure that UUID4 fields are - # automatically populated, according to AIP-4235, with non-empty requests. - client = DataScanServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. - request = datascans.ListDataScansRequest( - parent='parent_value', - page_token='page_token_value', - filter='filter_value', - order_by='order_by_value', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_data_scans), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client.list_data_scans(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == datascans.ListDataScansRequest( - parent='parent_value', - page_token='page_token_value', - filter='filter_value', - order_by='order_by_value', - ) - -def test_list_data_scans_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = DataScanServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.list_data_scans in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.list_data_scans] = mock_rpc - request = {} - client.list_data_scans(request) - - # Establish that the underlying gRPC stub method was called. 
-        assert mock_rpc.call_count == 1
-
-        client.list_data_scans(request)
-
-        # Establish that a new wrapper was not created for this call
-        assert wrapper_fn.call_count == 0
-        assert mock_rpc.call_count == 2
-
-@pytest.mark.asyncio
-async def test_list_data_scans_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"):
-    # Clients should use _prep_wrapped_messages to create cached wrapped rpcs,
-    # instead of constructing them on each call
-    with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn:
-        client = DataScanServiceAsyncClient(
-            credentials=async_anonymous_credentials(),
-            transport=transport,
-        )
-
-        # Should wrap all calls on client creation
-        assert wrapper_fn.call_count > 0
-        wrapper_fn.reset_mock()
-
-        # Ensure method has been cached
-        assert client._client._transport.list_data_scans in client._client._transport._wrapped_methods
-
-        # Replace cached wrapped function with mock
-        mock_rpc = mock.AsyncMock()
-        mock_rpc.return_value = mock.Mock()
-        client._client._transport._wrapped_methods[client._client._transport.list_data_scans] = mock_rpc
-
-        request = {}
-        await client.list_data_scans(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert mock_rpc.call_count == 1
-
-        await client.list_data_scans(request)
-
-        # Establish that a new wrapper was not created for this call
-        assert wrapper_fn.call_count == 0
-        assert mock_rpc.call_count == 2
-
-@pytest.mark.asyncio
-async def test_list_data_scans_async(transport: str = 'grpc_asyncio', request_type=datascans.ListDataScansRequest):
-    client = DataScanServiceAsyncClient(
-        credentials=async_anonymous_credentials(),
-        transport=transport,
-    )
-
-    # Everything is optional in proto3 as far as the runtime is concerned,
-    # and we are mocking out the actual API, so just send an empty request.
-    request = request_type()
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.list_data_scans),
-            '__call__') as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(datascans.ListDataScansResponse(
-            next_page_token='next_page_token_value',
-            unreachable=['unreachable_value'],
-        ))
-        response = await client.list_data_scans(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert len(call.mock_calls)
-        _, args, _ = call.mock_calls[0]
-        request = datascans.ListDataScansRequest()
-        assert args[0] == request
-
-        # Establish that the response is the type that we expect.
-        assert isinstance(response, pagers.ListDataScansAsyncPager)
-        assert response.next_page_token == 'next_page_token_value'
-        assert response.unreachable == ['unreachable_value']
-
-
-@pytest.mark.asyncio
-async def test_list_data_scans_async_from_dict():
-    await test_list_data_scans_async(request_type=dict)
-
-def test_list_data_scans_field_headers():
-    client = DataScanServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-    )
-
-    # Any value that is part of the HTTP/1.1 URI should be sent as
-    # a field header. Set these to a non-empty value.
-    request = datascans.ListDataScansRequest()
-
-    request.parent = 'parent_value'
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.list_data_scans),
-            '__call__') as call:
-        call.return_value = datascans.ListDataScansResponse()
-        client.list_data_scans(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert len(call.mock_calls) == 1
-        _, args, _ = call.mock_calls[0]
-        assert args[0] == request
-
-        # Establish that the field header was sent.
-        _, _, kw = call.mock_calls[0]
-        assert (
-            'x-goog-request-params',
-            'parent=parent_value',
-        ) in kw['metadata']
-
-
-@pytest.mark.asyncio
-async def test_list_data_scans_field_headers_async():
-    client = DataScanServiceAsyncClient(
-        credentials=async_anonymous_credentials(),
-    )
-
-    # Any value that is part of the HTTP/1.1 URI should be sent as
-    # a field header. Set these to a non-empty value.
-    request = datascans.ListDataScansRequest()
-
-    request.parent = 'parent_value'
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.list_data_scans),
-            '__call__') as call:
-        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(datascans.ListDataScansResponse())
-        await client.list_data_scans(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert len(call.mock_calls)
-        _, args, _ = call.mock_calls[0]
-        assert args[0] == request
-
-        # Establish that the field header was sent.
-        _, _, kw = call.mock_calls[0]
-        assert (
-            'x-goog-request-params',
-            'parent=parent_value',
-        ) in kw['metadata']
-
-
-def test_list_data_scans_flattened():
-    client = DataScanServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-    )
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.list_data_scans),
-            '__call__') as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = datascans.ListDataScansResponse()
-        # Call the method with a truthy value for each flattened field,
-        # using the keyword arguments to the method.
-        client.list_data_scans(
-            parent='parent_value',
-        )
-
-        # Establish that the underlying call was made with the expected
-        # request object values.
-        assert len(call.mock_calls) == 1
-        _, args, _ = call.mock_calls[0]
-        arg = args[0].parent
-        mock_val = 'parent_value'
-        assert arg == mock_val
-
-
-def test_list_data_scans_flattened_error():
-    client = DataScanServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-    )
-
-    # Attempting to call a method with both a request object and flattened
-    # fields is an error.
-    with pytest.raises(ValueError):
-        client.list_data_scans(
-            datascans.ListDataScansRequest(),
-            parent='parent_value',
-        )
-
-@pytest.mark.asyncio
-async def test_list_data_scans_flattened_async():
-    client = DataScanServiceAsyncClient(
-        credentials=async_anonymous_credentials(),
-    )
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.list_data_scans),
-            '__call__') as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(datascans.ListDataScansResponse())
-        # Call the method with a truthy value for each flattened field,
-        # using the keyword arguments to the method.
-        response = await client.list_data_scans(
-            parent='parent_value',
-        )
-
-        # Establish that the underlying call was made with the expected
-        # request object values.
-        assert len(call.mock_calls)
-        _, args, _ = call.mock_calls[0]
-        arg = args[0].parent
-        mock_val = 'parent_value'
-        assert arg == mock_val
-
-@pytest.mark.asyncio
-async def test_list_data_scans_flattened_error_async():
-    client = DataScanServiceAsyncClient(
-        credentials=async_anonymous_credentials(),
-    )
-
-    # Attempting to call a method with both a request object and flattened
-    # fields is an error.
-    with pytest.raises(ValueError):
-        await client.list_data_scans(
-            datascans.ListDataScansRequest(),
-            parent='parent_value',
-        )
-
-
-def test_list_data_scans_pager(transport_name: str = "grpc"):
-    client = DataScanServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport=transport_name,
-    )
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.list_data_scans),
-            '__call__') as call:
-        # Set the response to a series of pages.
-        call.side_effect = (
-            datascans.ListDataScansResponse(
-                data_scans=[
-                    datascans.DataScan(),
-                    datascans.DataScan(),
-                    datascans.DataScan(),
-                ],
-                next_page_token='abc',
-            ),
-            datascans.ListDataScansResponse(
-                data_scans=[],
-                next_page_token='def',
-            ),
-            datascans.ListDataScansResponse(
-                data_scans=[
-                    datascans.DataScan(),
-                ],
-                next_page_token='ghi',
-            ),
-            datascans.ListDataScansResponse(
-                data_scans=[
-                    datascans.DataScan(),
-                    datascans.DataScan(),
-                ],
-            ),
-            RuntimeError,
-        )
-
-        expected_metadata = ()
-        retry = retries.Retry()
-        timeout = 5
-        expected_metadata = tuple(expected_metadata) + (
-            gapic_v1.routing_header.to_grpc_metadata((
-                ('parent', ''),
-            )),
-        )
-        pager = client.list_data_scans(request={}, retry=retry, timeout=timeout)
-
-        assert pager._metadata == expected_metadata
-        assert pager._retry == retry
-        assert pager._timeout == timeout
-
-        results = list(pager)
-        assert len(results) == 6
-        assert all(isinstance(i, datascans.DataScan)
-                   for i in results)
-
-
-def test_list_data_scans_pages(transport_name: str = "grpc"):
-    client = DataScanServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport=transport_name,
-    )
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.list_data_scans),
-            '__call__') as call:
-        # Set the response to a series of pages.
-        call.side_effect = (
-            datascans.ListDataScansResponse(
-                data_scans=[
-                    datascans.DataScan(),
-                    datascans.DataScan(),
-                    datascans.DataScan(),
-                ],
-                next_page_token='abc',
-            ),
-            datascans.ListDataScansResponse(
-                data_scans=[],
-                next_page_token='def',
-            ),
-            datascans.ListDataScansResponse(
-                data_scans=[
-                    datascans.DataScan(),
-                ],
-                next_page_token='ghi',
-            ),
-            datascans.ListDataScansResponse(
-                data_scans=[
-                    datascans.DataScan(),
-                    datascans.DataScan(),
-                ],
-            ),
-            RuntimeError,
-        )
-        pages = list(client.list_data_scans(request={}).pages)
-        for page_, token in zip(pages, ['abc','def','ghi', '']):
-            assert page_.raw_page.next_page_token == token
-
-@pytest.mark.asyncio
-async def test_list_data_scans_async_pager():
-    client = DataScanServiceAsyncClient(
-        credentials=async_anonymous_credentials(),
-    )
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.list_data_scans),
-            '__call__', new_callable=mock.AsyncMock) as call:
-        # Set the response to a series of pages.
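-        # With an iterable side_effect, each stub invocation yields the next
-        # canned page in order, and the trailing RuntimeError is raised if the
-        # pager ever requests a page past the last one.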
- call.side_effect = ( - datascans.ListDataScansResponse( - data_scans=[ - datascans.DataScan(), - datascans.DataScan(), - datascans.DataScan(), - ], - next_page_token='abc', - ), - datascans.ListDataScansResponse( - data_scans=[], - next_page_token='def', - ), - datascans.ListDataScansResponse( - data_scans=[ - datascans.DataScan(), - ], - next_page_token='ghi', - ), - datascans.ListDataScansResponse( - data_scans=[ - datascans.DataScan(), - datascans.DataScan(), - ], - ), - RuntimeError, - ) - async_pager = await client.list_data_scans(request={},) - assert async_pager.next_page_token == 'abc' - responses = [] - async for response in async_pager: # pragma: no branch - responses.append(response) - - assert len(responses) == 6 - assert all(isinstance(i, datascans.DataScan) - for i in responses) - - -@pytest.mark.asyncio -async def test_list_data_scans_async_pages(): - client = DataScanServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_data_scans), - '__call__', new_callable=mock.AsyncMock) as call: - # Set the response to a series of pages. - call.side_effect = ( - datascans.ListDataScansResponse( - data_scans=[ - datascans.DataScan(), - datascans.DataScan(), - datascans.DataScan(), - ], - next_page_token='abc', - ), - datascans.ListDataScansResponse( - data_scans=[], - next_page_token='def', - ), - datascans.ListDataScansResponse( - data_scans=[ - datascans.DataScan(), - ], - next_page_token='ghi', - ), - datascans.ListDataScansResponse( - data_scans=[ - datascans.DataScan(), - datascans.DataScan(), - ], - ), - RuntimeError, - ) - pages = [] - # Workaround issue in python 3.9 related to code coverage by adding `# pragma: no branch` - # See https://github.com/googleapis/gapic-generator-python/pull/1174#issuecomment-1025132372 - async for page_ in ( # pragma: no branch - await client.list_data_scans(request={}) - ).pages: - pages.append(page_) - for page_, token in zip(pages, ['abc','def','ghi', '']): - assert page_.raw_page.next_page_token == token - -@pytest.mark.parametrize("request_type", [ - datascans.RunDataScanRequest, - dict, -]) -def test_run_data_scan(request_type, transport: str = 'grpc'): - client = DataScanServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.run_data_scan), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = datascans.RunDataScanResponse( - ) - response = client.run_data_scan(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - request = datascans.RunDataScanRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, datascans.RunDataScanResponse) - - -def test_run_data_scan_non_empty_request_with_auto_populated_field(): - # This test is a coverage failsafe to make sure that UUID4 fields are - # automatically populated, according to AIP-4235, with non-empty requests. 
- client = DataScanServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. - request = datascans.RunDataScanRequest( - name='name_value', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.run_data_scan), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client.run_data_scan(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == datascans.RunDataScanRequest( - name='name_value', - ) - -def test_run_data_scan_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = DataScanServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.run_data_scan in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.run_data_scan] = mock_rpc - request = {} - client.run_data_scan(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - client.run_data_scan(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_run_data_scan_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: - client = DataScanServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._client._transport.run_data_scan in client._client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.AsyncMock() - mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[client._client._transport.run_data_scan] = mock_rpc - - request = {} - await client.run_data_scan(request) - - # Establish that the underlying gRPC stub method was called. 
-        assert mock_rpc.call_count == 1
-
-        await client.run_data_scan(request)
-
-        # Establish that a new wrapper was not created for this call
-        assert wrapper_fn.call_count == 0
-        assert mock_rpc.call_count == 2
-
-@pytest.mark.asyncio
-async def test_run_data_scan_async(transport: str = 'grpc_asyncio', request_type=datascans.RunDataScanRequest):
-    client = DataScanServiceAsyncClient(
-        credentials=async_anonymous_credentials(),
-        transport=transport,
-    )
-
-    # Everything is optional in proto3 as far as the runtime is concerned,
-    # and we are mocking out the actual API, so just send an empty request.
-    request = request_type()
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.run_data_scan),
-            '__call__') as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(datascans.RunDataScanResponse(
-        ))
-        response = await client.run_data_scan(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert len(call.mock_calls)
-        _, args, _ = call.mock_calls[0]
-        request = datascans.RunDataScanRequest()
-        assert args[0] == request
-
-        # Establish that the response is the type that we expect.
-        assert isinstance(response, datascans.RunDataScanResponse)
-
-
-@pytest.mark.asyncio
-async def test_run_data_scan_async_from_dict():
-    await test_run_data_scan_async(request_type=dict)
-
-def test_run_data_scan_field_headers():
-    client = DataScanServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-    )
-
-    # Any value that is part of the HTTP/1.1 URI should be sent as
-    # a field header. Set these to a non-empty value.
-    request = datascans.RunDataScanRequest()
-
-    request.name = 'name_value'
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.run_data_scan),
-            '__call__') as call:
-        call.return_value = datascans.RunDataScanResponse()
-        client.run_data_scan(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert len(call.mock_calls) == 1
-        _, args, _ = call.mock_calls[0]
-        assert args[0] == request
-
-        # Establish that the field header was sent.
-        _, _, kw = call.mock_calls[0]
-        assert (
-            'x-goog-request-params',
-            'name=name_value',
-        ) in kw['metadata']
-
-
-@pytest.mark.asyncio
-async def test_run_data_scan_field_headers_async():
-    client = DataScanServiceAsyncClient(
-        credentials=async_anonymous_credentials(),
-    )
-
-    # Any value that is part of the HTTP/1.1 URI should be sent as
-    # a field header. Set these to a non-empty value.
-    request = datascans.RunDataScanRequest()
-
-    request.name = 'name_value'
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.run_data_scan),
-            '__call__') as call:
-        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(datascans.RunDataScanResponse())
-        await client.run_data_scan(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert len(call.mock_calls)
-        _, args, _ = call.mock_calls[0]
-        assert args[0] == request
-
-        # Establish that the field header was sent.
-        _, _, kw = call.mock_calls[0]
-        assert (
-            'x-goog-request-params',
-            'name=name_value',
-        ) in kw['metadata']
-
-
-def test_run_data_scan_flattened():
-    client = DataScanServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-    )
-
-    # Mock the actual call within the gRPC stub, and fake the request.
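-    # Patching the transport's __call__ keeps the test hermetic: only request
-    # construction and response plumbing are exercised, never the network.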
-    with mock.patch.object(
-            type(client.transport.run_data_scan),
-            '__call__') as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = datascans.RunDataScanResponse()
-        # Call the method with a truthy value for each flattened field,
-        # using the keyword arguments to the method.
-        client.run_data_scan(
-            name='name_value',
-        )
-
-        # Establish that the underlying call was made with the expected
-        # request object values.
-        assert len(call.mock_calls) == 1
-        _, args, _ = call.mock_calls[0]
-        arg = args[0].name
-        mock_val = 'name_value'
-        assert arg == mock_val
-
-
-def test_run_data_scan_flattened_error():
-    client = DataScanServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-    )
-
-    # Attempting to call a method with both a request object and flattened
-    # fields is an error.
-    with pytest.raises(ValueError):
-        client.run_data_scan(
-            datascans.RunDataScanRequest(),
-            name='name_value',
-        )
-
-@pytest.mark.asyncio
-async def test_run_data_scan_flattened_async():
-    client = DataScanServiceAsyncClient(
-        credentials=async_anonymous_credentials(),
-    )
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.run_data_scan),
-            '__call__') as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(datascans.RunDataScanResponse())
-        # Call the method with a truthy value for each flattened field,
-        # using the keyword arguments to the method.
-        response = await client.run_data_scan(
-            name='name_value',
-        )
-
-        # Establish that the underlying call was made with the expected
-        # request object values.
-        assert len(call.mock_calls)
-        _, args, _ = call.mock_calls[0]
-        arg = args[0].name
-        mock_val = 'name_value'
-        assert arg == mock_val
-
-@pytest.mark.asyncio
-async def test_run_data_scan_flattened_error_async():
-    client = DataScanServiceAsyncClient(
-        credentials=async_anonymous_credentials(),
-    )
-
-    # Attempting to call a method with both a request object and flattened
-    # fields is an error.
-    with pytest.raises(ValueError):
-        await client.run_data_scan(
-            datascans.RunDataScanRequest(),
-            name='name_value',
-        )
-
-
-@pytest.mark.parametrize("request_type", [
-    datascans.GetDataScanJobRequest,
-    dict,
-])
-def test_get_data_scan_job(request_type, transport: str = 'grpc'):
-    client = DataScanServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport=transport,
-    )
-
-    # Everything is optional in proto3 as far as the runtime is concerned,
-    # and we are mocking out the actual API, so just send an empty request.
-    request = request_type()
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.get_data_scan_job),
-            '__call__') as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = datascans.DataScanJob(
-            name='name_value',
-            uid='uid_value',
-            state=datascans.DataScanJob.State.RUNNING,
-            message='message_value',
-            type_=datascans.DataScanType.DATA_QUALITY,
-        )
-        response = client.get_data_scan_job(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert len(call.mock_calls) == 1
-        _, args, _ = call.mock_calls[0]
-        request = datascans.GetDataScanJobRequest()
-        assert args[0] == request
-
-        # Establish that the response is the type that we expect.
- assert isinstance(response, datascans.DataScanJob) - assert response.name == 'name_value' - assert response.uid == 'uid_value' - assert response.state == datascans.DataScanJob.State.RUNNING - assert response.message == 'message_value' - assert response.type_ == datascans.DataScanType.DATA_QUALITY - - -def test_get_data_scan_job_non_empty_request_with_auto_populated_field(): - # This test is a coverage failsafe to make sure that UUID4 fields are - # automatically populated, according to AIP-4235, with non-empty requests. - client = DataScanServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. - request = datascans.GetDataScanJobRequest( - name='name_value', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_data_scan_job), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client.get_data_scan_job(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == datascans.GetDataScanJobRequest( - name='name_value', - ) - -def test_get_data_scan_job_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = DataScanServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.get_data_scan_job in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.get_data_scan_job] = mock_rpc - request = {} - client.get_data_scan_job(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - client.get_data_scan_job(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_get_data_scan_job_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: - client = DataScanServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._client._transport.get_data_scan_job in client._client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.AsyncMock() - mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[client._client._transport.get_data_scan_job] = mock_rpc - - request = {} - await client.get_data_scan_job(request) - - # Establish that the underlying gRPC stub method was called. 
-        assert mock_rpc.call_count == 1
-
-        await client.get_data_scan_job(request)
-
-        # Establish that a new wrapper was not created for this call
-        assert wrapper_fn.call_count == 0
-        assert mock_rpc.call_count == 2
-
-@pytest.mark.asyncio
-async def test_get_data_scan_job_async(transport: str = 'grpc_asyncio', request_type=datascans.GetDataScanJobRequest):
-    client = DataScanServiceAsyncClient(
-        credentials=async_anonymous_credentials(),
-        transport=transport,
-    )
-
-    # Everything is optional in proto3 as far as the runtime is concerned,
-    # and we are mocking out the actual API, so just send an empty request.
-    request = request_type()
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.get_data_scan_job),
-            '__call__') as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(datascans.DataScanJob(
-            name='name_value',
-            uid='uid_value',
-            state=datascans.DataScanJob.State.RUNNING,
-            message='message_value',
-            type_=datascans.DataScanType.DATA_QUALITY,
-        ))
-        response = await client.get_data_scan_job(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert len(call.mock_calls)
-        _, args, _ = call.mock_calls[0]
-        request = datascans.GetDataScanJobRequest()
-        assert args[0] == request
-
-    # Establish that the response is the type that we expect.
-    assert isinstance(response, datascans.DataScanJob)
-    assert response.name == 'name_value'
-    assert response.uid == 'uid_value'
-    assert response.state == datascans.DataScanJob.State.RUNNING
-    assert response.message == 'message_value'
-    assert response.type_ == datascans.DataScanType.DATA_QUALITY
-
-
-@pytest.mark.asyncio
-async def test_get_data_scan_job_async_from_dict():
-    await test_get_data_scan_job_async(request_type=dict)
-
-def test_get_data_scan_job_field_headers():
-    client = DataScanServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-    )
-
-    # Any value that is part of the HTTP/1.1 URI should be sent as
-    # a field header. Set these to a non-empty value.
-    request = datascans.GetDataScanJobRequest()
-
-    request.name = 'name_value'
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.get_data_scan_job),
-            '__call__') as call:
-        call.return_value = datascans.DataScanJob()
-        client.get_data_scan_job(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert len(call.mock_calls) == 1
-        _, args, _ = call.mock_calls[0]
-        assert args[0] == request
-
-    # Establish that the field header was sent.
-    _, _, kw = call.mock_calls[0]
-    assert (
-        'x-goog-request-params',
-        'name=name_value',
-    ) in kw['metadata']
-
-
-@pytest.mark.asyncio
-async def test_get_data_scan_job_field_headers_async():
-    client = DataScanServiceAsyncClient(
-        credentials=async_anonymous_credentials(),
-    )
-
-    # Any value that is part of the HTTP/1.1 URI should be sent as
-    # a field header. Set these to a non-empty value.
-    request = datascans.GetDataScanJobRequest()
-
-    request.name = 'name_value'
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.get_data_scan_job),
-            '__call__') as call:
-        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(datascans.DataScanJob())
-        await client.get_data_scan_job(request)
-
-        # Establish that the underlying gRPC stub method was called.
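
As an aside on the `('x-goog-request-params', 'name=name_value')` tuple these field-header tests assert, google.api_core exposes the helper that builds it; a minimal sketch with illustrative field values:

    from google.api_core.gapic_v1 import routing_header

    # Build the routing metadata a GAPIC client attaches for URI-based fields.
    metadata = routing_header.to_grpc_metadata((('name', 'name_value'),))
    assert metadata == ('x-goog-request-params', 'name=name_value')
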
-        assert len(call.mock_calls)
-        _, args, _ = call.mock_calls[0]
-        assert args[0] == request
-
-    # Establish that the field header was sent.
-    _, _, kw = call.mock_calls[0]
-    assert (
-        'x-goog-request-params',
-        'name=name_value',
-    ) in kw['metadata']
-
-
-def test_get_data_scan_job_flattened():
-    client = DataScanServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-    )
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.get_data_scan_job),
-            '__call__') as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = datascans.DataScanJob()
-        # Call the method with a truthy value for each flattened field,
-        # using the keyword arguments to the method.
-        client.get_data_scan_job(
-            name='name_value',
-        )
-
-        # Establish that the underlying call was made with the expected
-        # request object values.
-        assert len(call.mock_calls) == 1
-        _, args, _ = call.mock_calls[0]
-        arg = args[0].name
-        mock_val = 'name_value'
-        assert arg == mock_val
-
-
-def test_get_data_scan_job_flattened_error():
-    client = DataScanServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-    )
-
-    # Attempting to call a method with both a request object and flattened
-    # fields is an error.
-    with pytest.raises(ValueError):
-        client.get_data_scan_job(
-            datascans.GetDataScanJobRequest(),
-            name='name_value',
-        )
-
-@pytest.mark.asyncio
-async def test_get_data_scan_job_flattened_async():
-    client = DataScanServiceAsyncClient(
-        credentials=async_anonymous_credentials(),
-    )
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.get_data_scan_job),
-            '__call__') as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(datascans.DataScanJob())
-        # Call the method with a truthy value for each flattened field,
-        # using the keyword arguments to the method.
-        response = await client.get_data_scan_job(
-            name='name_value',
-        )
-
-        # Establish that the underlying call was made with the expected
-        # request object values.
-        assert len(call.mock_calls)
-        _, args, _ = call.mock_calls[0]
-        arg = args[0].name
-        mock_val = 'name_value'
-        assert arg == mock_val
-
-@pytest.mark.asyncio
-async def test_get_data_scan_job_flattened_error_async():
-    client = DataScanServiceAsyncClient(
-        credentials=async_anonymous_credentials(),
-    )
-
-    # Attempting to call a method with both a request object and flattened
-    # fields is an error.
-    with pytest.raises(ValueError):
-        await client.get_data_scan_job(
-            datascans.GetDataScanJobRequest(),
-            name='name_value',
-        )
-
-
-@pytest.mark.parametrize("request_type", [
-  datascans.ListDataScanJobsRequest,
-  dict,
-])
-def test_list_data_scan_jobs(request_type, transport: str = 'grpc'):
-    client = DataScanServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport=transport,
-    )
-
-    # Everything is optional in proto3 as far as the runtime is concerned,
-    # and we are mocking out the actual API, so just send an empty request.
-    request = request_type()
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.list_data_scan_jobs),
-            '__call__') as call:
-        # Designate an appropriate return value for the call.
- call.return_value = datascans.ListDataScanJobsResponse( - next_page_token='next_page_token_value', - ) - response = client.list_data_scan_jobs(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - request = datascans.ListDataScanJobsRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, pagers.ListDataScanJobsPager) - assert response.next_page_token == 'next_page_token_value' - - -def test_list_data_scan_jobs_non_empty_request_with_auto_populated_field(): - # This test is a coverage failsafe to make sure that UUID4 fields are - # automatically populated, according to AIP-4235, with non-empty requests. - client = DataScanServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. - request = datascans.ListDataScanJobsRequest( - parent='parent_value', - page_token='page_token_value', - filter='filter_value', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_data_scan_jobs), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client.list_data_scan_jobs(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == datascans.ListDataScanJobsRequest( - parent='parent_value', - page_token='page_token_value', - filter='filter_value', - ) - -def test_list_data_scan_jobs_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = DataScanServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.list_data_scan_jobs in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.list_data_scan_jobs] = mock_rpc - request = {} - client.list_data_scan_jobs(request) - - # Establish that the underlying gRPC stub method was called. 
-        assert mock_rpc.call_count == 1
-
-        client.list_data_scan_jobs(request)
-
-        # Establish that a new wrapper was not created for this call
-        assert wrapper_fn.call_count == 0
-        assert mock_rpc.call_count == 2
-
-@pytest.mark.asyncio
-async def test_list_data_scan_jobs_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"):
-    # Clients should use _prep_wrapped_messages to create cached wrapped rpcs,
-    # instead of constructing them on each call
-    with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn:
-        client = DataScanServiceAsyncClient(
-            credentials=async_anonymous_credentials(),
-            transport=transport,
-        )
-
-        # Should wrap all calls on client creation
-        assert wrapper_fn.call_count > 0
-        wrapper_fn.reset_mock()
-
-        # Ensure method has been cached
-        assert client._client._transport.list_data_scan_jobs in client._client._transport._wrapped_methods
-
-        # Replace cached wrapped function with mock
-        mock_rpc = mock.AsyncMock()
-        mock_rpc.return_value = mock.Mock()
-        client._client._transport._wrapped_methods[client._client._transport.list_data_scan_jobs] = mock_rpc
-
-        request = {}
-        await client.list_data_scan_jobs(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert mock_rpc.call_count == 1
-
-        await client.list_data_scan_jobs(request)
-
-        # Establish that a new wrapper was not created for this call
-        assert wrapper_fn.call_count == 0
-        assert mock_rpc.call_count == 2
-
-@pytest.mark.asyncio
-async def test_list_data_scan_jobs_async(transport: str = 'grpc_asyncio', request_type=datascans.ListDataScanJobsRequest):
-    client = DataScanServiceAsyncClient(
-        credentials=async_anonymous_credentials(),
-        transport=transport,
-    )
-
-    # Everything is optional in proto3 as far as the runtime is concerned,
-    # and we are mocking out the actual API, so just send an empty request.
-    request = request_type()
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.list_data_scan_jobs),
-            '__call__') as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(datascans.ListDataScanJobsResponse(
-            next_page_token='next_page_token_value',
-        ))
-        response = await client.list_data_scan_jobs(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert len(call.mock_calls)
-        _, args, _ = call.mock_calls[0]
-        request = datascans.ListDataScanJobsRequest()
-        assert args[0] == request
-
-    # Establish that the response is the type that we expect.
-    assert isinstance(response, pagers.ListDataScanJobsAsyncPager)
-    assert response.next_page_token == 'next_page_token_value'
-
-
-@pytest.mark.asyncio
-async def test_list_data_scan_jobs_async_from_dict():
-    await test_list_data_scan_jobs_async(request_type=dict)
-
-def test_list_data_scan_jobs_field_headers():
-    client = DataScanServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-    )
-
-    # Any value that is part of the HTTP/1.1 URI should be sent as
-    # a field header. Set these to a non-empty value.
-    request = datascans.ListDataScanJobsRequest()
-
-    request.parent = 'parent_value'
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.list_data_scan_jobs),
-            '__call__') as call:
-        call.return_value = datascans.ListDataScanJobsResponse()
-        client.list_data_scan_jobs(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert len(call.mock_calls) == 1
-        _, args, _ = call.mock_calls[0]
-        assert args[0] == request
-
-    # Establish that the field header was sent.
-    _, _, kw = call.mock_calls[0]
-    assert (
-        'x-goog-request-params',
-        'parent=parent_value',
-    ) in kw['metadata']
-
-
-@pytest.mark.asyncio
-async def test_list_data_scan_jobs_field_headers_async():
-    client = DataScanServiceAsyncClient(
-        credentials=async_anonymous_credentials(),
-    )
-
-    # Any value that is part of the HTTP/1.1 URI should be sent as
-    # a field header. Set these to a non-empty value.
-    request = datascans.ListDataScanJobsRequest()
-
-    request.parent = 'parent_value'
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.list_data_scan_jobs),
-            '__call__') as call:
-        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(datascans.ListDataScanJobsResponse())
-        await client.list_data_scan_jobs(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert len(call.mock_calls)
-        _, args, _ = call.mock_calls[0]
-        assert args[0] == request
-
-    # Establish that the field header was sent.
-    _, _, kw = call.mock_calls[0]
-    assert (
-        'x-goog-request-params',
-        'parent=parent_value',
-    ) in kw['metadata']
-
-
-def test_list_data_scan_jobs_flattened():
-    client = DataScanServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-    )
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.list_data_scan_jobs),
-            '__call__') as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = datascans.ListDataScanJobsResponse()
-        # Call the method with a truthy value for each flattened field,
-        # using the keyword arguments to the method.
-        client.list_data_scan_jobs(
-            parent='parent_value',
-        )
-
-        # Establish that the underlying call was made with the expected
-        # request object values.
-        assert len(call.mock_calls) == 1
-        _, args, _ = call.mock_calls[0]
-        arg = args[0].parent
-        mock_val = 'parent_value'
-        assert arg == mock_val
-
-
-def test_list_data_scan_jobs_flattened_error():
-    client = DataScanServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-    )
-
-    # Attempting to call a method with both a request object and flattened
-    # fields is an error.
-    with pytest.raises(ValueError):
-        client.list_data_scan_jobs(
-            datascans.ListDataScanJobsRequest(),
-            parent='parent_value',
-        )
-
-@pytest.mark.asyncio
-async def test_list_data_scan_jobs_flattened_async():
-    client = DataScanServiceAsyncClient(
-        credentials=async_anonymous_credentials(),
-    )
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.list_data_scan_jobs),
-            '__call__') as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(datascans.ListDataScanJobsResponse())
-        # Call the method with a truthy value for each flattened field,
-        # using the keyword arguments to the method.
-        response = await client.list_data_scan_jobs(
-            parent='parent_value',
-        )
-
-        # Establish that the underlying call was made with the expected
-        # request object values.
- assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].parent - mock_val = 'parent_value' - assert arg == mock_val - -@pytest.mark.asyncio -async def test_list_data_scan_jobs_flattened_error_async(): - client = DataScanServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - await client.list_data_scan_jobs( - datascans.ListDataScanJobsRequest(), - parent='parent_value', - ) - - -def test_list_data_scan_jobs_pager(transport_name: str = "grpc"): - client = DataScanServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport_name, - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_data_scan_jobs), - '__call__') as call: - # Set the response to a series of pages. - call.side_effect = ( - datascans.ListDataScanJobsResponse( - data_scan_jobs=[ - datascans.DataScanJob(), - datascans.DataScanJob(), - datascans.DataScanJob(), - ], - next_page_token='abc', - ), - datascans.ListDataScanJobsResponse( - data_scan_jobs=[], - next_page_token='def', - ), - datascans.ListDataScanJobsResponse( - data_scan_jobs=[ - datascans.DataScanJob(), - ], - next_page_token='ghi', - ), - datascans.ListDataScanJobsResponse( - data_scan_jobs=[ - datascans.DataScanJob(), - datascans.DataScanJob(), - ], - ), - RuntimeError, - ) - - expected_metadata = () - retry = retries.Retry() - timeout = 5 - expected_metadata = tuple(expected_metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ('parent', ''), - )), - ) - pager = client.list_data_scan_jobs(request={}, retry=retry, timeout=timeout) - - assert pager._metadata == expected_metadata - assert pager._retry == retry - assert pager._timeout == timeout - - results = list(pager) - assert len(results) == 6 - assert all(isinstance(i, datascans.DataScanJob) - for i in results) -def test_list_data_scan_jobs_pages(transport_name: str = "grpc"): - client = DataScanServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport_name, - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_data_scan_jobs), - '__call__') as call: - # Set the response to a series of pages. - call.side_effect = ( - datascans.ListDataScanJobsResponse( - data_scan_jobs=[ - datascans.DataScanJob(), - datascans.DataScanJob(), - datascans.DataScanJob(), - ], - next_page_token='abc', - ), - datascans.ListDataScanJobsResponse( - data_scan_jobs=[], - next_page_token='def', - ), - datascans.ListDataScanJobsResponse( - data_scan_jobs=[ - datascans.DataScanJob(), - ], - next_page_token='ghi', - ), - datascans.ListDataScanJobsResponse( - data_scan_jobs=[ - datascans.DataScanJob(), - datascans.DataScanJob(), - ], - ), - RuntimeError, - ) - pages = list(client.list_data_scan_jobs(request={}).pages) - for page_, token in zip(pages, ['abc','def','ghi', '']): - assert page_.raw_page.next_page_token == token - -@pytest.mark.asyncio -async def test_list_data_scan_jobs_async_pager(): - client = DataScanServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_data_scan_jobs), - '__call__', new_callable=mock.AsyncMock) as call: - # Set the response to a series of pages. 
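
In application code, the pager objects these tests construct are consumed by plain iteration, with each page fetched lazily as the iterator crosses a page boundary. A minimal sketch, assuming ambient Application Default Credentials and a hypothetical parent path:

    from google.cloud import dataplex_v1

    client = dataplex_v1.DataScanServiceClient()
    pager = client.list_data_scan_jobs(
        parent="projects/my-project/locations/us-central1/dataScans/my-scan",  # hypothetical
    )
    for job in pager:  # each page is requested on demand
        print(job.name)
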
- call.side_effect = ( - datascans.ListDataScanJobsResponse( - data_scan_jobs=[ - datascans.DataScanJob(), - datascans.DataScanJob(), - datascans.DataScanJob(), - ], - next_page_token='abc', - ), - datascans.ListDataScanJobsResponse( - data_scan_jobs=[], - next_page_token='def', - ), - datascans.ListDataScanJobsResponse( - data_scan_jobs=[ - datascans.DataScanJob(), - ], - next_page_token='ghi', - ), - datascans.ListDataScanJobsResponse( - data_scan_jobs=[ - datascans.DataScanJob(), - datascans.DataScanJob(), - ], - ), - RuntimeError, - ) - async_pager = await client.list_data_scan_jobs(request={},) - assert async_pager.next_page_token == 'abc' - responses = [] - async for response in async_pager: # pragma: no branch - responses.append(response) - - assert len(responses) == 6 - assert all(isinstance(i, datascans.DataScanJob) - for i in responses) - - -@pytest.mark.asyncio -async def test_list_data_scan_jobs_async_pages(): - client = DataScanServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_data_scan_jobs), - '__call__', new_callable=mock.AsyncMock) as call: - # Set the response to a series of pages. - call.side_effect = ( - datascans.ListDataScanJobsResponse( - data_scan_jobs=[ - datascans.DataScanJob(), - datascans.DataScanJob(), - datascans.DataScanJob(), - ], - next_page_token='abc', - ), - datascans.ListDataScanJobsResponse( - data_scan_jobs=[], - next_page_token='def', - ), - datascans.ListDataScanJobsResponse( - data_scan_jobs=[ - datascans.DataScanJob(), - ], - next_page_token='ghi', - ), - datascans.ListDataScanJobsResponse( - data_scan_jobs=[ - datascans.DataScanJob(), - datascans.DataScanJob(), - ], - ), - RuntimeError, - ) - pages = [] - # Workaround issue in python 3.9 related to code coverage by adding `# pragma: no branch` - # See https://github.com/googleapis/gapic-generator-python/pull/1174#issuecomment-1025132372 - async for page_ in ( # pragma: no branch - await client.list_data_scan_jobs(request={}) - ).pages: - pages.append(page_) - for page_, token in zip(pages, ['abc','def','ghi', '']): - assert page_.raw_page.next_page_token == token - -@pytest.mark.parametrize("request_type", [ - datascans.GenerateDataQualityRulesRequest, - dict, -]) -def test_generate_data_quality_rules(request_type, transport: str = 'grpc'): - client = DataScanServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.generate_data_quality_rules), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = datascans.GenerateDataQualityRulesResponse( - ) - response = client.generate_data_quality_rules(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - request = datascans.GenerateDataQualityRulesRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. 
- assert isinstance(response, datascans.GenerateDataQualityRulesResponse) - - -def test_generate_data_quality_rules_non_empty_request_with_auto_populated_field(): - # This test is a coverage failsafe to make sure that UUID4 fields are - # automatically populated, according to AIP-4235, with non-empty requests. - client = DataScanServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. - request = datascans.GenerateDataQualityRulesRequest( - name='name_value', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.generate_data_quality_rules), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client.generate_data_quality_rules(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == datascans.GenerateDataQualityRulesRequest( - name='name_value', - ) - -def test_generate_data_quality_rules_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = DataScanServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.generate_data_quality_rules in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.generate_data_quality_rules] = mock_rpc - request = {} - client.generate_data_quality_rules(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - client.generate_data_quality_rules(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_generate_data_quality_rules_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: - client = DataScanServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._client._transport.generate_data_quality_rules in client._client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.AsyncMock() - mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[client._client._transport.generate_data_quality_rules] = mock_rpc - - request = {} - await client.generate_data_quality_rules(request) - - # Establish that the underlying gRPC stub method was called. 
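
The caching behavior these `use_cached_wrapped_rpc` tests assert can be pictured with a plain dictionary: wrap each RPC once at client construction, then look the wrapped callable up on every call. A minimal, self-contained sketch (the names are illustrative, not the client's internals):

    wrap_count = 0

    def wrap(func):
        # Stand-in for the wrap step: count how often wrapping happens.
        global wrap_count
        wrap_count += 1
        return func

    wrapped_methods = {name: wrap(fn) for name, fn in {"get": len, "list": sorted}.items()}
    wrapped_methods["get"]("abc")
    wrapped_methods["get"]("defg")
    assert wrap_count == 2  # wrapped once per method at setup, not once per call
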
-        assert mock_rpc.call_count == 1
-
-        await client.generate_data_quality_rules(request)
-
-        # Establish that a new wrapper was not created for this call
-        assert wrapper_fn.call_count == 0
-        assert mock_rpc.call_count == 2
-
-@pytest.mark.asyncio
-async def test_generate_data_quality_rules_async(transport: str = 'grpc_asyncio', request_type=datascans.GenerateDataQualityRulesRequest):
-    client = DataScanServiceAsyncClient(
-        credentials=async_anonymous_credentials(),
-        transport=transport,
-    )
-
-    # Everything is optional in proto3 as far as the runtime is concerned,
-    # and we are mocking out the actual API, so just send an empty request.
-    request = request_type()
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.generate_data_quality_rules),
-            '__call__') as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(datascans.GenerateDataQualityRulesResponse())
-        response = await client.generate_data_quality_rules(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert len(call.mock_calls)
-        _, args, _ = call.mock_calls[0]
-        request = datascans.GenerateDataQualityRulesRequest()
-        assert args[0] == request
-
-    # Establish that the response is the type that we expect.
-    assert isinstance(response, datascans.GenerateDataQualityRulesResponse)
-
-
-@pytest.mark.asyncio
-async def test_generate_data_quality_rules_async_from_dict():
-    await test_generate_data_quality_rules_async(request_type=dict)
-
-def test_generate_data_quality_rules_field_headers():
-    client = DataScanServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-    )
-
-    # Any value that is part of the HTTP/1.1 URI should be sent as
-    # a field header. Set these to a non-empty value.
-    request = datascans.GenerateDataQualityRulesRequest()
-
-    request.name = 'name_value'
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.generate_data_quality_rules),
-            '__call__') as call:
-        call.return_value = datascans.GenerateDataQualityRulesResponse()
-        client.generate_data_quality_rules(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert len(call.mock_calls) == 1
-        _, args, _ = call.mock_calls[0]
-        assert args[0] == request
-
-    # Establish that the field header was sent.
-    _, _, kw = call.mock_calls[0]
-    assert (
-        'x-goog-request-params',
-        'name=name_value',
-    ) in kw['metadata']
-
-
-@pytest.mark.asyncio
-async def test_generate_data_quality_rules_field_headers_async():
-    client = DataScanServiceAsyncClient(
-        credentials=async_anonymous_credentials(),
-    )
-
-    # Any value that is part of the HTTP/1.1 URI should be sent as
-    # a field header. Set these to a non-empty value.
-    request = datascans.GenerateDataQualityRulesRequest()
-
-    request.name = 'name_value'
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.generate_data_quality_rules),
-            '__call__') as call:
-        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(datascans.GenerateDataQualityRulesResponse())
-        await client.generate_data_quality_rules(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert len(call.mock_calls)
-        _, args, _ = call.mock_calls[0]
-        assert args[0] == request
-
-    # Establish that the field header was sent.
-    _, _, kw = call.mock_calls[0]
-    assert (
-        'x-goog-request-params',
-        'name=name_value',
-    ) in kw['metadata']
-
-
-def test_generate_data_quality_rules_flattened():
-    client = DataScanServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-    )
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.generate_data_quality_rules),
-            '__call__') as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = datascans.GenerateDataQualityRulesResponse()
-        # Call the method with a truthy value for each flattened field,
-        # using the keyword arguments to the method.
-        client.generate_data_quality_rules(
-            name='name_value',
-        )
-
-        # Establish that the underlying call was made with the expected
-        # request object values.
-        assert len(call.mock_calls) == 1
-        _, args, _ = call.mock_calls[0]
-        arg = args[0].name
-        mock_val = 'name_value'
-        assert arg == mock_val
-
-
-def test_generate_data_quality_rules_flattened_error():
-    client = DataScanServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-    )
-
-    # Attempting to call a method with both a request object and flattened
-    # fields is an error.
-    with pytest.raises(ValueError):
-        client.generate_data_quality_rules(
-            datascans.GenerateDataQualityRulesRequest(),
-            name='name_value',
-        )
-
-@pytest.mark.asyncio
-async def test_generate_data_quality_rules_flattened_async():
-    client = DataScanServiceAsyncClient(
-        credentials=async_anonymous_credentials(),
-    )
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.generate_data_quality_rules),
-            '__call__') as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(datascans.GenerateDataQualityRulesResponse())
-        # Call the method with a truthy value for each flattened field,
-        # using the keyword arguments to the method.
-        response = await client.generate_data_quality_rules(
-            name='name_value',
-        )
-
-        # Establish that the underlying call was made with the expected
-        # request object values.
-        assert len(call.mock_calls)
-        _, args, _ = call.mock_calls[0]
-        arg = args[0].name
-        mock_val = 'name_value'
-        assert arg == mock_val
-
-@pytest.mark.asyncio
-async def test_generate_data_quality_rules_flattened_error_async():
-    client = DataScanServiceAsyncClient(
-        credentials=async_anonymous_credentials(),
-    )
-
-    # Attempting to call a method with both a request object and flattened
-    # fields is an error.
-    with pytest.raises(ValueError):
-        await client.generate_data_quality_rules(
-            datascans.GenerateDataQualityRulesRequest(),
-            name='name_value',
-        )
-
-
-def test_credentials_transport_error():
-    # It is an error to provide credentials and a transport instance.
-    transport = transports.DataScanServiceGrpcTransport(
-        credentials=ga_credentials.AnonymousCredentials(),
-    )
-    with pytest.raises(ValueError):
-        client = DataScanServiceClient(
-            credentials=ga_credentials.AnonymousCredentials(),
-            transport=transport,
-        )
-
-    # It is an error to provide a credentials file and a transport instance.
- transport = transports.DataScanServiceGrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - with pytest.raises(ValueError): - client = DataScanServiceClient( - client_options={"credentials_file": "credentials.json"}, - transport=transport, - ) - - # It is an error to provide an api_key and a transport instance. - transport = transports.DataScanServiceGrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - options = client_options.ClientOptions() - options.api_key = "api_key" - with pytest.raises(ValueError): - client = DataScanServiceClient( - client_options=options, - transport=transport, - ) - - # It is an error to provide an api_key and a credential. - options = client_options.ClientOptions() - options.api_key = "api_key" - with pytest.raises(ValueError): - client = DataScanServiceClient( - client_options=options, - credentials=ga_credentials.AnonymousCredentials() - ) - - # It is an error to provide scopes and a transport instance. - transport = transports.DataScanServiceGrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - with pytest.raises(ValueError): - client = DataScanServiceClient( - client_options={"scopes": ["1", "2"]}, - transport=transport, - ) - - -def test_transport_instance(): - # A client may be instantiated with a custom transport instance. - transport = transports.DataScanServiceGrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - client = DataScanServiceClient(transport=transport) - assert client.transport is transport - -def test_transport_get_channel(): - # A client may be instantiated with a custom transport instance. - transport = transports.DataScanServiceGrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - channel = transport.grpc_channel - assert channel - - transport = transports.DataScanServiceGrpcAsyncIOTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - channel = transport.grpc_channel - assert channel - -@pytest.mark.parametrize("transport_class", [ - transports.DataScanServiceGrpcTransport, - transports.DataScanServiceGrpcAsyncIOTransport, -]) -def test_transport_adc(transport_class): - # Test default credentials are used if not provided. - with mock.patch.object(google.auth, 'default') as adc: - adc.return_value = (ga_credentials.AnonymousCredentials(), None) - transport_class() - adc.assert_called_once() - -def test_transport_kind_grpc(): - transport = DataScanServiceClient.get_transport_class("grpc")( - credentials=ga_credentials.AnonymousCredentials() - ) - assert transport.kind == "grpc" - - -def test_initialize_client_w_grpc(): - client = DataScanServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc" - ) - assert client is not None - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_create_data_scan_empty_call_grpc(): - client = DataScanServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.create_data_scan), - '__call__') as call: - call.return_value = operations_pb2.Operation(name='operations/op') - client.create_data_scan(request=None) - - # Establish that the underlying stub method was called. 
- call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = datascans.CreateDataScanRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_update_data_scan_empty_call_grpc(): - client = DataScanServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.update_data_scan), - '__call__') as call: - call.return_value = operations_pb2.Operation(name='operations/op') - client.update_data_scan(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = datascans.UpdateDataScanRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_delete_data_scan_empty_call_grpc(): - client = DataScanServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.delete_data_scan), - '__call__') as call: - call.return_value = operations_pb2.Operation(name='operations/op') - client.delete_data_scan(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = datascans.DeleteDataScanRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_get_data_scan_empty_call_grpc(): - client = DataScanServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.get_data_scan), - '__call__') as call: - call.return_value = datascans.DataScan() - client.get_data_scan(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = datascans.GetDataScanRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_list_data_scans_empty_call_grpc(): - client = DataScanServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.list_data_scans), - '__call__') as call: - call.return_value = datascans.ListDataScansResponse() - client.list_data_scans(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = datascans.ListDataScansRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_run_data_scan_empty_call_grpc(): - client = DataScanServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call, and fake the request. 
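
These empty-call tests lean on proto3 semantics: a request message constructed with all defaults compares equal to one built with explicit default values, which is why `request=None` and a freshly constructed request are interchangeable here. A quick illustration of that equality (the field value is illustrative):

    from google.cloud import dataplex_v1

    assert dataplex_v1.GetDataScanJobRequest() == dataplex_v1.GetDataScanJobRequest(name="")
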
- with mock.patch.object( - type(client.transport.run_data_scan), - '__call__') as call: - call.return_value = datascans.RunDataScanResponse() - client.run_data_scan(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = datascans.RunDataScanRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_get_data_scan_job_empty_call_grpc(): - client = DataScanServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.get_data_scan_job), - '__call__') as call: - call.return_value = datascans.DataScanJob() - client.get_data_scan_job(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = datascans.GetDataScanJobRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_list_data_scan_jobs_empty_call_grpc(): - client = DataScanServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.list_data_scan_jobs), - '__call__') as call: - call.return_value = datascans.ListDataScanJobsResponse() - client.list_data_scan_jobs(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = datascans.ListDataScanJobsRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_generate_data_quality_rules_empty_call_grpc(): - client = DataScanServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.generate_data_quality_rules), - '__call__') as call: - call.return_value = datascans.GenerateDataQualityRulesResponse() - client.generate_data_quality_rules(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = datascans.GenerateDataQualityRulesRequest() - - assert args[0] == request_msg - - -def test_transport_kind_grpc_asyncio(): - transport = DataScanServiceAsyncClient.get_transport_class("grpc_asyncio")( - credentials=async_anonymous_credentials() - ) - assert transport.kind == "grpc_asyncio" - - -def test_initialize_client_w_grpc_asyncio(): - client = DataScanServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio" - ) - assert client is not None - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -@pytest.mark.asyncio -async def test_create_data_scan_empty_call_grpc_asyncio(): - client = DataScanServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call, and fake the request. 
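
The `grpc_helpers_async.FakeUnaryUnaryCall` used throughout the asyncio variants below wraps a plain response object so that a mocked stub can be awaited like a real gRPC call. A minimal sketch of that behavior:

    import asyncio
    from google.api_core import grpc_helpers_async

    async def demo():
        # Awaiting the fake call yields the wrapped response.
        call = grpc_helpers_async.FakeUnaryUnaryCall("fake response")
        assert await call == "fake response"

    asyncio.run(demo())
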
- with mock.patch.object( - type(client.transport.create_data_scan), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name='operations/spam') - ) - await client.create_data_scan(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = datascans.CreateDataScanRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -@pytest.mark.asyncio -async def test_update_data_scan_empty_call_grpc_asyncio(): - client = DataScanServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.update_data_scan), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name='operations/spam') - ) - await client.update_data_scan(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = datascans.UpdateDataScanRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -@pytest.mark.asyncio -async def test_delete_data_scan_empty_call_grpc_asyncio(): - client = DataScanServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.delete_data_scan), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name='operations/spam') - ) - await client.delete_data_scan(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = datascans.DeleteDataScanRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -@pytest.mark.asyncio -async def test_get_data_scan_empty_call_grpc_asyncio(): - client = DataScanServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.get_data_scan), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(datascans.DataScan( - name='name_value', - uid='uid_value', - description='description_value', - display_name='display_name_value', - state=resources.State.ACTIVE, - type_=datascans.DataScanType.DATA_QUALITY, - )) - await client.get_data_scan(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = datascans.GetDataScanRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. 
-@pytest.mark.asyncio -async def test_list_data_scans_empty_call_grpc_asyncio(): - client = DataScanServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.list_data_scans), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(datascans.ListDataScansResponse( - next_page_token='next_page_token_value', - unreachable=['unreachable_value'], - )) - await client.list_data_scans(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = datascans.ListDataScansRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -@pytest.mark.asyncio -async def test_run_data_scan_empty_call_grpc_asyncio(): - client = DataScanServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.run_data_scan), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(datascans.RunDataScanResponse( - )) - await client.run_data_scan(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = datascans.RunDataScanRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -@pytest.mark.asyncio -async def test_get_data_scan_job_empty_call_grpc_asyncio(): - client = DataScanServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.get_data_scan_job), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(datascans.DataScanJob( - name='name_value', - uid='uid_value', - state=datascans.DataScanJob.State.RUNNING, - message='message_value', - type_=datascans.DataScanType.DATA_QUALITY, - )) - await client.get_data_scan_job(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = datascans.GetDataScanJobRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -@pytest.mark.asyncio -async def test_list_data_scan_jobs_empty_call_grpc_asyncio(): - client = DataScanServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.list_data_scan_jobs), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(datascans.ListDataScanJobsResponse( - next_page_token='next_page_token_value', - )) - await client.list_data_scan_jobs(request=None) - - # Establish that the underlying stub method was called. 
- call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = datascans.ListDataScanJobsRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -@pytest.mark.asyncio -async def test_generate_data_quality_rules_empty_call_grpc_asyncio(): - client = DataScanServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.generate_data_quality_rules), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(datascans.GenerateDataQualityRulesResponse( - )) - await client.generate_data_quality_rules(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = datascans.GenerateDataQualityRulesRequest() - - assert args[0] == request_msg - - -def test_transport_grpc_default(): - # A client should use the gRPC transport by default. - client = DataScanServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - assert isinstance( - client.transport, - transports.DataScanServiceGrpcTransport, - ) - -def test_data_scan_service_base_transport_error(): - # Passing both a credentials object and credentials_file should raise an error - with pytest.raises(core_exceptions.DuplicateCredentialArgs): - transport = transports.DataScanServiceTransport( - credentials=ga_credentials.AnonymousCredentials(), - credentials_file="credentials.json" - ) - - -def test_data_scan_service_base_transport(): - # Instantiate the base transport. - with mock.patch('google.cloud.dataplex_v1.services.data_scan_service.transports.DataScanServiceTransport.__init__') as Transport: - Transport.return_value = None - transport = transports.DataScanServiceTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Every method on the transport should just blindly - # raise NotImplementedError. 
- methods = ( - 'create_data_scan', - 'update_data_scan', - 'delete_data_scan', - 'get_data_scan', - 'list_data_scans', - 'run_data_scan', - 'get_data_scan_job', - 'list_data_scan_jobs', - 'generate_data_quality_rules', - 'get_location', - 'list_locations', - 'get_operation', - 'cancel_operation', - 'delete_operation', - 'list_operations', - ) - for method in methods: - with pytest.raises(NotImplementedError): - getattr(transport, method)(request=object()) - - with pytest.raises(NotImplementedError): - transport.close() - - # Additionally, the LRO client (a property) should - # also raise NotImplementedError - with pytest.raises(NotImplementedError): - transport.operations_client - - # Catch all for all remaining methods and properties - remainder = [ - 'kind', - ] - for r in remainder: - with pytest.raises(NotImplementedError): - getattr(transport, r)() - - -def test_data_scan_service_base_transport_with_credentials_file(): - # Instantiate the base transport with a credentials file - with mock.patch.object(google.auth, 'load_credentials_from_file', autospec=True) as load_creds, mock.patch('google.cloud.dataplex_v1.services.data_scan_service.transports.DataScanServiceTransport._prep_wrapped_messages') as Transport: - Transport.return_value = None - load_creds.return_value = (ga_credentials.AnonymousCredentials(), None) - transport = transports.DataScanServiceTransport( - credentials_file="credentials.json", - quota_project_id="octopus", - ) - load_creds.assert_called_once_with("credentials.json", - scopes=None, - default_scopes=( - 'https://www.googleapis.com/auth/cloud-platform', -), - quota_project_id="octopus", - ) - - -def test_data_scan_service_base_transport_with_adc(): - # Test the default credentials are used if credentials and credentials_file are None. - with mock.patch.object(google.auth, 'default', autospec=True) as adc, mock.patch('google.cloud.dataplex_v1.services.data_scan_service.transports.DataScanServiceTransport._prep_wrapped_messages') as Transport: - Transport.return_value = None - adc.return_value = (ga_credentials.AnonymousCredentials(), None) - transport = transports.DataScanServiceTransport() - adc.assert_called_once() - - -def test_data_scan_service_auth_adc(): - # If no credentials are provided, we should use ADC credentials. - with mock.patch.object(google.auth, 'default', autospec=True) as adc: - adc.return_value = (ga_credentials.AnonymousCredentials(), None) - DataScanServiceClient() - adc.assert_called_once_with( - scopes=None, - default_scopes=( - 'https://www.googleapis.com/auth/cloud-platform', -), - quota_project_id=None, - ) - - -@pytest.mark.parametrize( - "transport_class", - [ - transports.DataScanServiceGrpcTransport, - transports.DataScanServiceGrpcAsyncIOTransport, - ], -) -def test_data_scan_service_transport_auth_adc(transport_class): - # If credentials and host are not provided, the transport class should use - # ADC credentials. 
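
Outside of these mocks, the `google.auth.default()` lookup is how a transport acquires Application Default Credentials; it returns a credentials object plus the inferred project. A sketch, which needs ambient credentials in the environment to actually run:

    import google.auth

    credentials, project_id = google.auth.default(
        scopes=["https://www.googleapis.com/auth/cloud-platform"],
    )
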
- with mock.patch.object(google.auth, 'default', autospec=True) as adc: - adc.return_value = (ga_credentials.AnonymousCredentials(), None) - transport_class(quota_project_id="octopus", scopes=["1", "2"]) - adc.assert_called_once_with( - scopes=["1", "2"], - default_scopes=( 'https://www.googleapis.com/auth/cloud-platform',), - quota_project_id="octopus", - ) - - -@pytest.mark.parametrize( - "transport_class", - [ - transports.DataScanServiceGrpcTransport, - transports.DataScanServiceGrpcAsyncIOTransport, - ], -) -def test_data_scan_service_transport_auth_gdch_credentials(transport_class): - host = 'https://language.com' - api_audience_tests = [None, 'https://language2.com'] - api_audience_expect = [host, 'https://language2.com'] - for t, e in zip(api_audience_tests, api_audience_expect): - with mock.patch.object(google.auth, 'default', autospec=True) as adc: - gdch_mock = mock.MagicMock() - type(gdch_mock).with_gdch_audience = mock.PropertyMock(return_value=gdch_mock) - adc.return_value = (gdch_mock, None) - transport_class(host=host, api_audience=t) - gdch_mock.with_gdch_audience.assert_called_once_with( - e - ) - - -@pytest.mark.parametrize( - "transport_class,grpc_helpers", - [ - (transports.DataScanServiceGrpcTransport, grpc_helpers), - (transports.DataScanServiceGrpcAsyncIOTransport, grpc_helpers_async) - ], -) -def test_data_scan_service_transport_create_channel(transport_class, grpc_helpers): - # If credentials and host are not provided, the transport class should use - # ADC credentials. - with mock.patch.object(google.auth, "default", autospec=True) as adc, mock.patch.object( - grpc_helpers, "create_channel", autospec=True - ) as create_channel: - creds = ga_credentials.AnonymousCredentials() - adc.return_value = (creds, None) - transport_class( - quota_project_id="octopus", - scopes=["1", "2"] - ) - - create_channel.assert_called_with( - "dataplex.googleapis.com:443", - credentials=creds, - credentials_file=None, - quota_project_id="octopus", - default_scopes=( - 'https://www.googleapis.com/auth/cloud-platform', -), - scopes=["1", "2"], - default_host="dataplex.googleapis.com", - ssl_credentials=None, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ], - ) - - -@pytest.mark.parametrize("transport_class", [transports.DataScanServiceGrpcTransport, transports.DataScanServiceGrpcAsyncIOTransport]) -def test_data_scan_service_grpc_transport_client_cert_source_for_mtls( - transport_class -): - cred = ga_credentials.AnonymousCredentials() - - # Check ssl_channel_credentials is used if provided. - with mock.patch.object(transport_class, "create_channel") as mock_create_channel: - mock_ssl_channel_creds = mock.Mock() - transport_class( - host="squid.clam.whelk", - credentials=cred, - ssl_channel_credentials=mock_ssl_channel_creds - ) - mock_create_channel.assert_called_once_with( - "squid.clam.whelk:443", - credentials=cred, - credentials_file=None, - scopes=None, - ssl_credentials=mock_ssl_channel_creds, - quota_project_id=None, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ], - ) - - # Check if ssl_channel_credentials is not provided, then client_cert_source_for_mtls - # is used. 
- with mock.patch.object(transport_class, "create_channel", return_value=mock.Mock()): - with mock.patch("grpc.ssl_channel_credentials") as mock_ssl_cred: - transport_class( - credentials=cred, - client_cert_source_for_mtls=client_cert_source_callback - ) - expected_cert, expected_key = client_cert_source_callback() - mock_ssl_cred.assert_called_once_with( - certificate_chain=expected_cert, - private_key=expected_key - ) - - -@pytest.mark.parametrize("transport_name", [ - "grpc", - "grpc_asyncio", -]) -def test_data_scan_service_host_no_port(transport_name): - client = DataScanServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - client_options=client_options.ClientOptions(api_endpoint='dataplex.googleapis.com'), - transport=transport_name, - ) - assert client.transport._host == ( - 'dataplex.googleapis.com:443' - ) - -@pytest.mark.parametrize("transport_name", [ - "grpc", - "grpc_asyncio", -]) -def test_data_scan_service_host_with_port(transport_name): - client = DataScanServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - client_options=client_options.ClientOptions(api_endpoint='dataplex.googleapis.com:8000'), - transport=transport_name, - ) - assert client.transport._host == ( - 'dataplex.googleapis.com:8000' - ) - -def test_data_scan_service_grpc_transport_channel(): - channel = grpc.secure_channel('http://localhost/', grpc.local_channel_credentials()) - - # Check that channel is used if provided. - transport = transports.DataScanServiceGrpcTransport( - host="squid.clam.whelk", - channel=channel, - ) - assert transport.grpc_channel == channel - assert transport._host == "squid.clam.whelk:443" - assert transport._ssl_channel_credentials == None - - -def test_data_scan_service_grpc_asyncio_transport_channel(): - channel = aio.secure_channel('http://localhost/', grpc.local_channel_credentials()) - - # Check that channel is used if provided. - transport = transports.DataScanServiceGrpcAsyncIOTransport( - host="squid.clam.whelk", - channel=channel, - ) - assert transport.grpc_channel == channel - assert transport._host == "squid.clam.whelk:443" - assert transport._ssl_channel_credentials == None - - -# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are -# removed from grpc/grpc_asyncio transport constructor. 
-@pytest.mark.parametrize("transport_class", [transports.DataScanServiceGrpcTransport, transports.DataScanServiceGrpcAsyncIOTransport]) -def test_data_scan_service_transport_channel_mtls_with_client_cert_source( - transport_class -): - with mock.patch("grpc.ssl_channel_credentials", autospec=True) as grpc_ssl_channel_cred: - with mock.patch.object(transport_class, "create_channel") as grpc_create_channel: - mock_ssl_cred = mock.Mock() - grpc_ssl_channel_cred.return_value = mock_ssl_cred - - mock_grpc_channel = mock.Mock() - grpc_create_channel.return_value = mock_grpc_channel - - cred = ga_credentials.AnonymousCredentials() - with pytest.warns(DeprecationWarning): - with mock.patch.object(google.auth, 'default') as adc: - adc.return_value = (cred, None) - transport = transport_class( - host="squid.clam.whelk", - api_mtls_endpoint="mtls.squid.clam.whelk", - client_cert_source=client_cert_source_callback, - ) - adc.assert_called_once() - - grpc_ssl_channel_cred.assert_called_once_with( - certificate_chain=b"cert bytes", private_key=b"key bytes" - ) - grpc_create_channel.assert_called_once_with( - "mtls.squid.clam.whelk:443", - credentials=cred, - credentials_file=None, - scopes=None, - ssl_credentials=mock_ssl_cred, - quota_project_id=None, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ], - ) - assert transport.grpc_channel == mock_grpc_channel - assert transport._ssl_channel_credentials == mock_ssl_cred - - -# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are -# removed from grpc/grpc_asyncio transport constructor. -@pytest.mark.parametrize("transport_class", [transports.DataScanServiceGrpcTransport, transports.DataScanServiceGrpcAsyncIOTransport]) -def test_data_scan_service_transport_channel_mtls_with_adc( - transport_class -): - mock_ssl_cred = mock.Mock() - with mock.patch.multiple( - "google.auth.transport.grpc.SslCredentials", - __init__=mock.Mock(return_value=None), - ssl_credentials=mock.PropertyMock(return_value=mock_ssl_cred), - ): - with mock.patch.object(transport_class, "create_channel") as grpc_create_channel: - mock_grpc_channel = mock.Mock() - grpc_create_channel.return_value = mock_grpc_channel - mock_cred = mock.Mock() - - with pytest.warns(DeprecationWarning): - transport = transport_class( - host="squid.clam.whelk", - credentials=mock_cred, - api_mtls_endpoint="mtls.squid.clam.whelk", - client_cert_source=None, - ) - - grpc_create_channel.assert_called_once_with( - "mtls.squid.clam.whelk:443", - credentials=mock_cred, - credentials_file=None, - scopes=None, - ssl_credentials=mock_ssl_cred, - quota_project_id=None, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ], - ) - assert transport.grpc_channel == mock_grpc_channel - - -def test_data_scan_service_grpc_lro_client(): - client = DataScanServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - transport = client.transport - - # Ensure that we have a api-core operations client. - assert isinstance( - transport.operations_client, - operations_v1.OperationsClient, - ) - - # Ensure that subsequent calls to the property send the exact same object. 
- assert transport.operations_client is transport.operations_client
-
-
-def test_data_scan_service_grpc_lro_async_client():
- client = DataScanServiceAsyncClient(
- credentials=ga_credentials.AnonymousCredentials(),
- transport='grpc_asyncio',
- )
- transport = client.transport
-
- # Ensure that we have an api-core operations client.
- assert isinstance(
- transport.operations_client,
- operations_v1.OperationsAsyncClient,
- )
-
- # Ensure that subsequent calls to the property return the exact same object.
- assert transport.operations_client is transport.operations_client
-
-
-def test_connection_path():
- project = "squid"
- location = "clam"
- connection = "whelk"
- expected = "projects/{project}/locations/{location}/connections/{connection}".format(project=project, location=location, connection=connection, )
- actual = DataScanServiceClient.connection_path(project, location, connection)
- assert expected == actual
-
-
-def test_parse_connection_path():
- expected = {
- "project": "octopus",
- "location": "oyster",
- "connection": "nudibranch",
- }
- path = DataScanServiceClient.connection_path(**expected)
-
- # Check that the path construction is reversible.
- actual = DataScanServiceClient.parse_connection_path(path)
- assert expected == actual
-
-def test_data_scan_path():
- project = "cuttlefish"
- location = "mussel"
- dataScan = "winkle"
- expected = "projects/{project}/locations/{location}/dataScans/{dataScan}".format(project=project, location=location, dataScan=dataScan, )
- actual = DataScanServiceClient.data_scan_path(project, location, dataScan)
- assert expected == actual
-
-
-def test_parse_data_scan_path():
- expected = {
- "project": "nautilus",
- "location": "scallop",
- "dataScan": "abalone",
- }
- path = DataScanServiceClient.data_scan_path(**expected)
-
- # Check that the path construction is reversible.
- actual = DataScanServiceClient.parse_data_scan_path(path)
- assert expected == actual
-
-def test_data_scan_job_path():
- project = "squid"
- location = "clam"
- dataScan = "whelk"
- job = "octopus"
- expected = "projects/{project}/locations/{location}/dataScans/{dataScan}/jobs/{job}".format(project=project, location=location, dataScan=dataScan, job=job, )
- actual = DataScanServiceClient.data_scan_job_path(project, location, dataScan, job)
- assert expected == actual
-
-
-def test_parse_data_scan_job_path():
- expected = {
- "project": "oyster",
- "location": "nudibranch",
- "dataScan": "cuttlefish",
- "job": "mussel",
- }
- path = DataScanServiceClient.data_scan_job_path(**expected)
-
- # Check that the path construction is reversible.
- actual = DataScanServiceClient.parse_data_scan_job_path(path)
- assert expected == actual
-
-def test_dataset_path():
- project = "winkle"
- dataset = "nautilus"
- expected = "projects/{project}/datasets/{dataset}".format(project=project, dataset=dataset, )
- actual = DataScanServiceClient.dataset_path(project, dataset)
- assert expected == actual
-
-
-def test_parse_dataset_path():
- expected = {
- "project": "scallop",
- "dataset": "abalone",
- }
- path = DataScanServiceClient.dataset_path(**expected)
-
- # Check that the path construction is reversible.
- actual = DataScanServiceClient.parse_dataset_path(path) - assert expected == actual - -def test_entity_path(): - project = "squid" - location = "clam" - lake = "whelk" - zone = "octopus" - entity = "oyster" - expected = "projects/{project}/locations/{location}/lakes/{lake}/zones/{zone}/entities/{entity}".format(project=project, location=location, lake=lake, zone=zone, entity=entity, ) - actual = DataScanServiceClient.entity_path(project, location, lake, zone, entity) - assert expected == actual - - -def test_parse_entity_path(): - expected = { - "project": "nudibranch", - "location": "cuttlefish", - "lake": "mussel", - "zone": "winkle", - "entity": "nautilus", - } - path = DataScanServiceClient.entity_path(**expected) - - # Check that the path construction is reversible. - actual = DataScanServiceClient.parse_entity_path(path) - assert expected == actual - -def test_common_billing_account_path(): - billing_account = "scallop" - expected = "billingAccounts/{billing_account}".format(billing_account=billing_account, ) - actual = DataScanServiceClient.common_billing_account_path(billing_account) - assert expected == actual - - -def test_parse_common_billing_account_path(): - expected = { - "billing_account": "abalone", - } - path = DataScanServiceClient.common_billing_account_path(**expected) - - # Check that the path construction is reversible. - actual = DataScanServiceClient.parse_common_billing_account_path(path) - assert expected == actual - -def test_common_folder_path(): - folder = "squid" - expected = "folders/{folder}".format(folder=folder, ) - actual = DataScanServiceClient.common_folder_path(folder) - assert expected == actual - - -def test_parse_common_folder_path(): - expected = { - "folder": "clam", - } - path = DataScanServiceClient.common_folder_path(**expected) - - # Check that the path construction is reversible. - actual = DataScanServiceClient.parse_common_folder_path(path) - assert expected == actual - -def test_common_organization_path(): - organization = "whelk" - expected = "organizations/{organization}".format(organization=organization, ) - actual = DataScanServiceClient.common_organization_path(organization) - assert expected == actual - - -def test_parse_common_organization_path(): - expected = { - "organization": "octopus", - } - path = DataScanServiceClient.common_organization_path(**expected) - - # Check that the path construction is reversible. - actual = DataScanServiceClient.parse_common_organization_path(path) - assert expected == actual - -def test_common_project_path(): - project = "oyster" - expected = "projects/{project}".format(project=project, ) - actual = DataScanServiceClient.common_project_path(project) - assert expected == actual - - -def test_parse_common_project_path(): - expected = { - "project": "nudibranch", - } - path = DataScanServiceClient.common_project_path(**expected) - - # Check that the path construction is reversible. - actual = DataScanServiceClient.parse_common_project_path(path) - assert expected == actual - -def test_common_location_path(): - project = "cuttlefish" - location = "mussel" - expected = "projects/{project}/locations/{location}".format(project=project, location=location, ) - actual = DataScanServiceClient.common_location_path(project, location) - assert expected == actual - - -def test_parse_common_location_path(): - expected = { - "project": "winkle", - "location": "nautilus", - } - path = DataScanServiceClient.common_location_path(**expected) - - # Check that the path construction is reversible. 
- actual = DataScanServiceClient.parse_common_location_path(path) - assert expected == actual - - -def test_client_with_default_client_info(): - client_info = gapic_v1.client_info.ClientInfo() - - with mock.patch.object(transports.DataScanServiceTransport, '_prep_wrapped_messages') as prep: - client = DataScanServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - client_info=client_info, - ) - prep.assert_called_once_with(client_info) - - with mock.patch.object(transports.DataScanServiceTransport, '_prep_wrapped_messages') as prep: - transport_class = DataScanServiceClient.get_transport_class() - transport = transport_class( - credentials=ga_credentials.AnonymousCredentials(), - client_info=client_info, - ) - prep.assert_called_once_with(client_info) - - -def test_delete_operation(transport: str = "grpc"): - client = DataScanServiceClient( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = operations_pb2.DeleteOperationRequest() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.delete_operation), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = None - response = client.delete_operation(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the response is the type that we expect. - assert response is None -@pytest.mark.asyncio -async def test_delete_operation_async(transport: str = "grpc_asyncio"): - client = DataScanServiceAsyncClient( - credentials=async_anonymous_credentials(), transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = operations_pb2.DeleteOperationRequest() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.delete_operation), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - None - ) - response = await client.delete_operation(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the response is the type that we expect. - assert response is None - -def test_delete_operation_field_headers(): - client = DataScanServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = operations_pb2.DeleteOperationRequest() - request.name = "locations" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.delete_operation), "__call__") as call: - call.return_value = None - - client.delete_operation(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. 
- _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "name=locations",) in kw["metadata"] -@pytest.mark.asyncio -async def test_delete_operation_field_headers_async(): - client = DataScanServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = operations_pb2.DeleteOperationRequest() - request.name = "locations" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.delete_operation), "__call__") as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - None - ) - await client.delete_operation(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "name=locations",) in kw["metadata"] - -def test_delete_operation_from_dict(): - client = DataScanServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.delete_operation), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = None - - response = client.delete_operation( - request={ - "name": "locations", - } - ) - call.assert_called() -@pytest.mark.asyncio -async def test_delete_operation_from_dict_async(): - client = DataScanServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.delete_operation), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - None - ) - response = await client.delete_operation( - request={ - "name": "locations", - } - ) - call.assert_called() - - -def test_cancel_operation(transport: str = "grpc"): - client = DataScanServiceClient( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = operations_pb2.CancelOperationRequest() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = None - response = client.cancel_operation(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the response is the type that we expect. - assert response is None -@pytest.mark.asyncio -async def test_cancel_operation_async(transport: str = "grpc_asyncio"): - client = DataScanServiceAsyncClient( - credentials=async_anonymous_credentials(), transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = operations_pb2.CancelOperationRequest() - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - None - ) - response = await client.cancel_operation(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the response is the type that we expect. - assert response is None - -def test_cancel_operation_field_headers(): - client = DataScanServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = operations_pb2.CancelOperationRequest() - request.name = "locations" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: - call.return_value = None - - client.cancel_operation(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "name=locations",) in kw["metadata"] -@pytest.mark.asyncio -async def test_cancel_operation_field_headers_async(): - client = DataScanServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = operations_pb2.CancelOperationRequest() - request.name = "locations" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - None - ) - await client.cancel_operation(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "name=locations",) in kw["metadata"] - -def test_cancel_operation_from_dict(): - client = DataScanServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = None - - response = client.cancel_operation( - request={ - "name": "locations", - } - ) - call.assert_called() -@pytest.mark.asyncio -async def test_cancel_operation_from_dict_async(): - client = DataScanServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: - # Designate an appropriate return value for the call. 
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - None - ) - response = await client.cancel_operation( - request={ - "name": "locations", - } - ) - call.assert_called() - - -def test_get_operation(transport: str = "grpc"): - client = DataScanServiceClient( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = operations_pb2.GetOperationRequest() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_operation), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = operations_pb2.Operation() - response = client.get_operation(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, operations_pb2.Operation) -@pytest.mark.asyncio -async def test_get_operation_async(transport: str = "grpc_asyncio"): - client = DataScanServiceAsyncClient( - credentials=async_anonymous_credentials(), transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = operations_pb2.GetOperationRequest() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_operation), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation() - ) - response = await client.get_operation(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, operations_pb2.Operation) - -def test_get_operation_field_headers(): - client = DataScanServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = operations_pb2.GetOperationRequest() - request.name = "locations" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_operation), "__call__") as call: - call.return_value = operations_pb2.Operation() - - client.get_operation(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "name=locations",) in kw["metadata"] -@pytest.mark.asyncio -async def test_get_operation_field_headers_async(): - client = DataScanServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = operations_pb2.GetOperationRequest() - request.name = "locations" - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object(type(client.transport.get_operation), "__call__") as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation() - ) - await client.get_operation(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "name=locations",) in kw["metadata"] - -def test_get_operation_from_dict(): - client = DataScanServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_operation), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = operations_pb2.Operation() - - response = client.get_operation( - request={ - "name": "locations", - } - ) - call.assert_called() -@pytest.mark.asyncio -async def test_get_operation_from_dict_async(): - client = DataScanServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_operation), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation() - ) - response = await client.get_operation( - request={ - "name": "locations", - } - ) - call.assert_called() - - -def test_list_operations(transport: str = "grpc"): - client = DataScanServiceClient( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = operations_pb2.ListOperationsRequest() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_operations), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = operations_pb2.ListOperationsResponse() - response = client.list_operations(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, operations_pb2.ListOperationsResponse) -@pytest.mark.asyncio -async def test_list_operations_async(transport: str = "grpc_asyncio"): - client = DataScanServiceAsyncClient( - credentials=async_anonymous_credentials(), transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = operations_pb2.ListOperationsRequest() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_operations), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.ListOperationsResponse() - ) - response = await client.list_operations(request) - # Establish that the underlying gRPC stub method was called. 
- assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, operations_pb2.ListOperationsResponse) - -def test_list_operations_field_headers(): - client = DataScanServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = operations_pb2.ListOperationsRequest() - request.name = "locations" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_operations), "__call__") as call: - call.return_value = operations_pb2.ListOperationsResponse() - - client.list_operations(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "name=locations",) in kw["metadata"] -@pytest.mark.asyncio -async def test_list_operations_field_headers_async(): - client = DataScanServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = operations_pb2.ListOperationsRequest() - request.name = "locations" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_operations), "__call__") as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.ListOperationsResponse() - ) - await client.list_operations(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "name=locations",) in kw["metadata"] - -def test_list_operations_from_dict(): - client = DataScanServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_operations), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = operations_pb2.ListOperationsResponse() - - response = client.list_operations( - request={ - "name": "locations", - } - ) - call.assert_called() -@pytest.mark.asyncio -async def test_list_operations_from_dict_async(): - client = DataScanServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_operations), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.ListOperationsResponse() - ) - response = await client.list_operations( - request={ - "name": "locations", - } - ) - call.assert_called() - - -def test_list_locations(transport: str = "grpc"): - client = DataScanServiceClient( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. 
- request = locations_pb2.ListLocationsRequest() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_locations), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = locations_pb2.ListLocationsResponse() - response = client.list_locations(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, locations_pb2.ListLocationsResponse) -@pytest.mark.asyncio -async def test_list_locations_async(transport: str = "grpc_asyncio"): - client = DataScanServiceAsyncClient( - credentials=async_anonymous_credentials(), transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = locations_pb2.ListLocationsRequest() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_locations), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - locations_pb2.ListLocationsResponse() - ) - response = await client.list_locations(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, locations_pb2.ListLocationsResponse) - -def test_list_locations_field_headers(): - client = DataScanServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = locations_pb2.ListLocationsRequest() - request.name = "locations" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_locations), "__call__") as call: - call.return_value = locations_pb2.ListLocationsResponse() - - client.list_locations(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "name=locations",) in kw["metadata"] -@pytest.mark.asyncio -async def test_list_locations_field_headers_async(): - client = DataScanServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = locations_pb2.ListLocationsRequest() - request.name = "locations" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_locations), "__call__") as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - locations_pb2.ListLocationsResponse() - ) - await client.list_locations(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. 
- _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "name=locations",) in kw["metadata"] - -def test_list_locations_from_dict(): - client = DataScanServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_locations), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = locations_pb2.ListLocationsResponse() - - response = client.list_locations( - request={ - "name": "locations", - } - ) - call.assert_called() -@pytest.mark.asyncio -async def test_list_locations_from_dict_async(): - client = DataScanServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_locations), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - locations_pb2.ListLocationsResponse() - ) - response = await client.list_locations( - request={ - "name": "locations", - } - ) - call.assert_called() - - -def test_get_location(transport: str = "grpc"): - client = DataScanServiceClient( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = locations_pb2.GetLocationRequest() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_location), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = locations_pb2.Location() - response = client.get_location(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, locations_pb2.Location) -@pytest.mark.asyncio -async def test_get_location_async(transport: str = "grpc_asyncio"): - client = DataScanServiceAsyncClient( - credentials=async_anonymous_credentials(), transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = locations_pb2.GetLocationRequest() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_location), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - locations_pb2.Location() - ) - response = await client.get_location(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, locations_pb2.Location) - -def test_get_location_field_headers(): - client = DataScanServiceClient( - credentials=ga_credentials.AnonymousCredentials()) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = locations_pb2.GetLocationRequest() - request.name = "locations/abc" - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object(type(client.transport.get_location), "__call__") as call:
- call.return_value = locations_pb2.Location()
-
- client.get_location(request)
- # Establish that the underlying gRPC stub method was called.
- assert len(call.mock_calls) == 1
- _, args, _ = call.mock_calls[0]
- assert args[0] == request
-
- # Establish that the field header was sent.
- _, _, kw = call.mock_calls[0]
- assert ("x-goog-request-params", "name=locations/abc",) in kw["metadata"]
-@pytest.mark.asyncio
-async def test_get_location_field_headers_async():
- client = DataScanServiceAsyncClient(
- credentials=async_anonymous_credentials()
- )
-
- # Any value that is part of the HTTP/1.1 URI should be sent as
- # a field header. Set these to a non-empty value.
- request = locations_pb2.GetLocationRequest()
- request.name = "locations/abc"
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(type(client.transport.get_location), "__call__") as call:
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
- locations_pb2.Location()
- )
- await client.get_location(request)
- # Establish that the underlying gRPC stub method was called.
- assert len(call.mock_calls) == 1
- _, args, _ = call.mock_calls[0]
- assert args[0] == request
-
- # Establish that the field header was sent.
- _, _, kw = call.mock_calls[0]
- assert ("x-goog-request-params", "name=locations/abc",) in kw["metadata"]
-
-def test_get_location_from_dict():
- client = DataScanServiceClient(
- credentials=ga_credentials.AnonymousCredentials(),
- )
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(type(client.transport.get_location), "__call__") as call:
- # Designate an appropriate return value for the call.
- call.return_value = locations_pb2.Location()
-
- response = client.get_location(
- request={
- "name": "locations/abc",
- }
- )
- call.assert_called()
-@pytest.mark.asyncio
-async def test_get_location_from_dict_async():
- client = DataScanServiceAsyncClient(
- credentials=async_anonymous_credentials(),
- )
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(type(client.transport.get_location), "__call__") as call:
- # Designate an appropriate return value for the call.
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
- locations_pb2.Location()
- )
- response = await client.get_location(
- request={
- "name": "locations/abc",
- }
- )
- call.assert_called()
-
-
-def test_transport_close_grpc():
- client = DataScanServiceClient(
- credentials=ga_credentials.AnonymousCredentials(),
- transport="grpc"
- )
- with mock.patch.object(type(getattr(client.transport, "_grpc_channel")), "close") as close:
- with client:
- close.assert_not_called()
- close.assert_called_once()
-
-
-@pytest.mark.asyncio
-async def test_transport_close_grpc_asyncio():
- client = DataScanServiceAsyncClient(
- credentials=async_anonymous_credentials(),
- transport="grpc_asyncio"
- )
- with mock.patch.object(type(getattr(client.transport, "_grpc_channel")), "close") as close:
- async with client:
- close.assert_not_called()
- close.assert_called_once()
-
-
-def test_client_ctx():
- transports = [
- 'grpc',
- ]
- for transport in transports:
- client = DataScanServiceClient(
- credentials=ga_credentials.AnonymousCredentials(),
- transport=transport
- )
- # Test that the client calls the underlying transport.
- with mock.patch.object(type(client.transport), "close") as close: - close.assert_not_called() - with client: - pass - close.assert_called() - -@pytest.mark.parametrize("client_class,transport_class", [ - (DataScanServiceClient, transports.DataScanServiceGrpcTransport), - (DataScanServiceAsyncClient, transports.DataScanServiceGrpcAsyncIOTransport), -]) -def test_api_key_credentials(client_class, transport_class): - with mock.patch.object( - google.auth._default, "get_api_key_credentials", create=True - ) as get_api_key_credentials: - mock_cred = mock.Mock() - get_api_key_credentials.return_value = mock_cred - options = client_options.ClientOptions() - options.api_key = "api_key" - with mock.patch.object(transport_class, "__init__") as patched: - patched.return_value = None - client = client_class(client_options=options) - patched.assert_called_once_with( - credentials=mock_cred, - credentials_file=None, - host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE), - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) diff --git a/owl-bot-staging/google-cloud-dataplex/v1/tests/unit/gapic/dataplex_v1/test_data_taxonomy_service.py b/owl-bot-staging/google-cloud-dataplex/v1/tests/unit/gapic/dataplex_v1/test_data_taxonomy_service.py deleted file mode 100644 index 9ef5ff03519d..000000000000 --- a/owl-bot-staging/google-cloud-dataplex/v1/tests/unit/gapic/dataplex_v1/test_data_taxonomy_service.py +++ /dev/null @@ -1,8505 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# -import os -# try/except added for compatibility with python < 3.8 -try: - from unittest import mock - from unittest.mock import AsyncMock # pragma: NO COVER -except ImportError: # pragma: NO COVER - import mock - -import grpc -from grpc.experimental import aio -import math -import pytest -from google.api_core import api_core_version -from proto.marshal.rules.dates import DurationRule, TimestampRule -from proto.marshal.rules import wrappers - -try: - from google.auth.aio import credentials as ga_credentials_async - HAS_GOOGLE_AUTH_AIO = True -except ImportError: # pragma: NO COVER - HAS_GOOGLE_AUTH_AIO = False - -from google.api_core import client_options -from google.api_core import exceptions as core_exceptions -from google.api_core import future -from google.api_core import gapic_v1 -from google.api_core import grpc_helpers -from google.api_core import grpc_helpers_async -from google.api_core import operation -from google.api_core import operation_async # type: ignore -from google.api_core import operations_v1 -from google.api_core import path_template -from google.api_core import retry as retries -from google.auth import credentials as ga_credentials -from google.auth.exceptions import MutualTLSChannelError -from google.cloud.dataplex_v1.services.data_taxonomy_service import DataTaxonomyServiceAsyncClient -from google.cloud.dataplex_v1.services.data_taxonomy_service import DataTaxonomyServiceClient -from google.cloud.dataplex_v1.services.data_taxonomy_service import pagers -from google.cloud.dataplex_v1.services.data_taxonomy_service import transports -from google.cloud.dataplex_v1.types import data_taxonomy -from google.cloud.dataplex_v1.types import data_taxonomy as gcd_data_taxonomy -from google.cloud.dataplex_v1.types import security -from google.cloud.dataplex_v1.types import service -from google.cloud.location import locations_pb2 -from google.iam.v1 import iam_policy_pb2 # type: ignore -from google.iam.v1 import options_pb2 # type: ignore -from google.iam.v1 import policy_pb2 # type: ignore -from google.longrunning import operations_pb2 # type: ignore -from google.oauth2 import service_account -from google.protobuf import empty_pb2 # type: ignore -from google.protobuf import field_mask_pb2 # type: ignore -from google.protobuf import timestamp_pb2 # type: ignore -import google.auth - - -async def mock_async_gen(data, chunk_size=1): - for i in range(0, len(data)): # pragma: NO COVER - chunk = data[i : i + chunk_size] - yield chunk.encode("utf-8") - -def client_cert_source_callback(): - return b"cert bytes", b"key bytes" - -# TODO: use async auth anon credentials by default once the minimum version of google-auth is upgraded. -# See related issue: https://github.com/googleapis/gapic-generator-python/issues/2107. -def async_anonymous_credentials(): - if HAS_GOOGLE_AUTH_AIO: - return ga_credentials_async.AnonymousCredentials() - return ga_credentials.AnonymousCredentials() - -# If default endpoint is localhost, then default mtls endpoint will be the same. -# This method modifies the default endpoint so the client can produce a different -# mtls endpoint for endpoint testing purposes. -def modify_default_endpoint(client): - return "foo.googleapis.com" if ("localhost" in client.DEFAULT_ENDPOINT) else client.DEFAULT_ENDPOINT - -# If default endpoint template is localhost, then default mtls endpoint will be the same. -# This method modifies the default endpoint template so the client can produce a different -# mtls endpoint for endpoint testing purposes. 
-def modify_default_endpoint_template(client): - return "test.{UNIVERSE_DOMAIN}" if ("localhost" in client._DEFAULT_ENDPOINT_TEMPLATE) else client._DEFAULT_ENDPOINT_TEMPLATE - - -def test__get_default_mtls_endpoint(): - api_endpoint = "example.googleapis.com" - api_mtls_endpoint = "example.mtls.googleapis.com" - sandbox_endpoint = "example.sandbox.googleapis.com" - sandbox_mtls_endpoint = "example.mtls.sandbox.googleapis.com" - non_googleapi = "api.example.com" - - assert DataTaxonomyServiceClient._get_default_mtls_endpoint(None) is None - assert DataTaxonomyServiceClient._get_default_mtls_endpoint(api_endpoint) == api_mtls_endpoint - assert DataTaxonomyServiceClient._get_default_mtls_endpoint(api_mtls_endpoint) == api_mtls_endpoint - assert DataTaxonomyServiceClient._get_default_mtls_endpoint(sandbox_endpoint) == sandbox_mtls_endpoint - assert DataTaxonomyServiceClient._get_default_mtls_endpoint(sandbox_mtls_endpoint) == sandbox_mtls_endpoint - assert DataTaxonomyServiceClient._get_default_mtls_endpoint(non_googleapi) == non_googleapi - -def test__read_environment_variables(): - assert DataTaxonomyServiceClient._read_environment_variables() == (False, "auto", None) - - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): - assert DataTaxonomyServiceClient._read_environment_variables() == (True, "auto", None) - - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "false"}): - assert DataTaxonomyServiceClient._read_environment_variables() == (False, "auto", None) - - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"}): - with pytest.raises(ValueError) as excinfo: - DataTaxonomyServiceClient._read_environment_variables() - assert str(excinfo.value) == "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" - - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): - assert DataTaxonomyServiceClient._read_environment_variables() == (False, "never", None) - - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): - assert DataTaxonomyServiceClient._read_environment_variables() == (False, "always", None) - - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"}): - assert DataTaxonomyServiceClient._read_environment_variables() == (False, "auto", None) - - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): - with pytest.raises(MutualTLSChannelError) as excinfo: - DataTaxonomyServiceClient._read_environment_variables() - assert str(excinfo.value) == "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" - - with mock.patch.dict(os.environ, {"GOOGLE_CLOUD_UNIVERSE_DOMAIN": "foo.com"}): - assert DataTaxonomyServiceClient._read_environment_variables() == (False, "auto", "foo.com") - -def test__get_client_cert_source(): - mock_provided_cert_source = mock.Mock() - mock_default_cert_source = mock.Mock() - - assert DataTaxonomyServiceClient._get_client_cert_source(None, False) is None - assert DataTaxonomyServiceClient._get_client_cert_source(mock_provided_cert_source, False) is None - assert DataTaxonomyServiceClient._get_client_cert_source(mock_provided_cert_source, True) == mock_provided_cert_source - - with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=True): - with mock.patch('google.auth.transport.mtls.default_client_cert_source', return_value=mock_default_cert_source): - assert 
DataTaxonomyServiceClient._get_client_cert_source(None, True) is mock_default_cert_source - assert DataTaxonomyServiceClient._get_client_cert_source(mock_provided_cert_source, "true") is mock_provided_cert_source - -@mock.patch.object(DataTaxonomyServiceClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(DataTaxonomyServiceClient)) -@mock.patch.object(DataTaxonomyServiceAsyncClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(DataTaxonomyServiceAsyncClient)) -def test__get_api_endpoint(): - api_override = "foo.com" - mock_client_cert_source = mock.Mock() - default_universe = DataTaxonomyServiceClient._DEFAULT_UNIVERSE - default_endpoint = DataTaxonomyServiceClient._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=default_universe) - mock_universe = "bar.com" - mock_endpoint = DataTaxonomyServiceClient._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=mock_universe) - - assert DataTaxonomyServiceClient._get_api_endpoint(api_override, mock_client_cert_source, default_universe, "always") == api_override - assert DataTaxonomyServiceClient._get_api_endpoint(None, mock_client_cert_source, default_universe, "auto") == DataTaxonomyServiceClient.DEFAULT_MTLS_ENDPOINT - assert DataTaxonomyServiceClient._get_api_endpoint(None, None, default_universe, "auto") == default_endpoint - assert DataTaxonomyServiceClient._get_api_endpoint(None, None, default_universe, "always") == DataTaxonomyServiceClient.DEFAULT_MTLS_ENDPOINT - assert DataTaxonomyServiceClient._get_api_endpoint(None, mock_client_cert_source, default_universe, "always") == DataTaxonomyServiceClient.DEFAULT_MTLS_ENDPOINT - assert DataTaxonomyServiceClient._get_api_endpoint(None, None, mock_universe, "never") == mock_endpoint - assert DataTaxonomyServiceClient._get_api_endpoint(None, None, default_universe, "never") == default_endpoint - - with pytest.raises(MutualTLSChannelError) as excinfo: - DataTaxonomyServiceClient._get_api_endpoint(None, mock_client_cert_source, mock_universe, "auto") - assert str(excinfo.value) == "mTLS is not supported in any universe other than googleapis.com." - - -def test__get_universe_domain(): - client_universe_domain = "foo.com" - universe_domain_env = "bar.com" - - assert DataTaxonomyServiceClient._get_universe_domain(client_universe_domain, universe_domain_env) == client_universe_domain - assert DataTaxonomyServiceClient._get_universe_domain(None, universe_domain_env) == universe_domain_env - assert DataTaxonomyServiceClient._get_universe_domain(None, None) == DataTaxonomyServiceClient._DEFAULT_UNIVERSE - - with pytest.raises(ValueError) as excinfo: - DataTaxonomyServiceClient._get_universe_domain("", None) - assert str(excinfo.value) == "Universe Domain cannot be an empty string." 
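The assertions in test__get_universe_domain above pin down the precedence the generated clients appear to use when resolving the universe domain: an explicit client-supplied value wins over the value derived from the GOOGLE_CLOUD_UNIVERSE_DOMAIN environment variable, which in turn wins over the library default, and an empty string is rejected outright. A minimal standalone sketch of that precedence follows; the helper name resolve_universe_domain and the module-level constant are illustrative stand-ins, not part of the generated client surface:

    # Sketch only: mirrors the precedence exercised by test__get_universe_domain.
    _DEFAULT_UNIVERSE = "googleapis.com"  # assumed library default, per the asserts above

    def resolve_universe_domain(client_value=None, env_value=None):
        # An explicit client-supplied value has the highest precedence.
        if client_value is not None:
            if client_value == "":
                raise ValueError("Universe Domain cannot be an empty string.")
            return client_value
        # Otherwise fall back to the environment-derived value, then the default.
        return env_value if env_value is not None else _DEFAULT_UNIVERSE

    assert resolve_universe_domain("foo.com", "bar.com") == "foo.com"
    assert resolve_universe_domain(None, "bar.com") == "bar.com"
    assert resolve_universe_domain() == "googleapis.com"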
- - -@pytest.mark.parametrize("client_class,transport_name", [ - (DataTaxonomyServiceClient, "grpc"), - (DataTaxonomyServiceAsyncClient, "grpc_asyncio"), -]) -def test_data_taxonomy_service_client_from_service_account_info(client_class, transport_name): - creds = ga_credentials.AnonymousCredentials() - with mock.patch.object(service_account.Credentials, 'from_service_account_info') as factory: - factory.return_value = creds - info = {"valid": True} - client = client_class.from_service_account_info(info, transport=transport_name) - assert client.transport._credentials == creds - assert isinstance(client, client_class) - - assert client.transport._host == ( - 'dataplex.googleapis.com:443' - ) - - -@pytest.mark.parametrize("transport_class,transport_name", [ - (transports.DataTaxonomyServiceGrpcTransport, "grpc"), - (transports.DataTaxonomyServiceGrpcAsyncIOTransport, "grpc_asyncio"), -]) -def test_data_taxonomy_service_client_service_account_always_use_jwt(transport_class, transport_name): - with mock.patch.object(service_account.Credentials, 'with_always_use_jwt_access', create=True) as use_jwt: - creds = service_account.Credentials(None, None, None) - transport = transport_class(credentials=creds, always_use_jwt_access=True) - use_jwt.assert_called_once_with(True) - - with mock.patch.object(service_account.Credentials, 'with_always_use_jwt_access', create=True) as use_jwt: - creds = service_account.Credentials(None, None, None) - transport = transport_class(credentials=creds, always_use_jwt_access=False) - use_jwt.assert_not_called() - - -@pytest.mark.parametrize("client_class,transport_name", [ - (DataTaxonomyServiceClient, "grpc"), - (DataTaxonomyServiceAsyncClient, "grpc_asyncio"), -]) -def test_data_taxonomy_service_client_from_service_account_file(client_class, transport_name): - creds = ga_credentials.AnonymousCredentials() - with mock.patch.object(service_account.Credentials, 'from_service_account_file') as factory: - factory.return_value = creds - client = client_class.from_service_account_file("dummy/file/path.json", transport=transport_name) - assert client.transport._credentials == creds - assert isinstance(client, client_class) - - client = client_class.from_service_account_json("dummy/file/path.json", transport=transport_name) - assert client.transport._credentials == creds - assert isinstance(client, client_class) - - assert client.transport._host == ( - 'dataplex.googleapis.com:443' - ) - - -def test_data_taxonomy_service_client_get_transport_class(): - transport = DataTaxonomyServiceClient.get_transport_class() - available_transports = [ - transports.DataTaxonomyServiceGrpcTransport, - ] - assert transport in available_transports - - transport = DataTaxonomyServiceClient.get_transport_class("grpc") - assert transport == transports.DataTaxonomyServiceGrpcTransport - - -@pytest.mark.parametrize("client_class,transport_class,transport_name", [ - (DataTaxonomyServiceClient, transports.DataTaxonomyServiceGrpcTransport, "grpc"), - (DataTaxonomyServiceAsyncClient, transports.DataTaxonomyServiceGrpcAsyncIOTransport, "grpc_asyncio"), -]) -@mock.patch.object(DataTaxonomyServiceClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(DataTaxonomyServiceClient)) -@mock.patch.object(DataTaxonomyServiceAsyncClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(DataTaxonomyServiceAsyncClient)) -def test_data_taxonomy_service_client_client_options(client_class, transport_class, transport_name): - # Check that if channel is provided we won't create a new one. 
- with mock.patch.object(DataTaxonomyServiceClient, 'get_transport_class') as gtc: - transport = transport_class( - credentials=ga_credentials.AnonymousCredentials() - ) - client = client_class(transport=transport) - gtc.assert_not_called() - - # Check that if channel is provided via str we will create a new one. - with mock.patch.object(DataTaxonomyServiceClient, 'get_transport_class') as gtc: - client = client_class(transport=transport_name) - gtc.assert_called() - - # Check the case api_endpoint is provided. - options = client_options.ClientOptions(api_endpoint="squid.clam.whelk") - with mock.patch.object(transport_class, '__init__') as patched: - patched.return_value = None - client = client_class(transport=transport_name, client_options=options) - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host="squid.clam.whelk", - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - - # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is - # "never". - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): - with mock.patch.object(transport_class, '__init__') as patched: - patched.return_value = None - client = client_class(transport=transport_name) - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE), - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - - # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is - # "always". - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): - with mock.patch.object(transport_class, '__init__') as patched: - patched.return_value = None - client = client_class(transport=transport_name) - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host=client.DEFAULT_MTLS_ENDPOINT, - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - - # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has - # unsupported value. - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): - with pytest.raises(MutualTLSChannelError) as excinfo: - client = client_class(transport=transport_name) - assert str(excinfo.value) == "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" - - # Check the case GOOGLE_API_USE_CLIENT_CERTIFICATE has unsupported value. 
- with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"}): - with pytest.raises(ValueError) as excinfo: - client = client_class(transport=transport_name) - assert str(excinfo.value) == "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" - - # Check the case quota_project_id is provided - options = client_options.ClientOptions(quota_project_id="octopus") - with mock.patch.object(transport_class, '__init__') as patched: - patched.return_value = None - client = client_class(client_options=options, transport=transport_name) - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE), - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id="octopus", - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - # Check the case api_endpoint is provided - options = client_options.ClientOptions(api_audience="https://language.googleapis.com") - with mock.patch.object(transport_class, '__init__') as patched: - patched.return_value = None - client = client_class(client_options=options, transport=transport_name) - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE), - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience="https://language.googleapis.com" - ) - -@pytest.mark.parametrize("client_class,transport_class,transport_name,use_client_cert_env", [ - (DataTaxonomyServiceClient, transports.DataTaxonomyServiceGrpcTransport, "grpc", "true"), - (DataTaxonomyServiceAsyncClient, transports.DataTaxonomyServiceGrpcAsyncIOTransport, "grpc_asyncio", "true"), - (DataTaxonomyServiceClient, transports.DataTaxonomyServiceGrpcTransport, "grpc", "false"), - (DataTaxonomyServiceAsyncClient, transports.DataTaxonomyServiceGrpcAsyncIOTransport, "grpc_asyncio", "false"), -]) -@mock.patch.object(DataTaxonomyServiceClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(DataTaxonomyServiceClient)) -@mock.patch.object(DataTaxonomyServiceAsyncClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(DataTaxonomyServiceAsyncClient)) -@mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"}) -def test_data_taxonomy_service_client_mtls_env_auto(client_class, transport_class, transport_name, use_client_cert_env): - # This tests the endpoint autoswitch behavior. Endpoint is autoswitched to the default - # mtls endpoint, if GOOGLE_API_USE_CLIENT_CERTIFICATE is "true" and client cert exists. - - # Check the case client_cert_source is provided. Whether client cert is used depends on - # GOOGLE_API_USE_CLIENT_CERTIFICATE value. 
- with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}): - options = client_options.ClientOptions(client_cert_source=client_cert_source_callback) - with mock.patch.object(transport_class, '__init__') as patched: - patched.return_value = None - client = client_class(client_options=options, transport=transport_name) - - if use_client_cert_env == "false": - expected_client_cert_source = None - expected_host = client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE) - else: - expected_client_cert_source = client_cert_source_callback - expected_host = client.DEFAULT_MTLS_ENDPOINT - - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host=expected_host, - scopes=None, - client_cert_source_for_mtls=expected_client_cert_source, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - - # Check the case ADC client cert is provided. Whether client cert is used depends on - # GOOGLE_API_USE_CLIENT_CERTIFICATE value. - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}): - with mock.patch.object(transport_class, '__init__') as patched: - with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=True): - with mock.patch('google.auth.transport.mtls.default_client_cert_source', return_value=client_cert_source_callback): - if use_client_cert_env == "false": - expected_host = client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE) - expected_client_cert_source = None - else: - expected_host = client.DEFAULT_MTLS_ENDPOINT - expected_client_cert_source = client_cert_source_callback - - patched.return_value = None - client = client_class(transport=transport_name) - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host=expected_host, - scopes=None, - client_cert_source_for_mtls=expected_client_cert_source, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - - # Check the case client_cert_source and ADC client cert are not provided. - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}): - with mock.patch.object(transport_class, '__init__') as patched: - with mock.patch("google.auth.transport.mtls.has_default_client_cert_source", return_value=False): - patched.return_value = None - client = client_class(transport=transport_name) - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE), - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - - -@pytest.mark.parametrize("client_class", [ - DataTaxonomyServiceClient, DataTaxonomyServiceAsyncClient -]) -@mock.patch.object(DataTaxonomyServiceClient, "DEFAULT_ENDPOINT", modify_default_endpoint(DataTaxonomyServiceClient)) -@mock.patch.object(DataTaxonomyServiceAsyncClient, "DEFAULT_ENDPOINT", modify_default_endpoint(DataTaxonomyServiceAsyncClient)) -def test_data_taxonomy_service_client_get_mtls_endpoint_and_cert_source(client_class): - mock_client_cert_source = mock.Mock() - - # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "true". 
- with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): - mock_api_endpoint = "foo" - options = client_options.ClientOptions(client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint) - api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source(options) - assert api_endpoint == mock_api_endpoint - assert cert_source == mock_client_cert_source - - # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "false". - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "false"}): - mock_client_cert_source = mock.Mock() - mock_api_endpoint = "foo" - options = client_options.ClientOptions(client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint) - api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source(options) - assert api_endpoint == mock_api_endpoint - assert cert_source is None - - # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "never". - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): - api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() - assert api_endpoint == client_class.DEFAULT_ENDPOINT - assert cert_source is None - - # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "always". - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): - api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() - assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT - assert cert_source is None - - # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert doesn't exist. - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): - with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=False): - api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() - assert api_endpoint == client_class.DEFAULT_ENDPOINT - assert cert_source is None - - # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert exists. - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): - with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=True): - with mock.patch('google.auth.transport.mtls.default_client_cert_source', return_value=mock_client_cert_source): - api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() - assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT - assert cert_source == mock_client_cert_source - - # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has - # unsupported value. - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): - with pytest.raises(MutualTLSChannelError) as excinfo: - client_class.get_mtls_endpoint_and_cert_source() - - assert str(excinfo.value) == "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" - - # Check the case GOOGLE_API_USE_CLIENT_CERTIFICATE has unsupported value. 
- with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"}): - with pytest.raises(ValueError) as excinfo: - client_class.get_mtls_endpoint_and_cert_source() - - assert str(excinfo.value) == "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" - -@pytest.mark.parametrize("client_class", [ - DataTaxonomyServiceClient, DataTaxonomyServiceAsyncClient -]) -@mock.patch.object(DataTaxonomyServiceClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(DataTaxonomyServiceClient)) -@mock.patch.object(DataTaxonomyServiceAsyncClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(DataTaxonomyServiceAsyncClient)) -def test_data_taxonomy_service_client_client_api_endpoint(client_class): - mock_client_cert_source = client_cert_source_callback - api_override = "foo.com" - default_universe = DataTaxonomyServiceClient._DEFAULT_UNIVERSE - default_endpoint = DataTaxonomyServiceClient._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=default_universe) - mock_universe = "bar.com" - mock_endpoint = DataTaxonomyServiceClient._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=mock_universe) - - # If ClientOptions.api_endpoint is set and GOOGLE_API_USE_CLIENT_CERTIFICATE="true", - # use ClientOptions.api_endpoint as the api endpoint regardless. - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): - with mock.patch("google.auth.transport.requests.AuthorizedSession.configure_mtls_channel"): - options = client_options.ClientOptions(client_cert_source=mock_client_cert_source, api_endpoint=api_override) - client = client_class(client_options=options, credentials=ga_credentials.AnonymousCredentials()) - assert client.api_endpoint == api_override - - # If ClientOptions.api_endpoint is not set and GOOGLE_API_USE_MTLS_ENDPOINT="never", - # use the _DEFAULT_ENDPOINT_TEMPLATE populated with GDU as the api endpoint. - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): - client = client_class(credentials=ga_credentials.AnonymousCredentials()) - assert client.api_endpoint == default_endpoint - - # If ClientOptions.api_endpoint is not set and GOOGLE_API_USE_MTLS_ENDPOINT="always", - # use the DEFAULT_MTLS_ENDPOINT as the api endpoint. - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): - client = client_class(credentials=ga_credentials.AnonymousCredentials()) - assert client.api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT - - # If ClientOptions.api_endpoint is not set, GOOGLE_API_USE_MTLS_ENDPOINT="auto" (default), - # GOOGLE_API_USE_CLIENT_CERTIFICATE="false" (default), default cert source doesn't exist, - # and ClientOptions.universe_domain="bar.com", - # use the _DEFAULT_ENDPOINT_TEMPLATE populated with universe domain as the api endpoint. 
- options = client_options.ClientOptions() - universe_exists = hasattr(options, "universe_domain") - if universe_exists: - options = client_options.ClientOptions(universe_domain=mock_universe) - client = client_class(client_options=options, credentials=ga_credentials.AnonymousCredentials()) - else: - client = client_class(client_options=options, credentials=ga_credentials.AnonymousCredentials()) - assert client.api_endpoint == (mock_endpoint if universe_exists else default_endpoint) - assert client.universe_domain == (mock_universe if universe_exists else default_universe) - - # If ClientOptions does not have a universe domain attribute and GOOGLE_API_USE_MTLS_ENDPOINT="never", - # use the _DEFAULT_ENDPOINT_TEMPLATE populated with GDU as the api endpoint. - options = client_options.ClientOptions() - if hasattr(options, "universe_domain"): - delattr(options, "universe_domain") - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): - client = client_class(client_options=options, credentials=ga_credentials.AnonymousCredentials()) - assert client.api_endpoint == default_endpoint - - -@pytest.mark.parametrize("client_class,transport_class,transport_name", [ - (DataTaxonomyServiceClient, transports.DataTaxonomyServiceGrpcTransport, "grpc"), - (DataTaxonomyServiceAsyncClient, transports.DataTaxonomyServiceGrpcAsyncIOTransport, "grpc_asyncio"), -]) -def test_data_taxonomy_service_client_client_options_scopes(client_class, transport_class, transport_name): - # Check the case scopes are provided. - options = client_options.ClientOptions( - scopes=["1", "2"], - ) - with mock.patch.object(transport_class, '__init__') as patched: - patched.return_value = None - client = client_class(client_options=options, transport=transport_name) - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE), - scopes=["1", "2"], - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - -@pytest.mark.parametrize("client_class,transport_class,transport_name,grpc_helpers", [ - (DataTaxonomyServiceClient, transports.DataTaxonomyServiceGrpcTransport, "grpc", grpc_helpers), - (DataTaxonomyServiceAsyncClient, transports.DataTaxonomyServiceGrpcAsyncIOTransport, "grpc_asyncio", grpc_helpers_async), -]) -def test_data_taxonomy_service_client_client_options_credentials_file(client_class, transport_class, transport_name, grpc_helpers): - # Check the case credentials file is provided. 
- options = client_options.ClientOptions( - credentials_file="credentials.json" - ) - - with mock.patch.object(transport_class, '__init__') as patched: - patched.return_value = None - client = client_class(client_options=options, transport=transport_name) - patched.assert_called_once_with( - credentials=None, - credentials_file="credentials.json", - host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE), - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - -def test_data_taxonomy_service_client_client_options_from_dict(): - with mock.patch('google.cloud.dataplex_v1.services.data_taxonomy_service.transports.DataTaxonomyServiceGrpcTransport.__init__') as grpc_transport: - grpc_transport.return_value = None - client = DataTaxonomyServiceClient( - client_options={'api_endpoint': 'squid.clam.whelk'} - ) - grpc_transport.assert_called_once_with( - credentials=None, - credentials_file=None, - host="squid.clam.whelk", - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - - -@pytest.mark.parametrize("client_class,transport_class,transport_name,grpc_helpers", [ - (DataTaxonomyServiceClient, transports.DataTaxonomyServiceGrpcTransport, "grpc", grpc_helpers), - (DataTaxonomyServiceAsyncClient, transports.DataTaxonomyServiceGrpcAsyncIOTransport, "grpc_asyncio", grpc_helpers_async), -]) -def test_data_taxonomy_service_client_create_channel_credentials_file(client_class, transport_class, transport_name, grpc_helpers): - # Check the case credentials file is provided. - options = client_options.ClientOptions( - credentials_file="credentials.json" - ) - - with mock.patch.object(transport_class, '__init__') as patched: - patched.return_value = None - client = client_class(client_options=options, transport=transport_name) - patched.assert_called_once_with( - credentials=None, - credentials_file="credentials.json", - host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE), - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - - # test that the credentials from file are saved and used as the credentials. 
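-    # When credentials_file is set, the credentials loaded via
-    # google.auth.load_credentials_from_file take precedence over the ADC
-    # default() credentials when the channel is created, as asserted below.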
-    with mock.patch.object(
-        google.auth, "load_credentials_from_file", autospec=True
-    ) as load_creds, mock.patch.object(
-        google.auth, "default", autospec=True
-    ) as adc, mock.patch.object(
-        grpc_helpers, "create_channel"
-    ) as create_channel:
-        creds = ga_credentials.AnonymousCredentials()
-        file_creds = ga_credentials.AnonymousCredentials()
-        load_creds.return_value = (file_creds, None)
-        adc.return_value = (creds, None)
-        client = client_class(client_options=options, transport=transport_name)
-        create_channel.assert_called_with(
-            "dataplex.googleapis.com:443",
-            credentials=file_creds,
-            credentials_file=None,
-            quota_project_id=None,
-            default_scopes=(
-                'https://www.googleapis.com/auth/cloud-platform',
-            ),
-            scopes=None,
-            default_host="dataplex.googleapis.com",
-            ssl_credentials=None,
-            options=[
-                ("grpc.max_send_message_length", -1),
-                ("grpc.max_receive_message_length", -1),
-            ],
-        )
-
-
-@pytest.mark.parametrize("request_type", [
-    gcd_data_taxonomy.CreateDataTaxonomyRequest,
-    dict,
-])
-def test_create_data_taxonomy(request_type, transport: str = 'grpc'):
-    client = DataTaxonomyServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport=transport,
-    )
-
-    # Everything is optional in proto3 as far as the runtime is concerned,
-    # and we are mocking out the actual API, so just send an empty request.
-    request = request_type()
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-        type(client.transport.create_data_taxonomy),
-        '__call__') as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = operations_pb2.Operation(name='operations/spam')
-        response = client.create_data_taxonomy(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert len(call.mock_calls) == 1
-        _, args, _ = call.mock_calls[0]
-        request = gcd_data_taxonomy.CreateDataTaxonomyRequest()
-        assert args[0] == request
-
-    # Establish that the response is the type that we expect.
-    assert isinstance(response, future.Future)
-
-
-def test_create_data_taxonomy_non_empty_request_with_auto_populated_field():
-    # This test is a coverage failsafe to make sure that UUID4 fields are
-    # automatically populated, according to AIP-4235, with non-empty requests.
-    client = DataTaxonomyServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport='grpc',
-    )
-
-    # Populate all string fields in the request which are not UUID4
-    # since we want to check that UUID4 are populated automatically
-    # if they meet the requirements of AIP 4235.
-    request = gcd_data_taxonomy.CreateDataTaxonomyRequest(
-        parent='parent_value',
-        data_taxonomy_id='data_taxonomy_id_value',
-    )
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-        type(client.transport.create_data_taxonomy),
-        '__call__') as call:
-        call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string.
- client.create_data_taxonomy(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == gcd_data_taxonomy.CreateDataTaxonomyRequest( - parent='parent_value', - data_taxonomy_id='data_taxonomy_id_value', - ) - -def test_create_data_taxonomy_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = DataTaxonomyServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.create_data_taxonomy in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.create_data_taxonomy] = mock_rpc - request = {} - client.create_data_taxonomy(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - # Operation methods call wrapper_fn to build a cached - # client._transport.operations_client instance on first rpc call. - # Subsequent calls should use the cached wrapper - wrapper_fn.reset_mock() - - client.create_data_taxonomy(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_create_data_taxonomy_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: - client = DataTaxonomyServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._client._transport.create_data_taxonomy in client._client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.AsyncMock() - mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[client._client._transport.create_data_taxonomy] = mock_rpc - - request = {} - await client.create_data_taxonomy(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - # Operation methods call wrapper_fn to build a cached - # client._transport.operations_client instance on first rpc call. - # Subsequent calls should use the cached wrapper - wrapper_fn.reset_mock() - - await client.create_data_taxonomy(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_create_data_taxonomy_async(transport: str = 'grpc_asyncio', request_type=gcd_data_taxonomy.CreateDataTaxonomyRequest): - client = DataTaxonomyServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. 
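-    # (The async variants wrap the fake response in
-    # grpc_helpers_async.FakeUnaryUnaryCall so the mocked stub is awaitable.)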
- request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_data_taxonomy), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name='operations/spam') - ) - response = await client.create_data_taxonomy(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - request = gcd_data_taxonomy.CreateDataTaxonomyRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, future.Future) - - -@pytest.mark.asyncio -async def test_create_data_taxonomy_async_from_dict(): - await test_create_data_taxonomy_async(request_type=dict) - -def test_create_data_taxonomy_field_headers(): - client = DataTaxonomyServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = gcd_data_taxonomy.CreateDataTaxonomyRequest() - - request.parent = 'parent_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_data_taxonomy), - '__call__') as call: - call.return_value = operations_pb2.Operation(name='operations/op') - client.create_data_taxonomy(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'parent=parent_value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_create_data_taxonomy_field_headers_async(): - client = DataTaxonomyServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = gcd_data_taxonomy.CreateDataTaxonomyRequest() - - request.parent = 'parent_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_data_taxonomy), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(operations_pb2.Operation(name='operations/op')) - await client.create_data_taxonomy(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'parent=parent_value', - ) in kw['metadata'] - - -def test_create_data_taxonomy_flattened(): - client = DataTaxonomyServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_data_taxonomy), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = operations_pb2.Operation(name='operations/op') - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. 
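-        # Each keyword is copied into the corresponding request field; passing
-        # a request object together with flattened kwargs raises ValueError
-        # (covered by the *_flattened_error tests below).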
-        client.create_data_taxonomy(
-            parent='parent_value',
-            data_taxonomy=gcd_data_taxonomy.DataTaxonomy(name='name_value'),
-            data_taxonomy_id='data_taxonomy_id_value',
-        )
-
-        # Establish that the underlying call was made with the expected
-        # request object values.
-        assert len(call.mock_calls) == 1
-        _, args, _ = call.mock_calls[0]
-        arg = args[0].parent
-        mock_val = 'parent_value'
-        assert arg == mock_val
-        arg = args[0].data_taxonomy
-        mock_val = gcd_data_taxonomy.DataTaxonomy(name='name_value')
-        assert arg == mock_val
-        arg = args[0].data_taxonomy_id
-        mock_val = 'data_taxonomy_id_value'
-        assert arg == mock_val
-
-
-def test_create_data_taxonomy_flattened_error():
-    client = DataTaxonomyServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-    )
-
-    # Attempting to call a method with both a request object and flattened
-    # fields is an error.
-    with pytest.raises(ValueError):
-        client.create_data_taxonomy(
-            gcd_data_taxonomy.CreateDataTaxonomyRequest(),
-            parent='parent_value',
-            data_taxonomy=gcd_data_taxonomy.DataTaxonomy(name='name_value'),
-            data_taxonomy_id='data_taxonomy_id_value',
-        )
-
-@pytest.mark.asyncio
-async def test_create_data_taxonomy_flattened_async():
-    client = DataTaxonomyServiceAsyncClient(
-        credentials=async_anonymous_credentials(),
-    )
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-        type(client.transport.create_data_taxonomy),
-        '__call__') as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
-            operations_pb2.Operation(name='operations/spam')
-        )
-        # Call the method with a truthy value for each flattened field,
-        # using the keyword arguments to the method.
-        response = await client.create_data_taxonomy(
-            parent='parent_value',
-            data_taxonomy=gcd_data_taxonomy.DataTaxonomy(name='name_value'),
-            data_taxonomy_id='data_taxonomy_id_value',
-        )
-
-        # Establish that the underlying call was made with the expected
-        # request object values.
-        assert len(call.mock_calls)
-        _, args, _ = call.mock_calls[0]
-        arg = args[0].parent
-        mock_val = 'parent_value'
-        assert arg == mock_val
-        arg = args[0].data_taxonomy
-        mock_val = gcd_data_taxonomy.DataTaxonomy(name='name_value')
-        assert arg == mock_val
-        arg = args[0].data_taxonomy_id
-        mock_val = 'data_taxonomy_id_value'
-        assert arg == mock_val
-
-@pytest.mark.asyncio
-async def test_create_data_taxonomy_flattened_error_async():
-    client = DataTaxonomyServiceAsyncClient(
-        credentials=async_anonymous_credentials(),
-    )
-
-    # Attempting to call a method with both a request object and flattened
-    # fields is an error.
-    with pytest.raises(ValueError):
-        await client.create_data_taxonomy(
-            gcd_data_taxonomy.CreateDataTaxonomyRequest(),
-            parent='parent_value',
-            data_taxonomy=gcd_data_taxonomy.DataTaxonomy(name='name_value'),
-            data_taxonomy_id='data_taxonomy_id_value',
-        )
-
-
-@pytest.mark.parametrize("request_type", [
-    gcd_data_taxonomy.UpdateDataTaxonomyRequest,
-    dict,
-])
-def test_update_data_taxonomy(request_type, transport: str = 'grpc'):
-    client = DataTaxonomyServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport=transport,
-    )
-
-    # Everything is optional in proto3 as far as the runtime is concerned,
-    # and we are mocking out the actual API, so just send an empty request.
-    request = request_type()
-
-    # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object( - type(client.transport.update_data_taxonomy), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = operations_pb2.Operation(name='operations/spam') - response = client.update_data_taxonomy(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - request = gcd_data_taxonomy.UpdateDataTaxonomyRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, future.Future) - - -def test_update_data_taxonomy_non_empty_request_with_auto_populated_field(): - # This test is a coverage failsafe to make sure that UUID4 fields are - # automatically populated, according to AIP-4235, with non-empty requests. - client = DataTaxonomyServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. - request = gcd_data_taxonomy.UpdateDataTaxonomyRequest( - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_data_taxonomy), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client.update_data_taxonomy(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == gcd_data_taxonomy.UpdateDataTaxonomyRequest( - ) - -def test_update_data_taxonomy_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = DataTaxonomyServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.update_data_taxonomy in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.update_data_taxonomy] = mock_rpc - request = {} - client.update_data_taxonomy(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - # Operation methods call wrapper_fn to build a cached - # client._transport.operations_client instance on first rpc call. 
- # Subsequent calls should use the cached wrapper - wrapper_fn.reset_mock() - - client.update_data_taxonomy(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_update_data_taxonomy_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: - client = DataTaxonomyServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._client._transport.update_data_taxonomy in client._client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.AsyncMock() - mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[client._client._transport.update_data_taxonomy] = mock_rpc - - request = {} - await client.update_data_taxonomy(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - # Operation methods call wrapper_fn to build a cached - # client._transport.operations_client instance on first rpc call. - # Subsequent calls should use the cached wrapper - wrapper_fn.reset_mock() - - await client.update_data_taxonomy(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_update_data_taxonomy_async(transport: str = 'grpc_asyncio', request_type=gcd_data_taxonomy.UpdateDataTaxonomyRequest): - client = DataTaxonomyServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_data_taxonomy), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name='operations/spam') - ) - response = await client.update_data_taxonomy(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - request = gcd_data_taxonomy.UpdateDataTaxonomyRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, future.Future) - - -@pytest.mark.asyncio -async def test_update_data_taxonomy_async_from_dict(): - await test_update_data_taxonomy_async(request_type=dict) - -def test_update_data_taxonomy_field_headers(): - client = DataTaxonomyServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = gcd_data_taxonomy.UpdateDataTaxonomyRequest() - - request.data_taxonomy.name = 'name_value' - - # Mock the actual call within the gRPC stub, and fake the request. 
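-    # (For update RPCs the routing parameter is the nested resource name,
-    # hence the data_taxonomy.name key asserted on below.)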
- with mock.patch.object( - type(client.transport.update_data_taxonomy), - '__call__') as call: - call.return_value = operations_pb2.Operation(name='operations/op') - client.update_data_taxonomy(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'data_taxonomy.name=name_value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_update_data_taxonomy_field_headers_async(): - client = DataTaxonomyServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = gcd_data_taxonomy.UpdateDataTaxonomyRequest() - - request.data_taxonomy.name = 'name_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_data_taxonomy), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(operations_pb2.Operation(name='operations/op')) - await client.update_data_taxonomy(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'data_taxonomy.name=name_value', - ) in kw['metadata'] - - -def test_update_data_taxonomy_flattened(): - client = DataTaxonomyServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_data_taxonomy), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = operations_pb2.Operation(name='operations/op') - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.update_data_taxonomy( - data_taxonomy=gcd_data_taxonomy.DataTaxonomy(name='name_value'), - update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - arg = args[0].data_taxonomy - mock_val = gcd_data_taxonomy.DataTaxonomy(name='name_value') - assert arg == mock_val - arg = args[0].update_mask - mock_val = field_mask_pb2.FieldMask(paths=['paths_value']) - assert arg == mock_val - - -def test_update_data_taxonomy_flattened_error(): - client = DataTaxonomyServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.update_data_taxonomy( - gcd_data_taxonomy.UpdateDataTaxonomyRequest(), - data_taxonomy=gcd_data_taxonomy.DataTaxonomy(name='name_value'), - update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), - ) - -@pytest.mark.asyncio -async def test_update_data_taxonomy_flattened_async(): - client = DataTaxonomyServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. 
-    with mock.patch.object(
-        type(client.transport.update_data_taxonomy),
-        '__call__') as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
-            operations_pb2.Operation(name='operations/spam')
-        )
-        # Call the method with a truthy value for each flattened field,
-        # using the keyword arguments to the method.
-        response = await client.update_data_taxonomy(
-            data_taxonomy=gcd_data_taxonomy.DataTaxonomy(name='name_value'),
-            update_mask=field_mask_pb2.FieldMask(paths=['paths_value']),
-        )
-
-        # Establish that the underlying call was made with the expected
-        # request object values.
-        assert len(call.mock_calls)
-        _, args, _ = call.mock_calls[0]
-        arg = args[0].data_taxonomy
-        mock_val = gcd_data_taxonomy.DataTaxonomy(name='name_value')
-        assert arg == mock_val
-        arg = args[0].update_mask
-        mock_val = field_mask_pb2.FieldMask(paths=['paths_value'])
-        assert arg == mock_val
-
-@pytest.mark.asyncio
-async def test_update_data_taxonomy_flattened_error_async():
-    client = DataTaxonomyServiceAsyncClient(
-        credentials=async_anonymous_credentials(),
-    )
-
-    # Attempting to call a method with both a request object and flattened
-    # fields is an error.
-    with pytest.raises(ValueError):
-        await client.update_data_taxonomy(
-            gcd_data_taxonomy.UpdateDataTaxonomyRequest(),
-            data_taxonomy=gcd_data_taxonomy.DataTaxonomy(name='name_value'),
-            update_mask=field_mask_pb2.FieldMask(paths=['paths_value']),
-        )
-
-
-@pytest.mark.parametrize("request_type", [
-    data_taxonomy.DeleteDataTaxonomyRequest,
-    dict,
-])
-def test_delete_data_taxonomy(request_type, transport: str = 'grpc'):
-    client = DataTaxonomyServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport=transport,
-    )
-
-    # Everything is optional in proto3 as far as the runtime is concerned,
-    # and we are mocking out the actual API, so just send an empty request.
-    request = request_type()
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-        type(client.transport.delete_data_taxonomy),
-        '__call__') as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = operations_pb2.Operation(name='operations/spam')
-        response = client.delete_data_taxonomy(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert len(call.mock_calls) == 1
-        _, args, _ = call.mock_calls[0]
-        request = data_taxonomy.DeleteDataTaxonomyRequest()
-        assert args[0] == request
-
-    # Establish that the response is the type that we expect.
-    assert isinstance(response, future.Future)
-
-
-def test_delete_data_taxonomy_non_empty_request_with_auto_populated_field():
-    # This test is a coverage failsafe to make sure that UUID4 fields are
-    # automatically populated, according to AIP-4235, with non-empty requests.
-    client = DataTaxonomyServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport='grpc',
-    )
-
-    # Populate all string fields in the request which are not UUID4
-    # since we want to check that UUID4 are populated automatically
-    # if they meet the requirements of AIP 4235.
-    request = data_taxonomy.DeleteDataTaxonomyRequest(
-        name='name_value',
-        etag='etag_value',
-    )
-
-    # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object( - type(client.transport.delete_data_taxonomy), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client.delete_data_taxonomy(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == data_taxonomy.DeleteDataTaxonomyRequest( - name='name_value', - etag='etag_value', - ) - -def test_delete_data_taxonomy_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = DataTaxonomyServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.delete_data_taxonomy in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.delete_data_taxonomy] = mock_rpc - request = {} - client.delete_data_taxonomy(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - # Operation methods call wrapper_fn to build a cached - # client._transport.operations_client instance on first rpc call. - # Subsequent calls should use the cached wrapper - wrapper_fn.reset_mock() - - client.delete_data_taxonomy(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_delete_data_taxonomy_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: - client = DataTaxonomyServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._client._transport.delete_data_taxonomy in client._client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.AsyncMock() - mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[client._client._transport.delete_data_taxonomy] = mock_rpc - - request = {} - await client.delete_data_taxonomy(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - # Operation methods call wrapper_fn to build a cached - # client._transport.operations_client instance on first rpc call. 
- # Subsequent calls should use the cached wrapper - wrapper_fn.reset_mock() - - await client.delete_data_taxonomy(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_delete_data_taxonomy_async(transport: str = 'grpc_asyncio', request_type=data_taxonomy.DeleteDataTaxonomyRequest): - client = DataTaxonomyServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_data_taxonomy), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name='operations/spam') - ) - response = await client.delete_data_taxonomy(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - request = data_taxonomy.DeleteDataTaxonomyRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, future.Future) - - -@pytest.mark.asyncio -async def test_delete_data_taxonomy_async_from_dict(): - await test_delete_data_taxonomy_async(request_type=dict) - -def test_delete_data_taxonomy_field_headers(): - client = DataTaxonomyServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = data_taxonomy.DeleteDataTaxonomyRequest() - - request.name = 'name_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_data_taxonomy), - '__call__') as call: - call.return_value = operations_pb2.Operation(name='operations/op') - client.delete_data_taxonomy(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'name=name_value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_delete_data_taxonomy_field_headers_async(): - client = DataTaxonomyServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = data_taxonomy.DeleteDataTaxonomyRequest() - - request.name = 'name_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_data_taxonomy), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(operations_pb2.Operation(name='operations/op')) - await client.delete_data_taxonomy(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. 
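-    # GAPIC clients mirror URI-bound request fields into the
-    # x-goog-request-params metadata entry so the backend can route the call.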
-    _, _, kw = call.mock_calls[0]
-    assert (
-        'x-goog-request-params',
-        'name=name_value',
-    ) in kw['metadata']
-
-
-def test_delete_data_taxonomy_flattened():
-    client = DataTaxonomyServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-    )
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-        type(client.transport.delete_data_taxonomy),
-        '__call__') as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = operations_pb2.Operation(name='operations/op')
-        # Call the method with a truthy value for each flattened field,
-        # using the keyword arguments to the method.
-        client.delete_data_taxonomy(
-            name='name_value',
-        )
-
-        # Establish that the underlying call was made with the expected
-        # request object values.
-        assert len(call.mock_calls) == 1
-        _, args, _ = call.mock_calls[0]
-        arg = args[0].name
-        mock_val = 'name_value'
-        assert arg == mock_val
-
-
-def test_delete_data_taxonomy_flattened_error():
-    client = DataTaxonomyServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-    )
-
-    # Attempting to call a method with both a request object and flattened
-    # fields is an error.
-    with pytest.raises(ValueError):
-        client.delete_data_taxonomy(
-            data_taxonomy.DeleteDataTaxonomyRequest(),
-            name='name_value',
-        )
-
-@pytest.mark.asyncio
-async def test_delete_data_taxonomy_flattened_async():
-    client = DataTaxonomyServiceAsyncClient(
-        credentials=async_anonymous_credentials(),
-    )
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-        type(client.transport.delete_data_taxonomy),
-        '__call__') as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
-            operations_pb2.Operation(name='operations/spam')
-        )
-        # Call the method with a truthy value for each flattened field,
-        # using the keyword arguments to the method.
-        response = await client.delete_data_taxonomy(
-            name='name_value',
-        )
-
-        # Establish that the underlying call was made with the expected
-        # request object values.
-        assert len(call.mock_calls)
-        _, args, _ = call.mock_calls[0]
-        arg = args[0].name
-        mock_val = 'name_value'
-        assert arg == mock_val
-
-@pytest.mark.asyncio
-async def test_delete_data_taxonomy_flattened_error_async():
-    client = DataTaxonomyServiceAsyncClient(
-        credentials=async_anonymous_credentials(),
-    )
-
-    # Attempting to call a method with both a request object and flattened
-    # fields is an error.
-    with pytest.raises(ValueError):
-        await client.delete_data_taxonomy(
-            data_taxonomy.DeleteDataTaxonomyRequest(),
-            name='name_value',
-        )
-
-
-@pytest.mark.parametrize("request_type", [
-    data_taxonomy.ListDataTaxonomiesRequest,
-    dict,
-])
-def test_list_data_taxonomies(request_type, transport: str = 'grpc'):
-    client = DataTaxonomyServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport=transport,
-    )
-
-    # Everything is optional in proto3 as far as the runtime is concerned,
-    # and we are mocking out the actual API, so just send an empty request.
-    request = request_type()
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-        type(client.transport.list_data_taxonomies),
-        '__call__') as call:
-        # Designate an appropriate return value for the call.
- call.return_value = data_taxonomy.ListDataTaxonomiesResponse( - next_page_token='next_page_token_value', - unreachable_locations=['unreachable_locations_value'], - ) - response = client.list_data_taxonomies(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - request = data_taxonomy.ListDataTaxonomiesRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, pagers.ListDataTaxonomiesPager) - assert response.next_page_token == 'next_page_token_value' - assert response.unreachable_locations == ['unreachable_locations_value'] - - -def test_list_data_taxonomies_non_empty_request_with_auto_populated_field(): - # This test is a coverage failsafe to make sure that UUID4 fields are - # automatically populated, according to AIP-4235, with non-empty requests. - client = DataTaxonomyServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. - request = data_taxonomy.ListDataTaxonomiesRequest( - parent='parent_value', - page_token='page_token_value', - filter='filter_value', - order_by='order_by_value', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_data_taxonomies), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client.list_data_taxonomies(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == data_taxonomy.ListDataTaxonomiesRequest( - parent='parent_value', - page_token='page_token_value', - filter='filter_value', - order_by='order_by_value', - ) - -def test_list_data_taxonomies_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = DataTaxonomyServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.list_data_taxonomies in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.list_data_taxonomies] = mock_rpc - request = {} - client.list_data_taxonomies(request) - - # Establish that the underlying gRPC stub method was called. 
- assert mock_rpc.call_count == 1
-
- client.list_data_taxonomies(request)
-
- # Establish that a new wrapper was not created for this call
- assert wrapper_fn.call_count == 0
- assert mock_rpc.call_count == 2
-
-@pytest.mark.asyncio
-async def test_list_data_taxonomies_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"):
- # Clients should use _prep_wrapped_messages to create cached wrapped rpcs,
- # instead of constructing them on each call
- with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn:
- client = DataTaxonomyServiceAsyncClient(
- credentials=async_anonymous_credentials(),
- transport=transport,
- )
-
- # Should wrap all calls on client creation
- assert wrapper_fn.call_count > 0
- wrapper_fn.reset_mock()
-
- # Ensure method has been cached
- assert client._client._transport.list_data_taxonomies in client._client._transport._wrapped_methods
-
- # Replace cached wrapped function with mock
- mock_rpc = mock.AsyncMock()
- mock_rpc.return_value = mock.Mock()
- client._client._transport._wrapped_methods[client._client._transport.list_data_taxonomies] = mock_rpc
-
- request = {}
- await client.list_data_taxonomies(request)
-
- # Establish that the underlying gRPC stub method was called.
- assert mock_rpc.call_count == 1
-
- await client.list_data_taxonomies(request)
-
- # Establish that a new wrapper was not created for this call
- assert wrapper_fn.call_count == 0
- assert mock_rpc.call_count == 2
-
-@pytest.mark.asyncio
-async def test_list_data_taxonomies_async(transport: str = 'grpc_asyncio', request_type=data_taxonomy.ListDataTaxonomiesRequest):
- client = DataTaxonomyServiceAsyncClient(
- credentials=async_anonymous_credentials(),
- transport=transport,
- )
-
- # Everything is optional in proto3 as far as the runtime is concerned,
- # and we are mocking out the actual API, so just send an empty request.
- request = request_type()
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client.transport.list_data_taxonomies),
- '__call__') as call:
- # Designate an appropriate return value for the call.
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(data_taxonomy.ListDataTaxonomiesResponse(
- next_page_token='next_page_token_value',
- unreachable_locations=['unreachable_locations_value'],
- ))
- response = await client.list_data_taxonomies(request)
-
- # Establish that the underlying gRPC stub method was called.
- assert len(call.mock_calls)
- _, args, _ = call.mock_calls[0]
- request = data_taxonomy.ListDataTaxonomiesRequest()
- assert args[0] == request
-
- # Establish that the response is the type that we expect.
- assert isinstance(response, pagers.ListDataTaxonomiesAsyncPager)
- assert response.next_page_token == 'next_page_token_value'
- assert response.unreachable_locations == ['unreachable_locations_value']
-
-
-@pytest.mark.asyncio
-async def test_list_data_taxonomies_async_from_dict():
- await test_list_data_taxonomies_async(request_type=dict)
-
-def test_list_data_taxonomies_field_headers():
- client = DataTaxonomyServiceClient(
- credentials=ga_credentials.AnonymousCredentials(),
- )
-
- # Any value that is part of the HTTP/1.1 URI should be sent as
- # a field header. Set these to a non-empty value.
- request = data_taxonomy.ListDataTaxonomiesRequest()
-
- request.parent = 'parent_value'
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client.transport.list_data_taxonomies),
- '__call__') as call:
- call.return_value = data_taxonomy.ListDataTaxonomiesResponse()
- client.list_data_taxonomies(request)
-
- # Establish that the underlying gRPC stub method was called.
- assert len(call.mock_calls) == 1
- _, args, _ = call.mock_calls[0]
- assert args[0] == request
-
- # Establish that the field header was sent.
- _, _, kw = call.mock_calls[0]
- assert (
- 'x-goog-request-params',
- 'parent=parent_value',
- ) in kw['metadata']
-
-
-@pytest.mark.asyncio
-async def test_list_data_taxonomies_field_headers_async():
- client = DataTaxonomyServiceAsyncClient(
- credentials=async_anonymous_credentials(),
- )
-
- # Any value that is part of the HTTP/1.1 URI should be sent as
- # a field header. Set these to a non-empty value.
- request = data_taxonomy.ListDataTaxonomiesRequest()
-
- request.parent = 'parent_value'
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client.transport.list_data_taxonomies),
- '__call__') as call:
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(data_taxonomy.ListDataTaxonomiesResponse())
- await client.list_data_taxonomies(request)
-
- # Establish that the underlying gRPC stub method was called.
- assert len(call.mock_calls)
- _, args, _ = call.mock_calls[0]
- assert args[0] == request
-
- # Establish that the field header was sent.
- _, _, kw = call.mock_calls[0]
- assert (
- 'x-goog-request-params',
- 'parent=parent_value',
- ) in kw['metadata']
-
-
-def test_list_data_taxonomies_flattened():
- client = DataTaxonomyServiceClient(
- credentials=ga_credentials.AnonymousCredentials(),
- )
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client.transport.list_data_taxonomies),
- '__call__') as call:
- # Designate an appropriate return value for the call.
- call.return_value = data_taxonomy.ListDataTaxonomiesResponse()
- # Call the method with a truthy value for each flattened field,
- # using the keyword arguments to the method.
- client.list_data_taxonomies(
- parent='parent_value',
- )
-
- # Establish that the underlying call was made with the expected
- # request object values.
- assert len(call.mock_calls) == 1
- _, args, _ = call.mock_calls[0]
- arg = args[0].parent
- mock_val = 'parent_value'
- assert arg == mock_val
-
-
-def test_list_data_taxonomies_flattened_error():
- client = DataTaxonomyServiceClient(
- credentials=ga_credentials.AnonymousCredentials(),
- )
-
- # Attempting to call a method with both a request object and flattened
- # fields is an error.
- with pytest.raises(ValueError):
- client.list_data_taxonomies(
- data_taxonomy.ListDataTaxonomiesRequest(),
- parent='parent_value',
- )
-
-@pytest.mark.asyncio
-async def test_list_data_taxonomies_flattened_async():
- client = DataTaxonomyServiceAsyncClient(
- credentials=async_anonymous_credentials(),
- )
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client.transport.list_data_taxonomies),
- '__call__') as call:
- # Designate an appropriate return value for the call.
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(data_taxonomy.ListDataTaxonomiesResponse())
- # Call the method with a truthy value for each flattened field,
- # using the keyword arguments to the method.
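The flattened-call tests in this group all pin the same contract: keyword arguments are copied into a fresh request message, and passing both a request object and flattened fields raises ValueError. A sketch of that rule, assuming a simple dict-backed request purely for illustration:

    def build_request(request=None, *, parent=None):
        # Flattened fields and an explicit request are mutually exclusive.
        if request is not None and parent is not None:
            raise ValueError("Cannot pass both a request and flattened fields.")
        request = request or {}
        if parent is not None:
            request['parent'] = parent
        return request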
- response = await client.list_data_taxonomies( - parent='parent_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].parent - mock_val = 'parent_value' - assert arg == mock_val - -@pytest.mark.asyncio -async def test_list_data_taxonomies_flattened_error_async(): - client = DataTaxonomyServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - await client.list_data_taxonomies( - data_taxonomy.ListDataTaxonomiesRequest(), - parent='parent_value', - ) - - -def test_list_data_taxonomies_pager(transport_name: str = "grpc"): - client = DataTaxonomyServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport_name, - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_data_taxonomies), - '__call__') as call: - # Set the response to a series of pages. - call.side_effect = ( - data_taxonomy.ListDataTaxonomiesResponse( - data_taxonomies=[ - data_taxonomy.DataTaxonomy(), - data_taxonomy.DataTaxonomy(), - data_taxonomy.DataTaxonomy(), - ], - next_page_token='abc', - ), - data_taxonomy.ListDataTaxonomiesResponse( - data_taxonomies=[], - next_page_token='def', - ), - data_taxonomy.ListDataTaxonomiesResponse( - data_taxonomies=[ - data_taxonomy.DataTaxonomy(), - ], - next_page_token='ghi', - ), - data_taxonomy.ListDataTaxonomiesResponse( - data_taxonomies=[ - data_taxonomy.DataTaxonomy(), - data_taxonomy.DataTaxonomy(), - ], - ), - RuntimeError, - ) - - expected_metadata = () - retry = retries.Retry() - timeout = 5 - expected_metadata = tuple(expected_metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ('parent', ''), - )), - ) - pager = client.list_data_taxonomies(request={}, retry=retry, timeout=timeout) - - assert pager._metadata == expected_metadata - assert pager._retry == retry - assert pager._timeout == timeout - - results = list(pager) - assert len(results) == 6 - assert all(isinstance(i, data_taxonomy.DataTaxonomy) - for i in results) -def test_list_data_taxonomies_pages(transport_name: str = "grpc"): - client = DataTaxonomyServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport_name, - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_data_taxonomies), - '__call__') as call: - # Set the response to a series of pages. 
- call.side_effect = ( - data_taxonomy.ListDataTaxonomiesResponse( - data_taxonomies=[ - data_taxonomy.DataTaxonomy(), - data_taxonomy.DataTaxonomy(), - data_taxonomy.DataTaxonomy(), - ], - next_page_token='abc', - ), - data_taxonomy.ListDataTaxonomiesResponse( - data_taxonomies=[], - next_page_token='def', - ), - data_taxonomy.ListDataTaxonomiesResponse( - data_taxonomies=[ - data_taxonomy.DataTaxonomy(), - ], - next_page_token='ghi', - ), - data_taxonomy.ListDataTaxonomiesResponse( - data_taxonomies=[ - data_taxonomy.DataTaxonomy(), - data_taxonomy.DataTaxonomy(), - ], - ), - RuntimeError, - ) - pages = list(client.list_data_taxonomies(request={}).pages) - for page_, token in zip(pages, ['abc','def','ghi', '']): - assert page_.raw_page.next_page_token == token - -@pytest.mark.asyncio -async def test_list_data_taxonomies_async_pager(): - client = DataTaxonomyServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_data_taxonomies), - '__call__', new_callable=mock.AsyncMock) as call: - # Set the response to a series of pages. - call.side_effect = ( - data_taxonomy.ListDataTaxonomiesResponse( - data_taxonomies=[ - data_taxonomy.DataTaxonomy(), - data_taxonomy.DataTaxonomy(), - data_taxonomy.DataTaxonomy(), - ], - next_page_token='abc', - ), - data_taxonomy.ListDataTaxonomiesResponse( - data_taxonomies=[], - next_page_token='def', - ), - data_taxonomy.ListDataTaxonomiesResponse( - data_taxonomies=[ - data_taxonomy.DataTaxonomy(), - ], - next_page_token='ghi', - ), - data_taxonomy.ListDataTaxonomiesResponse( - data_taxonomies=[ - data_taxonomy.DataTaxonomy(), - data_taxonomy.DataTaxonomy(), - ], - ), - RuntimeError, - ) - async_pager = await client.list_data_taxonomies(request={},) - assert async_pager.next_page_token == 'abc' - responses = [] - async for response in async_pager: # pragma: no branch - responses.append(response) - - assert len(responses) == 6 - assert all(isinstance(i, data_taxonomy.DataTaxonomy) - for i in responses) - - -@pytest.mark.asyncio -async def test_list_data_taxonomies_async_pages(): - client = DataTaxonomyServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_data_taxonomies), - '__call__', new_callable=mock.AsyncMock) as call: - # Set the response to a series of pages. 
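Each pager test feeds four responses plus a trailing RuntimeError through side_effect: the pager must stop on the first empty next_page_token, so a fifth RPC would trip the error. A minimal sketch of the page loop the generated pagers implement, with illustrative names:

    def iterate_pages(call_rpc, request):
        # Issue one RPC per page until the server returns no continuation token.
        while True:
            response = call_rpc(request)
            yield response
            if not response.next_page_token:
                break
            request['page_token'] = response.next_page_token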
- call.side_effect = ( - data_taxonomy.ListDataTaxonomiesResponse( - data_taxonomies=[ - data_taxonomy.DataTaxonomy(), - data_taxonomy.DataTaxonomy(), - data_taxonomy.DataTaxonomy(), - ], - next_page_token='abc', - ), - data_taxonomy.ListDataTaxonomiesResponse( - data_taxonomies=[], - next_page_token='def', - ), - data_taxonomy.ListDataTaxonomiesResponse( - data_taxonomies=[ - data_taxonomy.DataTaxonomy(), - ], - next_page_token='ghi', - ), - data_taxonomy.ListDataTaxonomiesResponse( - data_taxonomies=[ - data_taxonomy.DataTaxonomy(), - data_taxonomy.DataTaxonomy(), - ], - ), - RuntimeError, - ) - pages = [] - # Workaround issue in python 3.9 related to code coverage by adding `# pragma: no branch` - # See https://github.com/googleapis/gapic-generator-python/pull/1174#issuecomment-1025132372 - async for page_ in ( # pragma: no branch - await client.list_data_taxonomies(request={}) - ).pages: - pages.append(page_) - for page_, token in zip(pages, ['abc','def','ghi', '']): - assert page_.raw_page.next_page_token == token - -@pytest.mark.parametrize("request_type", [ - data_taxonomy.GetDataTaxonomyRequest, - dict, -]) -def test_get_data_taxonomy(request_type, transport: str = 'grpc'): - client = DataTaxonomyServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_data_taxonomy), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = data_taxonomy.DataTaxonomy( - name='name_value', - uid='uid_value', - description='description_value', - display_name='display_name_value', - attribute_count=1628, - etag='etag_value', - class_count=1182, - ) - response = client.get_data_taxonomy(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - request = data_taxonomy.GetDataTaxonomyRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, data_taxonomy.DataTaxonomy) - assert response.name == 'name_value' - assert response.uid == 'uid_value' - assert response.description == 'description_value' - assert response.display_name == 'display_name_value' - assert response.attribute_count == 1628 - assert response.etag == 'etag_value' - assert response.class_count == 1182 - - -def test_get_data_taxonomy_non_empty_request_with_auto_populated_field(): - # This test is a coverage failsafe to make sure that UUID4 fields are - # automatically populated, according to AIP-4235, with non-empty requests. - client = DataTaxonomyServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. - request = data_taxonomy.GetDataTaxonomyRequest( - name='name_value', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_data_taxonomy), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. 
- client.get_data_taxonomy(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == data_taxonomy.GetDataTaxonomyRequest( - name='name_value', - ) - -def test_get_data_taxonomy_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = DataTaxonomyServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.get_data_taxonomy in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.get_data_taxonomy] = mock_rpc - request = {} - client.get_data_taxonomy(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - client.get_data_taxonomy(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_get_data_taxonomy_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: - client = DataTaxonomyServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._client._transport.get_data_taxonomy in client._client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.AsyncMock() - mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[client._client._transport.get_data_taxonomy] = mock_rpc - - request = {} - await client.get_data_taxonomy(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - await client.get_data_taxonomy(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_get_data_taxonomy_async(transport: str = 'grpc_asyncio', request_type=data_taxonomy.GetDataTaxonomyRequest): - client = DataTaxonomyServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_data_taxonomy), - '__call__') as call: - # Designate an appropriate return value for the call. 
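In the async tests, the designated return value must be awaitable, which is why the mocks wrap responses in grpc_helpers_async.FakeUnaryUnaryCall instead of returning the message directly. A stand-in with the same shape, assuming nothing beyond the await protocol:

    class FakeCall:
        # Awaiting the call object yields the designated response, mirroring
        # the unary-unary call objects returned by grpc.aio stubs.
        def __init__(self, response):
            self._response = response

        def __await__(self):
            async def _resolve():
                return self._response
            return _resolve().__await__()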
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(data_taxonomy.DataTaxonomy(
- name='name_value',
- uid='uid_value',
- description='description_value',
- display_name='display_name_value',
- attribute_count=1628,
- etag='etag_value',
- class_count=1182,
- ))
- response = await client.get_data_taxonomy(request)
-
- # Establish that the underlying gRPC stub method was called.
- assert len(call.mock_calls)
- _, args, _ = call.mock_calls[0]
- request = data_taxonomy.GetDataTaxonomyRequest()
- assert args[0] == request
-
- # Establish that the response is the type that we expect.
- assert isinstance(response, data_taxonomy.DataTaxonomy)
- assert response.name == 'name_value'
- assert response.uid == 'uid_value'
- assert response.description == 'description_value'
- assert response.display_name == 'display_name_value'
- assert response.attribute_count == 1628
- assert response.etag == 'etag_value'
- assert response.class_count == 1182
-
-
-@pytest.mark.asyncio
-async def test_get_data_taxonomy_async_from_dict():
- await test_get_data_taxonomy_async(request_type=dict)
-
-def test_get_data_taxonomy_field_headers():
- client = DataTaxonomyServiceClient(
- credentials=ga_credentials.AnonymousCredentials(),
- )
-
- # Any value that is part of the HTTP/1.1 URI should be sent as
- # a field header. Set these to a non-empty value.
- request = data_taxonomy.GetDataTaxonomyRequest()
-
- request.name = 'name_value'
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client.transport.get_data_taxonomy),
- '__call__') as call:
- call.return_value = data_taxonomy.DataTaxonomy()
- client.get_data_taxonomy(request)
-
- # Establish that the underlying gRPC stub method was called.
- assert len(call.mock_calls) == 1
- _, args, _ = call.mock_calls[0]
- assert args[0] == request
-
- # Establish that the field header was sent.
- _, _, kw = call.mock_calls[0]
- assert (
- 'x-goog-request-params',
- 'name=name_value',
- ) in kw['metadata']
-
-
-@pytest.mark.asyncio
-async def test_get_data_taxonomy_field_headers_async():
- client = DataTaxonomyServiceAsyncClient(
- credentials=async_anonymous_credentials(),
- )
-
- # Any value that is part of the HTTP/1.1 URI should be sent as
- # a field header. Set these to a non-empty value.
- request = data_taxonomy.GetDataTaxonomyRequest()
-
- request.name = 'name_value'
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client.transport.get_data_taxonomy),
- '__call__') as call:
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(data_taxonomy.DataTaxonomy())
- await client.get_data_taxonomy(request)
-
- # Establish that the underlying gRPC stub method was called.
- assert len(call.mock_calls)
- _, args, _ = call.mock_calls[0]
- assert args[0] == request
-
- # Establish that the field header was sent.
- _, _, kw = call.mock_calls[0]
- assert (
- 'x-goog-request-params',
- 'name=name_value',
- ) in kw['metadata']
-
-
-def test_get_data_taxonomy_flattened():
- client = DataTaxonomyServiceClient(
- credentials=ga_credentials.AnonymousCredentials(),
- )
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client.transport.get_data_taxonomy),
- '__call__') as call:
- # Designate an appropriate return value for the call.
- call.return_value = data_taxonomy.DataTaxonomy()
- # Call the method with a truthy value for each flattened field,
- # using the keyword arguments to the method.
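The field-header tests above assert on the routing metadata the client derives from URI-bound request fields. The header is built with gapic_v1.routing_header, which URL-encodes the field/value pairs into a single x-goog-request-params entry:

    from google.api_core.gapic_v1 import routing_header

    # to_grpc_metadata returns the (key, value) pair these tests look for
    # in kw['metadata'].
    assert routing_header.to_grpc_metadata(
        (('name', 'name_value'),)
    ) == ('x-goog-request-params', 'name=name_value')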
- client.get_data_taxonomy(
- name='name_value',
- )
-
- # Establish that the underlying call was made with the expected
- # request object values.
- assert len(call.mock_calls) == 1
- _, args, _ = call.mock_calls[0]
- arg = args[0].name
- mock_val = 'name_value'
- assert arg == mock_val
-
-
-def test_get_data_taxonomy_flattened_error():
- client = DataTaxonomyServiceClient(
- credentials=ga_credentials.AnonymousCredentials(),
- )
-
- # Attempting to call a method with both a request object and flattened
- # fields is an error.
- with pytest.raises(ValueError):
- client.get_data_taxonomy(
- data_taxonomy.GetDataTaxonomyRequest(),
- name='name_value',
- )
-
-@pytest.mark.asyncio
-async def test_get_data_taxonomy_flattened_async():
- client = DataTaxonomyServiceAsyncClient(
- credentials=async_anonymous_credentials(),
- )
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client.transport.get_data_taxonomy),
- '__call__') as call:
- # Designate an appropriate return value for the call.
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(data_taxonomy.DataTaxonomy())
- # Call the method with a truthy value for each flattened field,
- # using the keyword arguments to the method.
- response = await client.get_data_taxonomy(
- name='name_value',
- )
-
- # Establish that the underlying call was made with the expected
- # request object values.
- assert len(call.mock_calls)
- _, args, _ = call.mock_calls[0]
- arg = args[0].name
- mock_val = 'name_value'
- assert arg == mock_val
-
-@pytest.mark.asyncio
-async def test_get_data_taxonomy_flattened_error_async():
- client = DataTaxonomyServiceAsyncClient(
- credentials=async_anonymous_credentials(),
- )
-
- # Attempting to call a method with both a request object and flattened
- # fields is an error.
- with pytest.raises(ValueError):
- await client.get_data_taxonomy(
- data_taxonomy.GetDataTaxonomyRequest(),
- name='name_value',
- )
-
-
-@pytest.mark.parametrize("request_type", [
- data_taxonomy.CreateDataAttributeBindingRequest,
- dict,
-])
-def test_create_data_attribute_binding(request_type, transport: str = 'grpc'):
- client = DataTaxonomyServiceClient(
- credentials=ga_credentials.AnonymousCredentials(),
- transport=transport,
- )
-
- # Everything is optional in proto3 as far as the runtime is concerned,
- # and we are mocking out the actual API, so just send an empty request.
- request = request_type()
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client.transport.create_data_attribute_binding),
- '__call__') as call:
- # Designate an appropriate return value for the call.
- call.return_value = operations_pb2.Operation(name='operations/spam')
- response = client.create_data_attribute_binding(request)
-
- # Establish that the underlying gRPC stub method was called.
- assert len(call.mock_calls) == 1
- _, args, _ = call.mock_calls[0]
- request = data_taxonomy.CreateDataAttributeBindingRequest()
- assert args[0] == request
-
- # Establish that the response is the type that we expect.
- assert isinstance(response, future.Future)
-
-
-def test_create_data_attribute_binding_non_empty_request_with_auto_populated_field():
- # This test is a coverage failsafe to make sure that UUID4 fields are
- # automatically populated, according to AIP-4235, with non-empty requests.
- client = DataTaxonomyServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. - request = data_taxonomy.CreateDataAttributeBindingRequest( - parent='parent_value', - data_attribute_binding_id='data_attribute_binding_id_value', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_data_attribute_binding), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client.create_data_attribute_binding(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == data_taxonomy.CreateDataAttributeBindingRequest( - parent='parent_value', - data_attribute_binding_id='data_attribute_binding_id_value', - ) - -def test_create_data_attribute_binding_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = DataTaxonomyServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.create_data_attribute_binding in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.create_data_attribute_binding] = mock_rpc - request = {} - client.create_data_attribute_binding(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - # Operation methods call wrapper_fn to build a cached - # client._transport.operations_client instance on first rpc call. - # Subsequent calls should use the cached wrapper - wrapper_fn.reset_mock() - - client.create_data_attribute_binding(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_create_data_attribute_binding_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: - client = DataTaxonomyServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._client._transport.create_data_attribute_binding in client._client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.AsyncMock() - mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[client._client._transport.create_data_attribute_binding] = mock_rpc - - request = {} - await client.create_data_attribute_binding(request) - - # Establish that the underlying gRPC stub method was called. 
- assert mock_rpc.call_count == 1 - - # Operation methods call wrapper_fn to build a cached - # client._transport.operations_client instance on first rpc call. - # Subsequent calls should use the cached wrapper - wrapper_fn.reset_mock() - - await client.create_data_attribute_binding(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_create_data_attribute_binding_async(transport: str = 'grpc_asyncio', request_type=data_taxonomy.CreateDataAttributeBindingRequest): - client = DataTaxonomyServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_data_attribute_binding), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name='operations/spam') - ) - response = await client.create_data_attribute_binding(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - request = data_taxonomy.CreateDataAttributeBindingRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, future.Future) - - -@pytest.mark.asyncio -async def test_create_data_attribute_binding_async_from_dict(): - await test_create_data_attribute_binding_async(request_type=dict) - -def test_create_data_attribute_binding_field_headers(): - client = DataTaxonomyServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = data_taxonomy.CreateDataAttributeBindingRequest() - - request.parent = 'parent_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_data_attribute_binding), - '__call__') as call: - call.return_value = operations_pb2.Operation(name='operations/op') - client.create_data_attribute_binding(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'parent=parent_value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_create_data_attribute_binding_field_headers_async(): - client = DataTaxonomyServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = data_taxonomy.CreateDataAttributeBindingRequest() - - request.parent = 'parent_value' - - # Mock the actual call within the gRPC stub, and fake the request. 
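The create/update/delete RPCs in this service are long-running: the transport hands back a raw operations_pb2.Operation, and the client wraps it in a polling future, which is why these tests assert isinstance(response, future.Future). A sketch of that hand-off via google.api_core.operation.from_gapic; the result type and helper name are illustrative:

    from google.api_core import operation
    from google.protobuf import empty_pb2

    def wrap_lro(raw_operation, operations_client):
        # The result is an api_core Operation future; .result() polls the
        # operations client until the server marks the LRO done.
        return operation.from_gapic(
            raw_operation,
            operations_client,
            empty_pb2.Empty,
        )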
- with mock.patch.object(
- type(client.transport.create_data_attribute_binding),
- '__call__') as call:
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(operations_pb2.Operation(name='operations/op'))
- await client.create_data_attribute_binding(request)
-
- # Establish that the underlying gRPC stub method was called.
- assert len(call.mock_calls)
- _, args, _ = call.mock_calls[0]
- assert args[0] == request
-
- # Establish that the field header was sent.
- _, _, kw = call.mock_calls[0]
- assert (
- 'x-goog-request-params',
- 'parent=parent_value',
- ) in kw['metadata']
-
-
-def test_create_data_attribute_binding_flattened():
- client = DataTaxonomyServiceClient(
- credentials=ga_credentials.AnonymousCredentials(),
- )
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client.transport.create_data_attribute_binding),
- '__call__') as call:
- # Designate an appropriate return value for the call.
- call.return_value = operations_pb2.Operation(name='operations/op')
- # Call the method with a truthy value for each flattened field,
- # using the keyword arguments to the method.
- client.create_data_attribute_binding(
- parent='parent_value',
- data_attribute_binding=data_taxonomy.DataAttributeBinding(name='name_value'),
- data_attribute_binding_id='data_attribute_binding_id_value',
- )
-
- # Establish that the underlying call was made with the expected
- # request object values.
- assert len(call.mock_calls) == 1
- _, args, _ = call.mock_calls[0]
- arg = args[0].parent
- mock_val = 'parent_value'
- assert arg == mock_val
- arg = args[0].data_attribute_binding
- mock_val = data_taxonomy.DataAttributeBinding(name='name_value')
- assert arg == mock_val
- arg = args[0].data_attribute_binding_id
- mock_val = 'data_attribute_binding_id_value'
- assert arg == mock_val
-
-
-def test_create_data_attribute_binding_flattened_error():
- client = DataTaxonomyServiceClient(
- credentials=ga_credentials.AnonymousCredentials(),
- )
-
- # Attempting to call a method with both a request object and flattened
- # fields is an error.
- with pytest.raises(ValueError):
- client.create_data_attribute_binding(
- data_taxonomy.CreateDataAttributeBindingRequest(),
- parent='parent_value',
- data_attribute_binding=data_taxonomy.DataAttributeBinding(name='name_value'),
- data_attribute_binding_id='data_attribute_binding_id_value',
- )
-
-@pytest.mark.asyncio
-async def test_create_data_attribute_binding_flattened_async():
- client = DataTaxonomyServiceAsyncClient(
- credentials=async_anonymous_credentials(),
- )
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client.transport.create_data_attribute_binding),
- '__call__') as call:
- # Designate an appropriate return value for the call.
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
- operations_pb2.Operation(name='operations/spam')
- )
- # Call the method with a truthy value for each flattened field,
- # using the keyword arguments to the method.
- response = await client.create_data_attribute_binding(
- parent='parent_value',
- data_attribute_binding=data_taxonomy.DataAttributeBinding(name='name_value'),
- data_attribute_binding_id='data_attribute_binding_id_value',
- )
-
- # Establish that the underlying call was made with the expected
- # request object values.
- assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].parent - mock_val = 'parent_value' - assert arg == mock_val - arg = args[0].data_attribute_binding - mock_val = data_taxonomy.DataAttributeBinding(name='name_value') - assert arg == mock_val - arg = args[0].data_attribute_binding_id - mock_val = 'data_attribute_binding_id_value' - assert arg == mock_val - -@pytest.mark.asyncio -async def test_create_data_attribute_binding_flattened_error_async(): - client = DataTaxonomyServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - await client.create_data_attribute_binding( - data_taxonomy.CreateDataAttributeBindingRequest(), - parent='parent_value', - data_attribute_binding=data_taxonomy.DataAttributeBinding(name='name_value'), - data_attribute_binding_id='data_attribute_binding_id_value', - ) - - -@pytest.mark.parametrize("request_type", [ - data_taxonomy.UpdateDataAttributeBindingRequest, - dict, -]) -def test_update_data_attribute_binding(request_type, transport: str = 'grpc'): - client = DataTaxonomyServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_data_attribute_binding), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = operations_pb2.Operation(name='operations/spam') - response = client.update_data_attribute_binding(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - request = data_taxonomy.UpdateDataAttributeBindingRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, future.Future) - - -def test_update_data_attribute_binding_non_empty_request_with_auto_populated_field(): - # This test is a coverage failsafe to make sure that UUID4 fields are - # automatically populated, according to AIP-4235, with non-empty requests. - client = DataTaxonomyServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. - request = data_taxonomy.UpdateDataAttributeBindingRequest( - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_data_attribute_binding), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. 
- client.update_data_attribute_binding(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == data_taxonomy.UpdateDataAttributeBindingRequest( - ) - -def test_update_data_attribute_binding_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = DataTaxonomyServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.update_data_attribute_binding in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.update_data_attribute_binding] = mock_rpc - request = {} - client.update_data_attribute_binding(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - # Operation methods call wrapper_fn to build a cached - # client._transport.operations_client instance on first rpc call. - # Subsequent calls should use the cached wrapper - wrapper_fn.reset_mock() - - client.update_data_attribute_binding(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_update_data_attribute_binding_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: - client = DataTaxonomyServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._client._transport.update_data_attribute_binding in client._client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.AsyncMock() - mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[client._client._transport.update_data_attribute_binding] = mock_rpc - - request = {} - await client.update_data_attribute_binding(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - # Operation methods call wrapper_fn to build a cached - # client._transport.operations_client instance on first rpc call. 
- # Subsequent calls should use the cached wrapper - wrapper_fn.reset_mock() - - await client.update_data_attribute_binding(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_update_data_attribute_binding_async(transport: str = 'grpc_asyncio', request_type=data_taxonomy.UpdateDataAttributeBindingRequest): - client = DataTaxonomyServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_data_attribute_binding), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name='operations/spam') - ) - response = await client.update_data_attribute_binding(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - request = data_taxonomy.UpdateDataAttributeBindingRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, future.Future) - - -@pytest.mark.asyncio -async def test_update_data_attribute_binding_async_from_dict(): - await test_update_data_attribute_binding_async(request_type=dict) - -def test_update_data_attribute_binding_field_headers(): - client = DataTaxonomyServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = data_taxonomy.UpdateDataAttributeBindingRequest() - - request.data_attribute_binding.name = 'name_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_data_attribute_binding), - '__call__') as call: - call.return_value = operations_pb2.Operation(name='operations/op') - client.update_data_attribute_binding(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'data_attribute_binding.name=name_value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_update_data_attribute_binding_field_headers_async(): - client = DataTaxonomyServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = data_taxonomy.UpdateDataAttributeBindingRequest() - - request.data_attribute_binding.name = 'name_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_data_attribute_binding), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(operations_pb2.Operation(name='operations/op')) - await client.update_data_attribute_binding(request) - - # Establish that the underlying gRPC stub method was called. 
- assert len(call.mock_calls)
- _, args, _ = call.mock_calls[0]
- assert args[0] == request
-
- # Establish that the field header was sent.
- _, _, kw = call.mock_calls[0]
- assert (
- 'x-goog-request-params',
- 'data_attribute_binding.name=name_value',
- ) in kw['metadata']
-
-
-def test_update_data_attribute_binding_flattened():
- client = DataTaxonomyServiceClient(
- credentials=ga_credentials.AnonymousCredentials(),
- )
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client.transport.update_data_attribute_binding),
- '__call__') as call:
- # Designate an appropriate return value for the call.
- call.return_value = operations_pb2.Operation(name='operations/op')
- # Call the method with a truthy value for each flattened field,
- # using the keyword arguments to the method.
- client.update_data_attribute_binding(
- data_attribute_binding=data_taxonomy.DataAttributeBinding(name='name_value'),
- update_mask=field_mask_pb2.FieldMask(paths=['paths_value']),
- )
-
- # Establish that the underlying call was made with the expected
- # request object values.
- assert len(call.mock_calls) == 1
- _, args, _ = call.mock_calls[0]
- arg = args[0].data_attribute_binding
- mock_val = data_taxonomy.DataAttributeBinding(name='name_value')
- assert arg == mock_val
- arg = args[0].update_mask
- mock_val = field_mask_pb2.FieldMask(paths=['paths_value'])
- assert arg == mock_val
-
-
-def test_update_data_attribute_binding_flattened_error():
- client = DataTaxonomyServiceClient(
- credentials=ga_credentials.AnonymousCredentials(),
- )
-
- # Attempting to call a method with both a request object and flattened
- # fields is an error.
- with pytest.raises(ValueError):
- client.update_data_attribute_binding(
- data_taxonomy.UpdateDataAttributeBindingRequest(),
- data_attribute_binding=data_taxonomy.DataAttributeBinding(name='name_value'),
- update_mask=field_mask_pb2.FieldMask(paths=['paths_value']),
- )
-
-@pytest.mark.asyncio
-async def test_update_data_attribute_binding_flattened_async():
- client = DataTaxonomyServiceAsyncClient(
- credentials=async_anonymous_credentials(),
- )
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client.transport.update_data_attribute_binding),
- '__call__') as call:
- # Designate an appropriate return value for the call.
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
- operations_pb2.Operation(name='operations/spam')
- )
- # Call the method with a truthy value for each flattened field,
- # using the keyword arguments to the method.
- response = await client.update_data_attribute_binding(
- data_attribute_binding=data_taxonomy.DataAttributeBinding(name='name_value'),
- update_mask=field_mask_pb2.FieldMask(paths=['paths_value']),
- )
-
- # Establish that the underlying call was made with the expected
- # request object values.
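The update tests pair the resource with a FieldMask naming the fields to overwrite; anything not listed in the mask is left untouched server-side. A small illustration, where the path shown is hypothetical rather than a checked DataAttributeBinding field:

    from google.protobuf import field_mask_pb2

    # Only the fields named in `paths` are written by the update RPC.
    mask = field_mask_pb2.FieldMask(paths=['description'])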
- assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].data_attribute_binding - mock_val = data_taxonomy.DataAttributeBinding(name='name_value') - assert arg == mock_val - arg = args[0].update_mask - mock_val = field_mask_pb2.FieldMask(paths=['paths_value']) - assert arg == mock_val - -@pytest.mark.asyncio -async def test_update_data_attribute_binding_flattened_error_async(): - client = DataTaxonomyServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - await client.update_data_attribute_binding( - data_taxonomy.UpdateDataAttributeBindingRequest(), - data_attribute_binding=data_taxonomy.DataAttributeBinding(name='name_value'), - update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), - ) - - -@pytest.mark.parametrize("request_type", [ - data_taxonomy.DeleteDataAttributeBindingRequest, - dict, -]) -def test_delete_data_attribute_binding(request_type, transport: str = 'grpc'): - client = DataTaxonomyServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_data_attribute_binding), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = operations_pb2.Operation(name='operations/spam') - response = client.delete_data_attribute_binding(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - request = data_taxonomy.DeleteDataAttributeBindingRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, future.Future) - - -def test_delete_data_attribute_binding_non_empty_request_with_auto_populated_field(): - # This test is a coverage failsafe to make sure that UUID4 fields are - # automatically populated, according to AIP-4235, with non-empty requests. - client = DataTaxonomyServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. - request = data_taxonomy.DeleteDataAttributeBindingRequest( - name='name_value', - etag='etag_value', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_data_attribute_binding), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. 
- client.delete_data_attribute_binding(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == data_taxonomy.DeleteDataAttributeBindingRequest( - name='name_value', - etag='etag_value', - ) - -def test_delete_data_attribute_binding_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = DataTaxonomyServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.delete_data_attribute_binding in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.delete_data_attribute_binding] = mock_rpc - request = {} - client.delete_data_attribute_binding(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - # Operation methods call wrapper_fn to build a cached - # client._transport.operations_client instance on first rpc call. - # Subsequent calls should use the cached wrapper - wrapper_fn.reset_mock() - - client.delete_data_attribute_binding(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_delete_data_attribute_binding_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: - client = DataTaxonomyServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._client._transport.delete_data_attribute_binding in client._client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.AsyncMock() - mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[client._client._transport.delete_data_attribute_binding] = mock_rpc - - request = {} - await client.delete_data_attribute_binding(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - # Operation methods call wrapper_fn to build a cached - # client._transport.operations_client instance on first rpc call. 
- # Subsequent calls should use the cached wrapper - wrapper_fn.reset_mock() - - await client.delete_data_attribute_binding(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_delete_data_attribute_binding_async(transport: str = 'grpc_asyncio', request_type=data_taxonomy.DeleteDataAttributeBindingRequest): - client = DataTaxonomyServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_data_attribute_binding), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name='operations/spam') - ) - response = await client.delete_data_attribute_binding(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - request = data_taxonomy.DeleteDataAttributeBindingRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, future.Future) - - -@pytest.mark.asyncio -async def test_delete_data_attribute_binding_async_from_dict(): - await test_delete_data_attribute_binding_async(request_type=dict) - -def test_delete_data_attribute_binding_field_headers(): - client = DataTaxonomyServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = data_taxonomy.DeleteDataAttributeBindingRequest() - - request.name = 'name_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_data_attribute_binding), - '__call__') as call: - call.return_value = operations_pb2.Operation(name='operations/op') - client.delete_data_attribute_binding(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'name=name_value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_delete_data_attribute_binding_field_headers_async(): - client = DataTaxonomyServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = data_taxonomy.DeleteDataAttributeBindingRequest() - - request.name = 'name_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_data_attribute_binding), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(operations_pb2.Operation(name='operations/op')) - await client.delete_data_attribute_binding(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. 
- _, _, kw = call.mock_calls[0]
- assert (
- 'x-goog-request-params',
- 'name=name_value',
- ) in kw['metadata']
-
-
-def test_delete_data_attribute_binding_flattened():
- client = DataTaxonomyServiceClient(
- credentials=ga_credentials.AnonymousCredentials(),
- )
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client.transport.delete_data_attribute_binding),
- '__call__') as call:
- # Designate an appropriate return value for the call.
- call.return_value = operations_pb2.Operation(name='operations/op')
- # Call the method with a truthy value for each flattened field,
- # using the keyword arguments to the method.
- client.delete_data_attribute_binding(
- name='name_value',
- )
-
- # Establish that the underlying call was made with the expected
- # request object values.
- assert len(call.mock_calls) == 1
- _, args, _ = call.mock_calls[0]
- arg = args[0].name
- mock_val = 'name_value'
- assert arg == mock_val
-
-
-def test_delete_data_attribute_binding_flattened_error():
- client = DataTaxonomyServiceClient(
- credentials=ga_credentials.AnonymousCredentials(),
- )
-
- # Attempting to call a method with both a request object and flattened
- # fields is an error.
- with pytest.raises(ValueError):
- client.delete_data_attribute_binding(
- data_taxonomy.DeleteDataAttributeBindingRequest(),
- name='name_value',
- )
-
-@pytest.mark.asyncio
-async def test_delete_data_attribute_binding_flattened_async():
- client = DataTaxonomyServiceAsyncClient(
- credentials=async_anonymous_credentials(),
- )
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client.transport.delete_data_attribute_binding),
- '__call__') as call:
- # Designate an appropriate return value for the call.
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
- operations_pb2.Operation(name='operations/spam')
- )
- # Call the method with a truthy value for each flattened field,
- # using the keyword arguments to the method.
- response = await client.delete_data_attribute_binding(
- name='name_value',
- )
-
- # Establish that the underlying call was made with the expected
- # request object values.
- assert len(call.mock_calls)
- _, args, _ = call.mock_calls[0]
- arg = args[0].name
- mock_val = 'name_value'
- assert arg == mock_val
-
-@pytest.mark.asyncio
-async def test_delete_data_attribute_binding_flattened_error_async():
- client = DataTaxonomyServiceAsyncClient(
- credentials=async_anonymous_credentials(),
- )
-
- # Attempting to call a method with both a request object and flattened
- # fields is an error.
- with pytest.raises(ValueError):
- await client.delete_data_attribute_binding(
- data_taxonomy.DeleteDataAttributeBindingRequest(),
- name='name_value',
- )
-
-
-@pytest.mark.parametrize("request_type", [
- data_taxonomy.ListDataAttributeBindingsRequest,
- dict,
-])
-def test_list_data_attribute_bindings(request_type, transport: str = 'grpc'):
- client = DataTaxonomyServiceClient(
- credentials=ga_credentials.AnonymousCredentials(),
- transport=transport,
- )
-
- # Everything is optional in proto3 as far as the runtime is concerned,
- # and we are mocking out the actual API, so just send an empty request.
- request = request_type()
-
- # Mock the actual call within the gRPC stub, and fake the request.
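- # (Patching `__call__` on the transport method's multicallable type
- # intercepts the gRPC stub invocation itself, so these tests exercise
- # the full client surface without touching the network.)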
- with mock.patch.object( - type(client.transport.list_data_attribute_bindings), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = data_taxonomy.ListDataAttributeBindingsResponse( - next_page_token='next_page_token_value', - unreachable_locations=['unreachable_locations_value'], - ) - response = client.list_data_attribute_bindings(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - request = data_taxonomy.ListDataAttributeBindingsRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, pagers.ListDataAttributeBindingsPager) - assert response.next_page_token == 'next_page_token_value' - assert response.unreachable_locations == ['unreachable_locations_value'] - - -def test_list_data_attribute_bindings_non_empty_request_with_auto_populated_field(): - # This test is a coverage failsafe to make sure that UUID4 fields are - # automatically populated, according to AIP-4235, with non-empty requests. - client = DataTaxonomyServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. - request = data_taxonomy.ListDataAttributeBindingsRequest( - parent='parent_value', - page_token='page_token_value', - filter='filter_value', - order_by='order_by_value', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_data_attribute_bindings), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client.list_data_attribute_bindings(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == data_taxonomy.ListDataAttributeBindingsRequest( - parent='parent_value', - page_token='page_token_value', - filter='filter_value', - order_by='order_by_value', - ) - -def test_list_data_attribute_bindings_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = DataTaxonomyServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.list_data_attribute_bindings in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.list_data_attribute_bindings] = mock_rpc - request = {} - client.list_data_attribute_bindings(request) - - # Establish that the underlying gRPC stub method was called. 
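- # (`_prep_wrapped_messages` populates `_transport._wrapped_methods` once,
- # at client construction; the counters below verify that later calls
- # reuse that cache instead of re-wrapping the rpc.)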
- assert mock_rpc.call_count == 1
-
- client.list_data_attribute_bindings(request)
-
- # Establish that a new wrapper was not created for this call
- assert wrapper_fn.call_count == 0
- assert mock_rpc.call_count == 2
-
-@pytest.mark.asyncio
-async def test_list_data_attribute_bindings_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"):
- # Clients should use _prep_wrapped_messages to create cached wrapped rpcs,
- # instead of constructing them on each call
- with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn:
- client = DataTaxonomyServiceAsyncClient(
- credentials=async_anonymous_credentials(),
- transport=transport,
- )
-
- # Should wrap all calls on client creation
- assert wrapper_fn.call_count > 0
- wrapper_fn.reset_mock()
-
- # Ensure method has been cached
- assert client._client._transport.list_data_attribute_bindings in client._client._transport._wrapped_methods
-
- # Replace cached wrapped function with mock
- mock_rpc = mock.AsyncMock()
- mock_rpc.return_value = mock.Mock()
- client._client._transport._wrapped_methods[client._client._transport.list_data_attribute_bindings] = mock_rpc
-
- request = {}
- await client.list_data_attribute_bindings(request)
-
- # Establish that the underlying gRPC stub method was called.
- assert mock_rpc.call_count == 1
-
- await client.list_data_attribute_bindings(request)
-
- # Establish that a new wrapper was not created for this call
- assert wrapper_fn.call_count == 0
- assert mock_rpc.call_count == 2
-
-@pytest.mark.asyncio
-async def test_list_data_attribute_bindings_async(transport: str = 'grpc_asyncio', request_type=data_taxonomy.ListDataAttributeBindingsRequest):
- client = DataTaxonomyServiceAsyncClient(
- credentials=async_anonymous_credentials(),
- transport=transport,
- )
-
- # Everything is optional in proto3 as far as the runtime is concerned,
- # and we are mocking out the actual API, so just send an empty request.
- request = request_type()
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client.transport.list_data_attribute_bindings),
- '__call__') as call:
- # Designate an appropriate return value for the call.
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(data_taxonomy.ListDataAttributeBindingsResponse(
- next_page_token='next_page_token_value',
- unreachable_locations=['unreachable_locations_value'],
- ))
- response = await client.list_data_attribute_bindings(request)
-
- # Establish that the underlying gRPC stub method was called.
- assert len(call.mock_calls)
- _, args, _ = call.mock_calls[0]
- request = data_taxonomy.ListDataAttributeBindingsRequest()
- assert args[0] == request
-
- # Establish that the response is the type that we expect.
- assert isinstance(response, pagers.ListDataAttributeBindingsAsyncPager)
- assert response.next_page_token == 'next_page_token_value'
- assert response.unreachable_locations == ['unreachable_locations_value']
-
-
-@pytest.mark.asyncio
-async def test_list_data_attribute_bindings_async_from_dict():
- await test_list_data_attribute_bindings_async(request_type=dict)
-
-def test_list_data_attribute_bindings_field_headers():
- client = DataTaxonomyServiceClient(
- credentials=ga_credentials.AnonymousCredentials(),
- )
-
- # Any value that is part of the HTTP/1.1 URI should be sent as
- # a field header. Set these to a non-empty value.
- request = data_taxonomy.ListDataAttributeBindingsRequest() - - request.parent = 'parent_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_data_attribute_bindings), - '__call__') as call: - call.return_value = data_taxonomy.ListDataAttributeBindingsResponse() - client.list_data_attribute_bindings(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'parent=parent_value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_list_data_attribute_bindings_field_headers_async(): - client = DataTaxonomyServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = data_taxonomy.ListDataAttributeBindingsRequest() - - request.parent = 'parent_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_data_attribute_bindings), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(data_taxonomy.ListDataAttributeBindingsResponse()) - await client.list_data_attribute_bindings(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'parent=parent_value', - ) in kw['metadata'] - - -def test_list_data_attribute_bindings_flattened(): - client = DataTaxonomyServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_data_attribute_bindings), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = data_taxonomy.ListDataAttributeBindingsResponse() - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.list_data_attribute_bindings( - parent='parent_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - arg = args[0].parent - mock_val = 'parent_value' - assert arg == mock_val - - -def test_list_data_attribute_bindings_flattened_error(): - client = DataTaxonomyServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.list_data_attribute_bindings( - data_taxonomy.ListDataAttributeBindingsRequest(), - parent='parent_value', - ) - -@pytest.mark.asyncio -async def test_list_data_attribute_bindings_flattened_async(): - client = DataTaxonomyServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_data_attribute_bindings), - '__call__') as call: - # Designate an appropriate return value for the call. 
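- # (The async transport awaits the stub, so the mocked response must be
- # wrapped in grpc_helpers_async.FakeUnaryUnaryCall to be awaitable.)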
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(data_taxonomy.ListDataAttributeBindingsResponse())
- # Call the method with a truthy value for each flattened field,
- # using the keyword arguments to the method.
- response = await client.list_data_attribute_bindings(
- parent='parent_value',
- )
-
- # Establish that the underlying call was made with the expected
- # request object values.
- assert len(call.mock_calls)
- _, args, _ = call.mock_calls[0]
- arg = args[0].parent
- mock_val = 'parent_value'
- assert arg == mock_val
-
-@pytest.mark.asyncio
-async def test_list_data_attribute_bindings_flattened_error_async():
- client = DataTaxonomyServiceAsyncClient(
- credentials=async_anonymous_credentials(),
- )
-
- # Attempting to call a method with both a request object and flattened
- # fields is an error.
- with pytest.raises(ValueError):
- await client.list_data_attribute_bindings(
- data_taxonomy.ListDataAttributeBindingsRequest(),
- parent='parent_value',
- )
-
-
-def test_list_data_attribute_bindings_pager(transport_name: str = "grpc"):
- client = DataTaxonomyServiceClient(
- credentials=ga_credentials.AnonymousCredentials(),
- transport=transport_name,
- )
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client.transport.list_data_attribute_bindings),
- '__call__') as call:
- # Set the response to a series of pages.
- call.side_effect = (
- data_taxonomy.ListDataAttributeBindingsResponse(
- data_attribute_bindings=[
- data_taxonomy.DataAttributeBinding(),
- data_taxonomy.DataAttributeBinding(),
- data_taxonomy.DataAttributeBinding(),
- ],
- next_page_token='abc',
- ),
- data_taxonomy.ListDataAttributeBindingsResponse(
- data_attribute_bindings=[],
- next_page_token='def',
- ),
- data_taxonomy.ListDataAttributeBindingsResponse(
- data_attribute_bindings=[
- data_taxonomy.DataAttributeBinding(),
- ],
- next_page_token='ghi',
- ),
- data_taxonomy.ListDataAttributeBindingsResponse(
- data_attribute_bindings=[
- data_taxonomy.DataAttributeBinding(),
- data_taxonomy.DataAttributeBinding(),
- ],
- ),
- RuntimeError,
- )
-
- expected_metadata = ()
- retry = retries.Retry()
- timeout = 5
- expected_metadata = tuple(expected_metadata) + (
- gapic_v1.routing_header.to_grpc_metadata((
- ('parent', ''),
- )),
- )
- pager = client.list_data_attribute_bindings(request={}, retry=retry, timeout=timeout)
-
- assert pager._metadata == expected_metadata
- assert pager._retry == retry
- assert pager._timeout == timeout
-
- results = list(pager)
- assert len(results) == 6
- assert all(isinstance(i, data_taxonomy.DataAttributeBinding)
- for i in results)
-
-
-def test_list_data_attribute_bindings_pages(transport_name: str = "grpc"):
- client = DataTaxonomyServiceClient(
- credentials=ga_credentials.AnonymousCredentials(),
- transport=transport_name,
- )
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client.transport.list_data_attribute_bindings),
- '__call__') as call:
- # Set the response to a series of pages.
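- # (`side_effect` yields one response per page request; the trailing
- # RuntimeError would surface if the pager ever fetched past the final
- # page, whose empty `next_page_token` ends iteration.)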
- call.side_effect = ( - data_taxonomy.ListDataAttributeBindingsResponse( - data_attribute_bindings=[ - data_taxonomy.DataAttributeBinding(), - data_taxonomy.DataAttributeBinding(), - data_taxonomy.DataAttributeBinding(), - ], - next_page_token='abc', - ), - data_taxonomy.ListDataAttributeBindingsResponse( - data_attribute_bindings=[], - next_page_token='def', - ), - data_taxonomy.ListDataAttributeBindingsResponse( - data_attribute_bindings=[ - data_taxonomy.DataAttributeBinding(), - ], - next_page_token='ghi', - ), - data_taxonomy.ListDataAttributeBindingsResponse( - data_attribute_bindings=[ - data_taxonomy.DataAttributeBinding(), - data_taxonomy.DataAttributeBinding(), - ], - ), - RuntimeError, - ) - pages = list(client.list_data_attribute_bindings(request={}).pages) - for page_, token in zip(pages, ['abc','def','ghi', '']): - assert page_.raw_page.next_page_token == token - -@pytest.mark.asyncio -async def test_list_data_attribute_bindings_async_pager(): - client = DataTaxonomyServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_data_attribute_bindings), - '__call__', new_callable=mock.AsyncMock) as call: - # Set the response to a series of pages. - call.side_effect = ( - data_taxonomy.ListDataAttributeBindingsResponse( - data_attribute_bindings=[ - data_taxonomy.DataAttributeBinding(), - data_taxonomy.DataAttributeBinding(), - data_taxonomy.DataAttributeBinding(), - ], - next_page_token='abc', - ), - data_taxonomy.ListDataAttributeBindingsResponse( - data_attribute_bindings=[], - next_page_token='def', - ), - data_taxonomy.ListDataAttributeBindingsResponse( - data_attribute_bindings=[ - data_taxonomy.DataAttributeBinding(), - ], - next_page_token='ghi', - ), - data_taxonomy.ListDataAttributeBindingsResponse( - data_attribute_bindings=[ - data_taxonomy.DataAttributeBinding(), - data_taxonomy.DataAttributeBinding(), - ], - ), - RuntimeError, - ) - async_pager = await client.list_data_attribute_bindings(request={},) - assert async_pager.next_page_token == 'abc' - responses = [] - async for response in async_pager: # pragma: no branch - responses.append(response) - - assert len(responses) == 6 - assert all(isinstance(i, data_taxonomy.DataAttributeBinding) - for i in responses) - - -@pytest.mark.asyncio -async def test_list_data_attribute_bindings_async_pages(): - client = DataTaxonomyServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_data_attribute_bindings), - '__call__', new_callable=mock.AsyncMock) as call: - # Set the response to a series of pages. 
- call.side_effect = ( - data_taxonomy.ListDataAttributeBindingsResponse( - data_attribute_bindings=[ - data_taxonomy.DataAttributeBinding(), - data_taxonomy.DataAttributeBinding(), - data_taxonomy.DataAttributeBinding(), - ], - next_page_token='abc', - ), - data_taxonomy.ListDataAttributeBindingsResponse( - data_attribute_bindings=[], - next_page_token='def', - ), - data_taxonomy.ListDataAttributeBindingsResponse( - data_attribute_bindings=[ - data_taxonomy.DataAttributeBinding(), - ], - next_page_token='ghi', - ), - data_taxonomy.ListDataAttributeBindingsResponse( - data_attribute_bindings=[ - data_taxonomy.DataAttributeBinding(), - data_taxonomy.DataAttributeBinding(), - ], - ), - RuntimeError, - ) - pages = [] - # Workaround issue in python 3.9 related to code coverage by adding `# pragma: no branch` - # See https://github.com/googleapis/gapic-generator-python/pull/1174#issuecomment-1025132372 - async for page_ in ( # pragma: no branch - await client.list_data_attribute_bindings(request={}) - ).pages: - pages.append(page_) - for page_, token in zip(pages, ['abc','def','ghi', '']): - assert page_.raw_page.next_page_token == token - -@pytest.mark.parametrize("request_type", [ - data_taxonomy.GetDataAttributeBindingRequest, - dict, -]) -def test_get_data_attribute_binding(request_type, transport: str = 'grpc'): - client = DataTaxonomyServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_data_attribute_binding), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = data_taxonomy.DataAttributeBinding( - name='name_value', - uid='uid_value', - description='description_value', - display_name='display_name_value', - etag='etag_value', - attributes=['attributes_value'], - resource='resource_value', - ) - response = client.get_data_attribute_binding(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - request = data_taxonomy.GetDataAttributeBindingRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, data_taxonomy.DataAttributeBinding) - assert response.name == 'name_value' - assert response.uid == 'uid_value' - assert response.description == 'description_value' - assert response.display_name == 'display_name_value' - assert response.etag == 'etag_value' - assert response.attributes == ['attributes_value'] - - -def test_get_data_attribute_binding_non_empty_request_with_auto_populated_field(): - # This test is a coverage failsafe to make sure that UUID4 fields are - # automatically populated, according to AIP-4235, with non-empty requests. - client = DataTaxonomyServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. - request = data_taxonomy.GetDataAttributeBindingRequest( - name='name_value', - ) - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.get_data_attribute_binding), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client.get_data_attribute_binding(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == data_taxonomy.GetDataAttributeBindingRequest( - name='name_value', - ) - -def test_get_data_attribute_binding_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = DataTaxonomyServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.get_data_attribute_binding in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.get_data_attribute_binding] = mock_rpc - request = {} - client.get_data_attribute_binding(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - client.get_data_attribute_binding(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_get_data_attribute_binding_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: - client = DataTaxonomyServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._client._transport.get_data_attribute_binding in client._client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.AsyncMock() - mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[client._client._transport.get_data_attribute_binding] = mock_rpc - - request = {} - await client.get_data_attribute_binding(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - await client.get_data_attribute_binding(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_get_data_attribute_binding_async(transport: str = 'grpc_asyncio', request_type=data_taxonomy.GetDataAttributeBindingRequest): - client = DataTaxonomyServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object(
- type(client.transport.get_data_attribute_binding),
- '__call__') as call:
- # Designate an appropriate return value for the call.
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(data_taxonomy.DataAttributeBinding(
- name='name_value',
- uid='uid_value',
- description='description_value',
- display_name='display_name_value',
- etag='etag_value',
- attributes=['attributes_value'],
- ))
- response = await client.get_data_attribute_binding(request)
-
- # Establish that the underlying gRPC stub method was called.
- assert len(call.mock_calls)
- _, args, _ = call.mock_calls[0]
- request = data_taxonomy.GetDataAttributeBindingRequest()
- assert args[0] == request
-
- # Establish that the response is the type that we expect.
- assert isinstance(response, data_taxonomy.DataAttributeBinding)
- assert response.name == 'name_value'
- assert response.uid == 'uid_value'
- assert response.description == 'description_value'
- assert response.display_name == 'display_name_value'
- assert response.etag == 'etag_value'
- assert response.attributes == ['attributes_value']
-
-
-@pytest.mark.asyncio
-async def test_get_data_attribute_binding_async_from_dict():
- await test_get_data_attribute_binding_async(request_type=dict)
-
-def test_get_data_attribute_binding_field_headers():
- client = DataTaxonomyServiceClient(
- credentials=ga_credentials.AnonymousCredentials(),
- )
-
- # Any value that is part of the HTTP/1.1 URI should be sent as
- # a field header. Set these to a non-empty value.
- request = data_taxonomy.GetDataAttributeBindingRequest()
-
- request.name = 'name_value'
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client.transport.get_data_attribute_binding),
- '__call__') as call:
- call.return_value = data_taxonomy.DataAttributeBinding()
- client.get_data_attribute_binding(request)
-
- # Establish that the underlying gRPC stub method was called.
- assert len(call.mock_calls) == 1
- _, args, _ = call.mock_calls[0]
- assert args[0] == request
-
- # Establish that the field header was sent.
- _, _, kw = call.mock_calls[0]
- assert (
- 'x-goog-request-params',
- 'name=name_value',
- ) in kw['metadata']
-
-
-@pytest.mark.asyncio
-async def test_get_data_attribute_binding_field_headers_async():
- client = DataTaxonomyServiceAsyncClient(
- credentials=async_anonymous_credentials(),
- )
-
- # Any value that is part of the HTTP/1.1 URI should be sent as
- # a field header. Set these to a non-empty value.
- request = data_taxonomy.GetDataAttributeBindingRequest()
-
- request.name = 'name_value'
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client.transport.get_data_attribute_binding),
- '__call__') as call:
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(data_taxonomy.DataAttributeBinding())
- await client.get_data_attribute_binding(request)
-
- # Establish that the underlying gRPC stub method was called.
- assert len(call.mock_calls)
- _, args, _ = call.mock_calls[0]
- assert args[0] == request
-
- # Establish that the field header was sent.
- _, _, kw = call.mock_calls[0]
- assert (
- 'x-goog-request-params',
- 'name=name_value',
- ) in kw['metadata']
-
-
-def test_get_data_attribute_binding_flattened():
- client = DataTaxonomyServiceClient(
- credentials=ga_credentials.AnonymousCredentials(),
- )
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client.transport.get_data_attribute_binding),
- '__call__') as call:
- # Designate an appropriate return value for the call.
- call.return_value = data_taxonomy.DataAttributeBinding()
- # Call the method with a truthy value for each flattened field,
- # using the keyword arguments to the method.
- client.get_data_attribute_binding(
- name='name_value',
- )
-
- # Establish that the underlying call was made with the expected
- # request object values.
- assert len(call.mock_calls) == 1
- _, args, _ = call.mock_calls[0]
- arg = args[0].name
- mock_val = 'name_value'
- assert arg == mock_val
-
-
-def test_get_data_attribute_binding_flattened_error():
- client = DataTaxonomyServiceClient(
- credentials=ga_credentials.AnonymousCredentials(),
- )
-
- # Attempting to call a method with both a request object and flattened
- # fields is an error.
- with pytest.raises(ValueError):
- client.get_data_attribute_binding(
- data_taxonomy.GetDataAttributeBindingRequest(),
- name='name_value',
- )
-
-@pytest.mark.asyncio
-async def test_get_data_attribute_binding_flattened_async():
- client = DataTaxonomyServiceAsyncClient(
- credentials=async_anonymous_credentials(),
- )
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client.transport.get_data_attribute_binding),
- '__call__') as call:
- # Designate an appropriate return value for the call.
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(data_taxonomy.DataAttributeBinding())
- # Call the method with a truthy value for each flattened field,
- # using the keyword arguments to the method.
- response = await client.get_data_attribute_binding(
- name='name_value',
- )
-
- # Establish that the underlying call was made with the expected
- # request object values.
- assert len(call.mock_calls)
- _, args, _ = call.mock_calls[0]
- arg = args[0].name
- mock_val = 'name_value'
- assert arg == mock_val
-
-@pytest.mark.asyncio
-async def test_get_data_attribute_binding_flattened_error_async():
- client = DataTaxonomyServiceAsyncClient(
- credentials=async_anonymous_credentials(),
- )
-
- # Attempting to call a method with both a request object and flattened
- # fields is an error.
- with pytest.raises(ValueError):
- await client.get_data_attribute_binding(
- data_taxonomy.GetDataAttributeBindingRequest(),
- name='name_value',
- )
-
-
-@pytest.mark.parametrize("request_type", [
- data_taxonomy.CreateDataAttributeRequest,
- dict,
-])
-def test_create_data_attribute(request_type, transport: str = 'grpc'):
- client = DataTaxonomyServiceClient(
- credentials=ga_credentials.AnonymousCredentials(),
- transport=transport,
- )
-
- # Everything is optional in proto3 as far as the runtime is concerned,
- # and we are mocking out the actual API, so just send an empty request.
- request = request_type()
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client.transport.create_data_attribute),
- '__call__') as call:
- # Designate an appropriate return value for the call.
- call.return_value = operations_pb2.Operation(name='operations/spam')
- response = client.create_data_attribute(request)
-
- # Establish that the underlying gRPC stub method was called.
- assert len(call.mock_calls) == 1
- _, args, _ = call.mock_calls[0]
- request = data_taxonomy.CreateDataAttributeRequest()
- assert args[0] == request
-
- # Establish that the response is the type that we expect.
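- # (create_data_attribute is a long-running operation: the client wraps
- # the raw operations_pb2.Operation in a google.api_core future, so the
- # test asserts the Future interface rather than the proto message.)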
- assert isinstance(response, future.Future) - - -def test_create_data_attribute_non_empty_request_with_auto_populated_field(): - # This test is a coverage failsafe to make sure that UUID4 fields are - # automatically populated, according to AIP-4235, with non-empty requests. - client = DataTaxonomyServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. - request = data_taxonomy.CreateDataAttributeRequest( - parent='parent_value', - data_attribute_id='data_attribute_id_value', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_data_attribute), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client.create_data_attribute(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == data_taxonomy.CreateDataAttributeRequest( - parent='parent_value', - data_attribute_id='data_attribute_id_value', - ) - -def test_create_data_attribute_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = DataTaxonomyServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.create_data_attribute in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.create_data_attribute] = mock_rpc - request = {} - client.create_data_attribute(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - # Operation methods call wrapper_fn to build a cached - # client._transport.operations_client instance on first rpc call. 
- # Subsequent calls should use the cached wrapper - wrapper_fn.reset_mock() - - client.create_data_attribute(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_create_data_attribute_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: - client = DataTaxonomyServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._client._transport.create_data_attribute in client._client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.AsyncMock() - mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[client._client._transport.create_data_attribute] = mock_rpc - - request = {} - await client.create_data_attribute(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - # Operation methods call wrapper_fn to build a cached - # client._transport.operations_client instance on first rpc call. - # Subsequent calls should use the cached wrapper - wrapper_fn.reset_mock() - - await client.create_data_attribute(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_create_data_attribute_async(transport: str = 'grpc_asyncio', request_type=data_taxonomy.CreateDataAttributeRequest): - client = DataTaxonomyServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_data_attribute), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name='operations/spam') - ) - response = await client.create_data_attribute(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - request = data_taxonomy.CreateDataAttributeRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, future.Future) - - -@pytest.mark.asyncio -async def test_create_data_attribute_async_from_dict(): - await test_create_data_attribute_async(request_type=dict) - -def test_create_data_attribute_field_headers(): - client = DataTaxonomyServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = data_taxonomy.CreateDataAttributeRequest() - - request.parent = 'parent_value' - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.create_data_attribute), - '__call__') as call: - call.return_value = operations_pb2.Operation(name='operations/op') - client.create_data_attribute(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'parent=parent_value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_create_data_attribute_field_headers_async(): - client = DataTaxonomyServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = data_taxonomy.CreateDataAttributeRequest() - - request.parent = 'parent_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_data_attribute), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(operations_pb2.Operation(name='operations/op')) - await client.create_data_attribute(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'parent=parent_value', - ) in kw['metadata'] - - -def test_create_data_attribute_flattened(): - client = DataTaxonomyServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_data_attribute), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = operations_pb2.Operation(name='operations/op') - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.create_data_attribute( - parent='parent_value', - data_attribute=data_taxonomy.DataAttribute(name='name_value'), - data_attribute_id='data_attribute_id_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - arg = args[0].parent - mock_val = 'parent_value' - assert arg == mock_val - arg = args[0].data_attribute - mock_val = data_taxonomy.DataAttribute(name='name_value') - assert arg == mock_val - arg = args[0].data_attribute_id - mock_val = 'data_attribute_id_value' - assert arg == mock_val - - -def test_create_data_attribute_flattened_error(): - client = DataTaxonomyServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.create_data_attribute( - data_taxonomy.CreateDataAttributeRequest(), - parent='parent_value', - data_attribute=data_taxonomy.DataAttribute(name='name_value'), - data_attribute_id='data_attribute_id_value', - ) - -@pytest.mark.asyncio -async def test_create_data_attribute_flattened_async(): - client = DataTaxonomyServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object(
- type(client.transport.create_data_attribute),
- '__call__') as call:
- # Designate an appropriate return value for the call.
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
- operations_pb2.Operation(name='operations/spam')
- )
- # Call the method with a truthy value for each flattened field,
- # using the keyword arguments to the method.
- response = await client.create_data_attribute(
- parent='parent_value',
- data_attribute=data_taxonomy.DataAttribute(name='name_value'),
- data_attribute_id='data_attribute_id_value',
- )
-
- # Establish that the underlying call was made with the expected
- # request object values.
- assert len(call.mock_calls)
- _, args, _ = call.mock_calls[0]
- arg = args[0].parent
- mock_val = 'parent_value'
- assert arg == mock_val
- arg = args[0].data_attribute
- mock_val = data_taxonomy.DataAttribute(name='name_value')
- assert arg == mock_val
- arg = args[0].data_attribute_id
- mock_val = 'data_attribute_id_value'
- assert arg == mock_val
-
-@pytest.mark.asyncio
-async def test_create_data_attribute_flattened_error_async():
- client = DataTaxonomyServiceAsyncClient(
- credentials=async_anonymous_credentials(),
- )
-
- # Attempting to call a method with both a request object and flattened
- # fields is an error.
- with pytest.raises(ValueError):
- await client.create_data_attribute(
- data_taxonomy.CreateDataAttributeRequest(),
- parent='parent_value',
- data_attribute=data_taxonomy.DataAttribute(name='name_value'),
- data_attribute_id='data_attribute_id_value',
- )
-
-
-@pytest.mark.parametrize("request_type", [
- data_taxonomy.UpdateDataAttributeRequest,
- dict,
-])
-def test_update_data_attribute(request_type, transport: str = 'grpc'):
- client = DataTaxonomyServiceClient(
- credentials=ga_credentials.AnonymousCredentials(),
- transport=transport,
- )
-
- # Everything is optional in proto3 as far as the runtime is concerned,
- # and we are mocking out the actual API, so just send an empty request.
- request = request_type()
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client.transport.update_data_attribute),
- '__call__') as call:
- # Designate an appropriate return value for the call.
- call.return_value = operations_pb2.Operation(name='operations/spam')
- response = client.update_data_attribute(request)
-
- # Establish that the underlying gRPC stub method was called.
- assert len(call.mock_calls) == 1
- _, args, _ = call.mock_calls[0]
- request = data_taxonomy.UpdateDataAttributeRequest()
- assert args[0] == request
-
- # Establish that the response is the type that we expect.
- assert isinstance(response, future.Future)
-
-
-def test_update_data_attribute_non_empty_request_with_auto_populated_field():
- # This test is a coverage failsafe to make sure that UUID4 fields are
- # automatically populated, according to AIP-4235, with non-empty requests.
- client = DataTaxonomyServiceClient(
- credentials=ga_credentials.AnonymousCredentials(),
- transport='grpc',
- )
-
- # Populate all string fields in the request which are not UUID4
- # since we want to check that UUID4 are populated automatically
- # if they meet the requirements of AIP 4235.
- request = data_taxonomy.UpdateDataAttributeRequest(
- )
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object( - type(client.transport.update_data_attribute), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client.update_data_attribute(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == data_taxonomy.UpdateDataAttributeRequest( - ) - -def test_update_data_attribute_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = DataTaxonomyServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.update_data_attribute in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.update_data_attribute] = mock_rpc - request = {} - client.update_data_attribute(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - # Operation methods call wrapper_fn to build a cached - # client._transport.operations_client instance on first rpc call. - # Subsequent calls should use the cached wrapper - wrapper_fn.reset_mock() - - client.update_data_attribute(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_update_data_attribute_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: - client = DataTaxonomyServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._client._transport.update_data_attribute in client._client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.AsyncMock() - mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[client._client._transport.update_data_attribute] = mock_rpc - - request = {} - await client.update_data_attribute(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - # Operation methods call wrapper_fn to build a cached - # client._transport.operations_client instance on first rpc call. 
- # Subsequent calls should use the cached wrapper - wrapper_fn.reset_mock() - - await client.update_data_attribute(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_update_data_attribute_async(transport: str = 'grpc_asyncio', request_type=data_taxonomy.UpdateDataAttributeRequest): - client = DataTaxonomyServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_data_attribute), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name='operations/spam') - ) - response = await client.update_data_attribute(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - request = data_taxonomy.UpdateDataAttributeRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, future.Future) - - -@pytest.mark.asyncio -async def test_update_data_attribute_async_from_dict(): - await test_update_data_attribute_async(request_type=dict) - -def test_update_data_attribute_field_headers(): - client = DataTaxonomyServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = data_taxonomy.UpdateDataAttributeRequest() - - request.data_attribute.name = 'name_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_data_attribute), - '__call__') as call: - call.return_value = operations_pb2.Operation(name='operations/op') - client.update_data_attribute(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'data_attribute.name=name_value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_update_data_attribute_field_headers_async(): - client = DataTaxonomyServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = data_taxonomy.UpdateDataAttributeRequest() - - request.data_attribute.name = 'name_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_data_attribute), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(operations_pb2.Operation(name='operations/op')) - await client.update_data_attribute(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. 
- _, _, kw = call.mock_calls[0]
- assert (
- 'x-goog-request-params',
- 'data_attribute.name=name_value',
- ) in kw['metadata']
-
-
-def test_update_data_attribute_flattened():
- client = DataTaxonomyServiceClient(
- credentials=ga_credentials.AnonymousCredentials(),
- )
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client.transport.update_data_attribute),
- '__call__') as call:
- # Designate an appropriate return value for the call.
- call.return_value = operations_pb2.Operation(name='operations/op')
- # Call the method with a truthy value for each flattened field,
- # using the keyword arguments to the method.
- client.update_data_attribute(
- data_attribute=data_taxonomy.DataAttribute(name='name_value'),
- update_mask=field_mask_pb2.FieldMask(paths=['paths_value']),
- )
-
- # Establish that the underlying call was made with the expected
- # request object values.
- assert len(call.mock_calls) == 1
- _, args, _ = call.mock_calls[0]
- arg = args[0].data_attribute
- mock_val = data_taxonomy.DataAttribute(name='name_value')
- assert arg == mock_val
- arg = args[0].update_mask
- mock_val = field_mask_pb2.FieldMask(paths=['paths_value'])
- assert arg == mock_val
-
-
-def test_update_data_attribute_flattened_error():
- client = DataTaxonomyServiceClient(
- credentials=ga_credentials.AnonymousCredentials(),
- )
-
- # Attempting to call a method with both a request object and flattened
- # fields is an error.
- with pytest.raises(ValueError):
- client.update_data_attribute(
- data_taxonomy.UpdateDataAttributeRequest(),
- data_attribute=data_taxonomy.DataAttribute(name='name_value'),
- update_mask=field_mask_pb2.FieldMask(paths=['paths_value']),
- )
-
-@pytest.mark.asyncio
-async def test_update_data_attribute_flattened_async():
- client = DataTaxonomyServiceAsyncClient(
- credentials=async_anonymous_credentials(),
- )
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client.transport.update_data_attribute),
- '__call__') as call:
- # Designate an appropriate return value for the call.
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
- operations_pb2.Operation(name='operations/spam')
- )
- # Call the method with a truthy value for each flattened field,
- # using the keyword arguments to the method.
- response = await client.update_data_attribute(
- data_attribute=data_taxonomy.DataAttribute(name='name_value'),
- update_mask=field_mask_pb2.FieldMask(paths=['paths_value']),
- )
-
- # Establish that the underlying call was made with the expected
- # request object values.
- assert len(call.mock_calls)
- _, args, _ = call.mock_calls[0]
- arg = args[0].data_attribute
- mock_val = data_taxonomy.DataAttribute(name='name_value')
- assert arg == mock_val
- arg = args[0].update_mask
- mock_val = field_mask_pb2.FieldMask(paths=['paths_value'])
- assert arg == mock_val
-
-@pytest.mark.asyncio
-async def test_update_data_attribute_flattened_error_async():
- client = DataTaxonomyServiceAsyncClient(
- credentials=async_anonymous_credentials(),
- )
-
- # Attempting to call a method with both a request object and flattened
- # fields is an error.
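- # (Flattened keyword arguments are shorthand the client uses to build
- # the request for you; combining them with an explicit request object
- # would be ambiguous, hence the ValueError.)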
- with pytest.raises(ValueError): - await client.update_data_attribute( - data_taxonomy.UpdateDataAttributeRequest(), - data_attribute=data_taxonomy.DataAttribute(name='name_value'), - update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), - ) - - -@pytest.mark.parametrize("request_type", [ - data_taxonomy.DeleteDataAttributeRequest, - dict, -]) -def test_delete_data_attribute(request_type, transport: str = 'grpc'): - client = DataTaxonomyServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_data_attribute), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = operations_pb2.Operation(name='operations/spam') - response = client.delete_data_attribute(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - request = data_taxonomy.DeleteDataAttributeRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, future.Future) - - -def test_delete_data_attribute_non_empty_request_with_auto_populated_field(): - # This test is a coverage failsafe to make sure that UUID4 fields are - # automatically populated, according to AIP-4235, with non-empty requests. - client = DataTaxonomyServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. - request = data_taxonomy.DeleteDataAttributeRequest( - name='name_value', - etag='etag_value', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_data_attribute), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client.delete_data_attribute(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == data_taxonomy.DeleteDataAttributeRequest( - name='name_value', - etag='etag_value', - ) - -def test_delete_data_attribute_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = DataTaxonomyServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.delete_data_attribute in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.delete_data_attribute] = mock_rpc - request = {} - client.delete_data_attribute(request) - - # Establish that the underlying gRPC stub method was called. 
- assert mock_rpc.call_count == 1 - - # Operation methods call wrapper_fn to build a cached - # client._transport.operations_client instance on first rpc call. - # Subsequent calls should use the cached wrapper - wrapper_fn.reset_mock() - - client.delete_data_attribute(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_delete_data_attribute_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: - client = DataTaxonomyServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._client._transport.delete_data_attribute in client._client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.AsyncMock() - mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[client._client._transport.delete_data_attribute] = mock_rpc - - request = {} - await client.delete_data_attribute(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - # Operation methods call wrapper_fn to build a cached - # client._transport.operations_client instance on first rpc call. - # Subsequent calls should use the cached wrapper - wrapper_fn.reset_mock() - - await client.delete_data_attribute(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_delete_data_attribute_async(transport: str = 'grpc_asyncio', request_type=data_taxonomy.DeleteDataAttributeRequest): - client = DataTaxonomyServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_data_attribute), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name='operations/spam') - ) - response = await client.delete_data_attribute(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - request = data_taxonomy.DeleteDataAttributeRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, future.Future) - - -@pytest.mark.asyncio -async def test_delete_data_attribute_async_from_dict(): - await test_delete_data_attribute_async(request_type=dict) - -def test_delete_data_attribute_field_headers(): - client = DataTaxonomyServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. 
- request = data_taxonomy.DeleteDataAttributeRequest() - - request.name = 'name_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_data_attribute), - '__call__') as call: - call.return_value = operations_pb2.Operation(name='operations/op') - client.delete_data_attribute(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'name=name_value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_delete_data_attribute_field_headers_async(): - client = DataTaxonomyServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = data_taxonomy.DeleteDataAttributeRequest() - - request.name = 'name_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_data_attribute), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(operations_pb2.Operation(name='operations/op')) - await client.delete_data_attribute(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'name=name_value', - ) in kw['metadata'] - - -def test_delete_data_attribute_flattened(): - client = DataTaxonomyServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_data_attribute), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = operations_pb2.Operation(name='operations/op') - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.delete_data_attribute( - name='name_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = 'name_value' - assert arg == mock_val - - -def test_delete_data_attribute_flattened_error(): - client = DataTaxonomyServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.delete_data_attribute( - data_taxonomy.DeleteDataAttributeRequest(), - name='name_value', - ) - -@pytest.mark.asyncio -async def test_delete_data_attribute_flattened_async(): - client = DataTaxonomyServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_data_attribute), - '__call__') as call: - # Designate an appropriate return value for the call. 
- call.return_value = operations_pb2.Operation(name='operations/op') - - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name='operations/spam') - ) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.delete_data_attribute( - name='name_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = 'name_value' - assert arg == mock_val - -@pytest.mark.asyncio -async def test_delete_data_attribute_flattened_error_async(): - client = DataTaxonomyServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - await client.delete_data_attribute( - data_taxonomy.DeleteDataAttributeRequest(), - name='name_value', - ) - - -@pytest.mark.parametrize("request_type", [ - data_taxonomy.ListDataAttributesRequest, - dict, -]) -def test_list_data_attributes(request_type, transport: str = 'grpc'): - client = DataTaxonomyServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_data_attributes), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = data_taxonomy.ListDataAttributesResponse( - next_page_token='next_page_token_value', - unreachable_locations=['unreachable_locations_value'], - ) - response = client.list_data_attributes(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - request = data_taxonomy.ListDataAttributesRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, pagers.ListDataAttributesPager) - assert response.next_page_token == 'next_page_token_value' - assert response.unreachable_locations == ['unreachable_locations_value'] - - -def test_list_data_attributes_non_empty_request_with_auto_populated_field(): - # This test is a coverage failsafe to make sure that UUID4 fields are - # automatically populated, according to AIP-4235, with non-empty requests. - client = DataTaxonomyServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. - request = data_taxonomy.ListDataAttributesRequest( - parent='parent_value', - page_token='page_token_value', - filter='filter_value', - order_by='order_by_value', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_data_attributes), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. 
- client.list_data_attributes(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == data_taxonomy.ListDataAttributesRequest( - parent='parent_value', - page_token='page_token_value', - filter='filter_value', - order_by='order_by_value', - ) - -def test_list_data_attributes_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = DataTaxonomyServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.list_data_attributes in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.list_data_attributes] = mock_rpc - request = {} - client.list_data_attributes(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - client.list_data_attributes(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_list_data_attributes_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: - client = DataTaxonomyServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._client._transport.list_data_attributes in client._client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.AsyncMock() - mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[client._client._transport.list_data_attributes] = mock_rpc - - request = {} - await client.list_data_attributes(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - await client.list_data_attributes(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_list_data_attributes_async(transport: str = 'grpc_asyncio', request_type=data_taxonomy.ListDataAttributesRequest): - client = DataTaxonomyServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_data_attributes), - '__call__') as call: - # Designate an appropriate return value for the call. 
- call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(data_taxonomy.ListDataAttributesResponse( - next_page_token='next_page_token_value', - unreachable_locations=['unreachable_locations_value'], - )) - response = await client.list_data_attributes(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - request = data_taxonomy.ListDataAttributesRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, pagers.ListDataAttributesAsyncPager) - assert response.next_page_token == 'next_page_token_value' - assert response.unreachable_locations == ['unreachable_locations_value'] - - -@pytest.mark.asyncio -async def test_list_data_attributes_async_from_dict(): - await test_list_data_attributes_async(request_type=dict) - -def test_list_data_attributes_field_headers(): - client = DataTaxonomyServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = data_taxonomy.ListDataAttributesRequest() - - request.parent = 'parent_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_data_attributes), - '__call__') as call: - call.return_value = data_taxonomy.ListDataAttributesResponse() - client.list_data_attributes(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'parent=parent_value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_list_data_attributes_field_headers_async(): - client = DataTaxonomyServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = data_taxonomy.ListDataAttributesRequest() - - request.parent = 'parent_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_data_attributes), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(data_taxonomy.ListDataAttributesResponse()) - await client.list_data_attributes(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'parent=parent_value', - ) in kw['metadata'] - - -def test_list_data_attributes_flattened(): - client = DataTaxonomyServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_data_attributes), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = data_taxonomy.ListDataAttributesResponse() - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.list_data_attributes( - parent='parent_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. 
- assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - arg = args[0].parent - mock_val = 'parent_value' - assert arg == mock_val - - -def test_list_data_attributes_flattened_error(): - client = DataTaxonomyServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.list_data_attributes( - data_taxonomy.ListDataAttributesRequest(), - parent='parent_value', - ) - -@pytest.mark.asyncio -async def test_list_data_attributes_flattened_async(): - client = DataTaxonomyServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_data_attributes), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = data_taxonomy.ListDataAttributesResponse() - - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(data_taxonomy.ListDataAttributesResponse()) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.list_data_attributes( - parent='parent_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].parent - mock_val = 'parent_value' - assert arg == mock_val - -@pytest.mark.asyncio -async def test_list_data_attributes_flattened_error_async(): - client = DataTaxonomyServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - await client.list_data_attributes( - data_taxonomy.ListDataAttributesRequest(), - parent='parent_value', - ) - - -def test_list_data_attributes_pager(transport_name: str = "grpc"): - client = DataTaxonomyServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport_name, - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_data_attributes), - '__call__') as call: - # Set the response to a series of pages. 
- call.side_effect = ( - data_taxonomy.ListDataAttributesResponse( - data_attributes=[ - data_taxonomy.DataAttribute(), - data_taxonomy.DataAttribute(), - data_taxonomy.DataAttribute(), - ], - next_page_token='abc', - ), - data_taxonomy.ListDataAttributesResponse( - data_attributes=[], - next_page_token='def', - ), - data_taxonomy.ListDataAttributesResponse( - data_attributes=[ - data_taxonomy.DataAttribute(), - ], - next_page_token='ghi', - ), - data_taxonomy.ListDataAttributesResponse( - data_attributes=[ - data_taxonomy.DataAttribute(), - data_taxonomy.DataAttribute(), - ], - ), - RuntimeError, - ) - - expected_metadata = () - retry = retries.Retry() - timeout = 5 - expected_metadata = tuple(expected_metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ('parent', ''), - )), - ) - pager = client.list_data_attributes(request={}, retry=retry, timeout=timeout) - - assert pager._metadata == expected_metadata - assert pager._retry == retry - assert pager._timeout == timeout - - results = list(pager) - assert len(results) == 6 - assert all(isinstance(i, data_taxonomy.DataAttribute) - for i in results) -def test_list_data_attributes_pages(transport_name: str = "grpc"): - client = DataTaxonomyServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport_name, - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_data_attributes), - '__call__') as call: - # Set the response to a series of pages. - call.side_effect = ( - data_taxonomy.ListDataAttributesResponse( - data_attributes=[ - data_taxonomy.DataAttribute(), - data_taxonomy.DataAttribute(), - data_taxonomy.DataAttribute(), - ], - next_page_token='abc', - ), - data_taxonomy.ListDataAttributesResponse( - data_attributes=[], - next_page_token='def', - ), - data_taxonomy.ListDataAttributesResponse( - data_attributes=[ - data_taxonomy.DataAttribute(), - ], - next_page_token='ghi', - ), - data_taxonomy.ListDataAttributesResponse( - data_attributes=[ - data_taxonomy.DataAttribute(), - data_taxonomy.DataAttribute(), - ], - ), - RuntimeError, - ) - pages = list(client.list_data_attributes(request={}).pages) - for page_, token in zip(pages, ['abc','def','ghi', '']): - assert page_.raw_page.next_page_token == token - -@pytest.mark.asyncio -async def test_list_data_attributes_async_pager(): - client = DataTaxonomyServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_data_attributes), - '__call__', new_callable=mock.AsyncMock) as call: - # Set the response to a series of pages. 
- call.side_effect = ( - data_taxonomy.ListDataAttributesResponse( - data_attributes=[ - data_taxonomy.DataAttribute(), - data_taxonomy.DataAttribute(), - data_taxonomy.DataAttribute(), - ], - next_page_token='abc', - ), - data_taxonomy.ListDataAttributesResponse( - data_attributes=[], - next_page_token='def', - ), - data_taxonomy.ListDataAttributesResponse( - data_attributes=[ - data_taxonomy.DataAttribute(), - ], - next_page_token='ghi', - ), - data_taxonomy.ListDataAttributesResponse( - data_attributes=[ - data_taxonomy.DataAttribute(), - data_taxonomy.DataAttribute(), - ], - ), - RuntimeError, - ) - async_pager = await client.list_data_attributes(request={},) - assert async_pager.next_page_token == 'abc' - responses = [] - async for response in async_pager: # pragma: no branch - responses.append(response) - - assert len(responses) == 6 - assert all(isinstance(i, data_taxonomy.DataAttribute) - for i in responses) - - -@pytest.mark.asyncio -async def test_list_data_attributes_async_pages(): - client = DataTaxonomyServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_data_attributes), - '__call__', new_callable=mock.AsyncMock) as call: - # Set the response to a series of pages. - call.side_effect = ( - data_taxonomy.ListDataAttributesResponse( - data_attributes=[ - data_taxonomy.DataAttribute(), - data_taxonomy.DataAttribute(), - data_taxonomy.DataAttribute(), - ], - next_page_token='abc', - ), - data_taxonomy.ListDataAttributesResponse( - data_attributes=[], - next_page_token='def', - ), - data_taxonomy.ListDataAttributesResponse( - data_attributes=[ - data_taxonomy.DataAttribute(), - ], - next_page_token='ghi', - ), - data_taxonomy.ListDataAttributesResponse( - data_attributes=[ - data_taxonomy.DataAttribute(), - data_taxonomy.DataAttribute(), - ], - ), - RuntimeError, - ) - pages = [] - # Workaround issue in python 3.9 related to code coverage by adding `# pragma: no branch` - # See https://github.com/googleapis/gapic-generator-python/pull/1174#issuecomment-1025132372 - async for page_ in ( # pragma: no branch - await client.list_data_attributes(request={}) - ).pages: - pages.append(page_) - for page_, token in zip(pages, ['abc','def','ghi', '']): - assert page_.raw_page.next_page_token == token - -@pytest.mark.parametrize("request_type", [ - data_taxonomy.GetDataAttributeRequest, - dict, -]) -def test_get_data_attribute(request_type, transport: str = 'grpc'): - client = DataTaxonomyServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_data_attribute), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = data_taxonomy.DataAttribute( - name='name_value', - uid='uid_value', - description='description_value', - display_name='display_name_value', - parent_id='parent_id_value', - attribute_count=1628, - etag='etag_value', - ) - response = client.get_data_attribute(request) - - # Establish that the underlying gRPC stub method was called. 
- assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - request = data_taxonomy.GetDataAttributeRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, data_taxonomy.DataAttribute) - assert response.name == 'name_value' - assert response.uid == 'uid_value' - assert response.description == 'description_value' - assert response.display_name == 'display_name_value' - assert response.parent_id == 'parent_id_value' - assert response.attribute_count == 1628 - assert response.etag == 'etag_value' - - -def test_get_data_attribute_non_empty_request_with_auto_populated_field(): - # This test is a coverage failsafe to make sure that UUID4 fields are - # automatically populated, according to AIP-4235, with non-empty requests. - client = DataTaxonomyServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. - request = data_taxonomy.GetDataAttributeRequest( - name='name_value', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_data_attribute), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client.get_data_attribute(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == data_taxonomy.GetDataAttributeRequest( - name='name_value', - ) - -def test_get_data_attribute_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = DataTaxonomyServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.get_data_attribute in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.get_data_attribute] = mock_rpc - request = {} - client.get_data_attribute(request) - - # Establish that the underlying gRPC stub method was called. 
- assert mock_rpc.call_count == 1 - - client.get_data_attribute(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_get_data_attribute_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: - client = DataTaxonomyServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._client._transport.get_data_attribute in client._client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.AsyncMock() - mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[client._client._transport.get_data_attribute] = mock_rpc - - request = {} - await client.get_data_attribute(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - await client.get_data_attribute(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_get_data_attribute_async(transport: str = 'grpc_asyncio', request_type=data_taxonomy.GetDataAttributeRequest): - client = DataTaxonomyServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_data_attribute), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(data_taxonomy.DataAttribute( - name='name_value', - uid='uid_value', - description='description_value', - display_name='display_name_value', - parent_id='parent_id_value', - attribute_count=1628, - etag='etag_value', - )) - response = await client.get_data_attribute(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - request = data_taxonomy.GetDataAttributeRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, data_taxonomy.DataAttribute) - assert response.name == 'name_value' - assert response.uid == 'uid_value' - assert response.description == 'description_value' - assert response.display_name == 'display_name_value' - assert response.parent_id == 'parent_id_value' - assert response.attribute_count == 1628 - assert response.etag == 'etag_value' - - -@pytest.mark.asyncio -async def test_get_data_attribute_async_from_dict(): - await test_get_data_attribute_async(request_type=dict) - -def test_get_data_attribute_field_headers(): - client = DataTaxonomyServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. 
- request = data_taxonomy.GetDataAttributeRequest() - - request.name = 'name_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_data_attribute), - '__call__') as call: - call.return_value = data_taxonomy.DataAttribute() - client.get_data_attribute(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'name=name_value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_get_data_attribute_field_headers_async(): - client = DataTaxonomyServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = data_taxonomy.GetDataAttributeRequest() - - request.name = 'name_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_data_attribute), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(data_taxonomy.DataAttribute()) - await client.get_data_attribute(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'name=name_value', - ) in kw['metadata'] - - -def test_get_data_attribute_flattened(): - client = DataTaxonomyServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_data_attribute), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = data_taxonomy.DataAttribute() - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.get_data_attribute( - name='name_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = 'name_value' - assert arg == mock_val - - -def test_get_data_attribute_flattened_error(): - client = DataTaxonomyServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.get_data_attribute( - data_taxonomy.GetDataAttributeRequest(), - name='name_value', - ) - -@pytest.mark.asyncio -async def test_get_data_attribute_flattened_async(): - client = DataTaxonomyServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_data_attribute), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = data_taxonomy.DataAttribute() - - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(data_taxonomy.DataAttribute()) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. 
- response = await client.get_data_attribute( - name='name_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = 'name_value' - assert arg == mock_val - -@pytest.mark.asyncio -async def test_get_data_attribute_flattened_error_async(): - client = DataTaxonomyServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - await client.get_data_attribute( - data_taxonomy.GetDataAttributeRequest(), - name='name_value', - ) - - -def test_credentials_transport_error(): - # It is an error to provide credentials and a transport instance. - transport = transports.DataTaxonomyServiceGrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - with pytest.raises(ValueError): - client = DataTaxonomyServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # It is an error to provide a credentials file and a transport instance. - transport = transports.DataTaxonomyServiceGrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - with pytest.raises(ValueError): - client = DataTaxonomyServiceClient( - client_options={"credentials_file": "credentials.json"}, - transport=transport, - ) - - # It is an error to provide an api_key and a transport instance. - transport = transports.DataTaxonomyServiceGrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - options = client_options.ClientOptions() - options.api_key = "api_key" - with pytest.raises(ValueError): - client = DataTaxonomyServiceClient( - client_options=options, - transport=transport, - ) - - # It is an error to provide an api_key and a credential. - options = client_options.ClientOptions() - options.api_key = "api_key" - with pytest.raises(ValueError): - client = DataTaxonomyServiceClient( - client_options=options, - credentials=ga_credentials.AnonymousCredentials() - ) - - # It is an error to provide scopes and a transport instance. - transport = transports.DataTaxonomyServiceGrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - with pytest.raises(ValueError): - client = DataTaxonomyServiceClient( - client_options={"scopes": ["1", "2"]}, - transport=transport, - ) - - -def test_transport_instance(): - # A client may be instantiated with a custom transport instance. - transport = transports.DataTaxonomyServiceGrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - client = DataTaxonomyServiceClient(transport=transport) - assert client.transport is transport - -def test_transport_get_channel(): - # A client may be instantiated with a custom transport instance. - transport = transports.DataTaxonomyServiceGrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - channel = transport.grpc_channel - assert channel - - transport = transports.DataTaxonomyServiceGrpcAsyncIOTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - channel = transport.grpc_channel - assert channel - -@pytest.mark.parametrize("transport_class", [ - transports.DataTaxonomyServiceGrpcTransport, - transports.DataTaxonomyServiceGrpcAsyncIOTransport, -]) -def test_transport_adc(transport_class): - # Test default credentials are used if not provided. 
- with mock.patch.object(google.auth, 'default') as adc: - adc.return_value = (ga_credentials.AnonymousCredentials(), None) - transport_class() - adc.assert_called_once() - -def test_transport_kind_grpc(): - transport = DataTaxonomyServiceClient.get_transport_class("grpc")( - credentials=ga_credentials.AnonymousCredentials() - ) - assert transport.kind == "grpc" - - -def test_initialize_client_w_grpc(): - client = DataTaxonomyServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc" - ) - assert client is not None - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_create_data_taxonomy_empty_call_grpc(): - client = DataTaxonomyServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.create_data_taxonomy), - '__call__') as call: - call.return_value = operations_pb2.Operation(name='operations/op') - client.create_data_taxonomy(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = gcd_data_taxonomy.CreateDataTaxonomyRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_update_data_taxonomy_empty_call_grpc(): - client = DataTaxonomyServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.update_data_taxonomy), - '__call__') as call: - call.return_value = operations_pb2.Operation(name='operations/op') - client.update_data_taxonomy(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = gcd_data_taxonomy.UpdateDataTaxonomyRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_delete_data_taxonomy_empty_call_grpc(): - client = DataTaxonomyServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.delete_data_taxonomy), - '__call__') as call: - call.return_value = operations_pb2.Operation(name='operations/op') - client.delete_data_taxonomy(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = data_taxonomy.DeleteDataTaxonomyRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_list_data_taxonomies_empty_call_grpc(): - client = DataTaxonomyServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.list_data_taxonomies), - '__call__') as call: - call.return_value = data_taxonomy.ListDataTaxonomiesResponse() - client.list_data_taxonomies(request=None) - - # Establish that the underlying stub method was called. 
- call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = data_taxonomy.ListDataTaxonomiesRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_get_data_taxonomy_empty_call_grpc(): - client = DataTaxonomyServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.get_data_taxonomy), - '__call__') as call: - call.return_value = data_taxonomy.DataTaxonomy() - client.get_data_taxonomy(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = data_taxonomy.GetDataTaxonomyRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_create_data_attribute_binding_empty_call_grpc(): - client = DataTaxonomyServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.create_data_attribute_binding), - '__call__') as call: - call.return_value = operations_pb2.Operation(name='operations/op') - client.create_data_attribute_binding(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = data_taxonomy.CreateDataAttributeBindingRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_update_data_attribute_binding_empty_call_grpc(): - client = DataTaxonomyServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.update_data_attribute_binding), - '__call__') as call: - call.return_value = operations_pb2.Operation(name='operations/op') - client.update_data_attribute_binding(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = data_taxonomy.UpdateDataAttributeBindingRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_delete_data_attribute_binding_empty_call_grpc(): - client = DataTaxonomyServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.delete_data_attribute_binding), - '__call__') as call: - call.return_value = operations_pb2.Operation(name='operations/op') - client.delete_data_attribute_binding(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = data_taxonomy.DeleteDataAttributeBindingRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. 
-def test_list_data_attribute_bindings_empty_call_grpc(): - client = DataTaxonomyServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.list_data_attribute_bindings), - '__call__') as call: - call.return_value = data_taxonomy.ListDataAttributeBindingsResponse() - client.list_data_attribute_bindings(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = data_taxonomy.ListDataAttributeBindingsRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_get_data_attribute_binding_empty_call_grpc(): - client = DataTaxonomyServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.get_data_attribute_binding), - '__call__') as call: - call.return_value = data_taxonomy.DataAttributeBinding() - client.get_data_attribute_binding(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = data_taxonomy.GetDataAttributeBindingRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_create_data_attribute_empty_call_grpc(): - client = DataTaxonomyServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.create_data_attribute), - '__call__') as call: - call.return_value = operations_pb2.Operation(name='operations/op') - client.create_data_attribute(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = data_taxonomy.CreateDataAttributeRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_update_data_attribute_empty_call_grpc(): - client = DataTaxonomyServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.update_data_attribute), - '__call__') as call: - call.return_value = operations_pb2.Operation(name='operations/op') - client.update_data_attribute(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = data_taxonomy.UpdateDataAttributeRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_delete_data_attribute_empty_call_grpc(): - client = DataTaxonomyServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call, and fake the request. 
- with mock.patch.object( - type(client.transport.delete_data_attribute), - '__call__') as call: - call.return_value = operations_pb2.Operation(name='operations/op') - client.delete_data_attribute(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = data_taxonomy.DeleteDataAttributeRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_list_data_attributes_empty_call_grpc(): - client = DataTaxonomyServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.list_data_attributes), - '__call__') as call: - call.return_value = data_taxonomy.ListDataAttributesResponse() - client.list_data_attributes(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = data_taxonomy.ListDataAttributesRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_get_data_attribute_empty_call_grpc(): - client = DataTaxonomyServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.get_data_attribute), - '__call__') as call: - call.return_value = data_taxonomy.DataAttribute() - client.get_data_attribute(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = data_taxonomy.GetDataAttributeRequest() - - assert args[0] == request_msg - - -def test_transport_kind_grpc_asyncio(): - transport = DataTaxonomyServiceAsyncClient.get_transport_class("grpc_asyncio")( - credentials=async_anonymous_credentials() - ) - assert transport.kind == "grpc_asyncio" - - -def test_initialize_client_w_grpc_asyncio(): - client = DataTaxonomyServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio" - ) - assert client is not None - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -@pytest.mark.asyncio -async def test_create_data_taxonomy_empty_call_grpc_asyncio(): - client = DataTaxonomyServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.create_data_taxonomy), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name='operations/spam') - ) - await client.create_data_taxonomy(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = gcd_data_taxonomy.CreateDataTaxonomyRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. 
-@pytest.mark.asyncio -async def test_update_data_taxonomy_empty_call_grpc_asyncio(): - client = DataTaxonomyServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.update_data_taxonomy), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name='operations/spam') - ) - await client.update_data_taxonomy(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = gcd_data_taxonomy.UpdateDataTaxonomyRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -@pytest.mark.asyncio -async def test_delete_data_taxonomy_empty_call_grpc_asyncio(): - client = DataTaxonomyServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.delete_data_taxonomy), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name='operations/spam') - ) - await client.delete_data_taxonomy(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = data_taxonomy.DeleteDataTaxonomyRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -@pytest.mark.asyncio -async def test_list_data_taxonomies_empty_call_grpc_asyncio(): - client = DataTaxonomyServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.list_data_taxonomies), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(data_taxonomy.ListDataTaxonomiesResponse( - next_page_token='next_page_token_value', - unreachable_locations=['unreachable_locations_value'], - )) - await client.list_data_taxonomies(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = data_taxonomy.ListDataTaxonomiesRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -@pytest.mark.asyncio -async def test_get_data_taxonomy_empty_call_grpc_asyncio(): - client = DataTaxonomyServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.get_data_taxonomy), - '__call__') as call: - # Designate an appropriate return value for the call. 
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(data_taxonomy.DataTaxonomy( - name='name_value', - uid='uid_value', - description='description_value', - display_name='display_name_value', - attribute_count=1628, - etag='etag_value', - class_count=1182, - )) - await client.get_data_taxonomy(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = data_taxonomy.GetDataTaxonomyRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -@pytest.mark.asyncio -async def test_create_data_attribute_binding_empty_call_grpc_asyncio(): - client = DataTaxonomyServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.create_data_attribute_binding), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name='operations/spam') - ) - await client.create_data_attribute_binding(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = data_taxonomy.CreateDataAttributeBindingRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -@pytest.mark.asyncio -async def test_update_data_attribute_binding_empty_call_grpc_asyncio(): - client = DataTaxonomyServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.update_data_attribute_binding), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name='operations/spam') - ) - await client.update_data_attribute_binding(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = data_taxonomy.UpdateDataAttributeBindingRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -@pytest.mark.asyncio -async def test_delete_data_attribute_binding_empty_call_grpc_asyncio(): - client = DataTaxonomyServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.delete_data_attribute_binding), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name='operations/spam') - ) - await client.delete_data_attribute_binding(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = data_taxonomy.DeleteDataAttributeBindingRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. 
-@pytest.mark.asyncio -async def test_list_data_attribute_bindings_empty_call_grpc_asyncio(): - client = DataTaxonomyServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.list_data_attribute_bindings), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(data_taxonomy.ListDataAttributeBindingsResponse( - next_page_token='next_page_token_value', - unreachable_locations=['unreachable_locations_value'], - )) - await client.list_data_attribute_bindings(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = data_taxonomy.ListDataAttributeBindingsRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -@pytest.mark.asyncio -async def test_get_data_attribute_binding_empty_call_grpc_asyncio(): - client = DataTaxonomyServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.get_data_attribute_binding), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(data_taxonomy.DataAttributeBinding( - name='name_value', - uid='uid_value', - description='description_value', - display_name='display_name_value', - etag='etag_value', - attributes=['attributes_value'], - )) - await client.get_data_attribute_binding(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = data_taxonomy.GetDataAttributeBindingRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -@pytest.mark.asyncio -async def test_create_data_attribute_empty_call_grpc_asyncio(): - client = DataTaxonomyServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.create_data_attribute), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name='operations/spam') - ) - await client.create_data_attribute(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = data_taxonomy.CreateDataAttributeRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -@pytest.mark.asyncio -async def test_update_data_attribute_empty_call_grpc_asyncio(): - client = DataTaxonomyServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.update_data_attribute), - '__call__') as call: - # Designate an appropriate return value for the call. 
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name='operations/spam') - ) - await client.update_data_attribute(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = data_taxonomy.UpdateDataAttributeRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -@pytest.mark.asyncio -async def test_delete_data_attribute_empty_call_grpc_asyncio(): - client = DataTaxonomyServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.delete_data_attribute), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name='operations/spam') - ) - await client.delete_data_attribute(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = data_taxonomy.DeleteDataAttributeRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -@pytest.mark.asyncio -async def test_list_data_attributes_empty_call_grpc_asyncio(): - client = DataTaxonomyServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.list_data_attributes), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(data_taxonomy.ListDataAttributesResponse( - next_page_token='next_page_token_value', - unreachable_locations=['unreachable_locations_value'], - )) - await client.list_data_attributes(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = data_taxonomy.ListDataAttributesRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -@pytest.mark.asyncio -async def test_get_data_attribute_empty_call_grpc_asyncio(): - client = DataTaxonomyServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.get_data_attribute), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(data_taxonomy.DataAttribute( - name='name_value', - uid='uid_value', - description='description_value', - display_name='display_name_value', - parent_id='parent_id_value', - attribute_count=1628, - etag='etag_value', - )) - await client.get_data_attribute(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = data_taxonomy.GetDataAttributeRequest() - - assert args[0] == request_msg - - -def test_transport_grpc_default(): - # A client should use the gRPC transport by default. 
- client = DataTaxonomyServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - assert isinstance( - client.transport, - transports.DataTaxonomyServiceGrpcTransport, - ) - -def test_data_taxonomy_service_base_transport_error(): - # Passing both a credentials object and credentials_file should raise an error - with pytest.raises(core_exceptions.DuplicateCredentialArgs): - transport = transports.DataTaxonomyServiceTransport( - credentials=ga_credentials.AnonymousCredentials(), - credentials_file="credentials.json" - ) - - -def test_data_taxonomy_service_base_transport(): - # Instantiate the base transport. - with mock.patch('google.cloud.dataplex_v1.services.data_taxonomy_service.transports.DataTaxonomyServiceTransport.__init__') as Transport: - Transport.return_value = None - transport = transports.DataTaxonomyServiceTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Every method on the transport should just blindly - # raise NotImplementedError. - methods = ( - 'create_data_taxonomy', - 'update_data_taxonomy', - 'delete_data_taxonomy', - 'list_data_taxonomies', - 'get_data_taxonomy', - 'create_data_attribute_binding', - 'update_data_attribute_binding', - 'delete_data_attribute_binding', - 'list_data_attribute_bindings', - 'get_data_attribute_binding', - 'create_data_attribute', - 'update_data_attribute', - 'delete_data_attribute', - 'list_data_attributes', - 'get_data_attribute', - 'get_location', - 'list_locations', - 'get_operation', - 'cancel_operation', - 'delete_operation', - 'list_operations', - ) - for method in methods: - with pytest.raises(NotImplementedError): - getattr(transport, method)(request=object()) - - with pytest.raises(NotImplementedError): - transport.close() - - # Additionally, the LRO client (a property) should - # also raise NotImplementedError - with pytest.raises(NotImplementedError): - transport.operations_client - - # Catch all for all remaining methods and properties - remainder = [ - 'kind', - ] - for r in remainder: - with pytest.raises(NotImplementedError): - getattr(transport, r)() - - -def test_data_taxonomy_service_base_transport_with_credentials_file(): - # Instantiate the base transport with a credentials file - with mock.patch.object(google.auth, 'load_credentials_from_file', autospec=True) as load_creds, mock.patch('google.cloud.dataplex_v1.services.data_taxonomy_service.transports.DataTaxonomyServiceTransport._prep_wrapped_messages') as Transport: - Transport.return_value = None - load_creds.return_value = (ga_credentials.AnonymousCredentials(), None) - transport = transports.DataTaxonomyServiceTransport( - credentials_file="credentials.json", - quota_project_id="octopus", - ) - load_creds.assert_called_once_with("credentials.json", - scopes=None, - default_scopes=( - 'https://www.googleapis.com/auth/cloud-platform', -), - quota_project_id="octopus", - ) - - -def test_data_taxonomy_service_base_transport_with_adc(): - # Test the default credentials are used if credentials and credentials_file are None. - with mock.patch.object(google.auth, 'default', autospec=True) as adc, mock.patch('google.cloud.dataplex_v1.services.data_taxonomy_service.transports.DataTaxonomyServiceTransport._prep_wrapped_messages') as Transport: - Transport.return_value = None - adc.return_value = (ga_credentials.AnonymousCredentials(), None) - transport = transports.DataTaxonomyServiceTransport() - adc.assert_called_once() - - -def test_data_taxonomy_service_auth_adc(): - # If no credentials are provided, we should use ADC credentials. 
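-    # (ADC = Application Default Credentials, resolved by google.auth.default()
-    # from the environment, e.g. GOOGLE_APPLICATION_CREDENTIALS or gcloud.)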
- with mock.patch.object(google.auth, 'default', autospec=True) as adc: - adc.return_value = (ga_credentials.AnonymousCredentials(), None) - DataTaxonomyServiceClient() - adc.assert_called_once_with( - scopes=None, - default_scopes=( - 'https://www.googleapis.com/auth/cloud-platform', -), - quota_project_id=None, - ) - - -@pytest.mark.parametrize( - "transport_class", - [ - transports.DataTaxonomyServiceGrpcTransport, - transports.DataTaxonomyServiceGrpcAsyncIOTransport, - ], -) -def test_data_taxonomy_service_transport_auth_adc(transport_class): - # If credentials and host are not provided, the transport class should use - # ADC credentials. - with mock.patch.object(google.auth, 'default', autospec=True) as adc: - adc.return_value = (ga_credentials.AnonymousCredentials(), None) - transport_class(quota_project_id="octopus", scopes=["1", "2"]) - adc.assert_called_once_with( - scopes=["1", "2"], - default_scopes=( 'https://www.googleapis.com/auth/cloud-platform',), - quota_project_id="octopus", - ) - - -@pytest.mark.parametrize( - "transport_class", - [ - transports.DataTaxonomyServiceGrpcTransport, - transports.DataTaxonomyServiceGrpcAsyncIOTransport, - ], -) -def test_data_taxonomy_service_transport_auth_gdch_credentials(transport_class): - host = 'https://language.com' - api_audience_tests = [None, 'https://language2.com'] - api_audience_expect = [host, 'https://language2.com'] - for t, e in zip(api_audience_tests, api_audience_expect): - with mock.patch.object(google.auth, 'default', autospec=True) as adc: - gdch_mock = mock.MagicMock() - type(gdch_mock).with_gdch_audience = mock.PropertyMock(return_value=gdch_mock) - adc.return_value = (gdch_mock, None) - transport_class(host=host, api_audience=t) - gdch_mock.with_gdch_audience.assert_called_once_with( - e - ) - - -@pytest.mark.parametrize( - "transport_class,grpc_helpers", - [ - (transports.DataTaxonomyServiceGrpcTransport, grpc_helpers), - (transports.DataTaxonomyServiceGrpcAsyncIOTransport, grpc_helpers_async) - ], -) -def test_data_taxonomy_service_transport_create_channel(transport_class, grpc_helpers): - # If credentials and host are not provided, the transport class should use - # ADC credentials. - with mock.patch.object(google.auth, "default", autospec=True) as adc, mock.patch.object( - grpc_helpers, "create_channel", autospec=True - ) as create_channel: - creds = ga_credentials.AnonymousCredentials() - adc.return_value = (creds, None) - transport_class( - quota_project_id="octopus", - scopes=["1", "2"] - ) - - create_channel.assert_called_with( - "dataplex.googleapis.com:443", - credentials=creds, - credentials_file=None, - quota_project_id="octopus", - default_scopes=( - 'https://www.googleapis.com/auth/cloud-platform', -), - scopes=["1", "2"], - default_host="dataplex.googleapis.com", - ssl_credentials=None, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ], - ) - - -@pytest.mark.parametrize("transport_class", [transports.DataTaxonomyServiceGrpcTransport, transports.DataTaxonomyServiceGrpcAsyncIOTransport]) -def test_data_taxonomy_service_grpc_transport_client_cert_source_for_mtls( - transport_class -): - cred = ga_credentials.AnonymousCredentials() - - # Check ssl_channel_credentials is used if provided. 
-    with mock.patch.object(transport_class, "create_channel") as mock_create_channel:
-        mock_ssl_channel_creds = mock.Mock()
-        transport_class(
-            host="squid.clam.whelk",
-            credentials=cred,
-            ssl_channel_credentials=mock_ssl_channel_creds
-        )
-        mock_create_channel.assert_called_once_with(
-            "squid.clam.whelk:443",
-            credentials=cred,
-            credentials_file=None,
-            scopes=None,
-            ssl_credentials=mock_ssl_channel_creds,
-            quota_project_id=None,
-            options=[
-                ("grpc.max_send_message_length", -1),
-                ("grpc.max_receive_message_length", -1),
-            ],
-        )
-
-    # Check if ssl_channel_credentials is not provided, then client_cert_source_for_mtls
-    # is used.
-    with mock.patch.object(transport_class, "create_channel", return_value=mock.Mock()):
-        with mock.patch("grpc.ssl_channel_credentials") as mock_ssl_cred:
-            transport_class(
-                credentials=cred,
-                client_cert_source_for_mtls=client_cert_source_callback
-            )
-            expected_cert, expected_key = client_cert_source_callback()
-            mock_ssl_cred.assert_called_once_with(
-                certificate_chain=expected_cert,
-                private_key=expected_key
-            )
-
-
-@pytest.mark.parametrize("transport_name", [
-    "grpc",
-    "grpc_asyncio",
-])
-def test_data_taxonomy_service_host_no_port(transport_name):
-    client = DataTaxonomyServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        client_options=client_options.ClientOptions(api_endpoint='dataplex.googleapis.com'),
-        transport=transport_name,
-    )
-    assert client.transport._host == (
-        'dataplex.googleapis.com:443'
-    )
-
-@pytest.mark.parametrize("transport_name", [
-    "grpc",
-    "grpc_asyncio",
-])
-def test_data_taxonomy_service_host_with_port(transport_name):
-    client = DataTaxonomyServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        client_options=client_options.ClientOptions(api_endpoint='dataplex.googleapis.com:8000'),
-        transport=transport_name,
-    )
-    assert client.transport._host == (
-        'dataplex.googleapis.com:8000'
-    )
-
-def test_data_taxonomy_service_grpc_transport_channel():
-    channel = grpc.secure_channel('http://localhost/', grpc.local_channel_credentials())
-
-    # Check that channel is used if provided.
-    transport = transports.DataTaxonomyServiceGrpcTransport(
-        host="squid.clam.whelk",
-        channel=channel,
-    )
-    assert transport.grpc_channel == channel
-    assert transport._host == "squid.clam.whelk:443"
-    assert transport._ssl_channel_credentials is None
-
-
-def test_data_taxonomy_service_grpc_asyncio_transport_channel():
-    channel = aio.secure_channel('http://localhost/', grpc.local_channel_credentials())
-
-    # Check that channel is used if provided.
-    transport = transports.DataTaxonomyServiceGrpcAsyncIOTransport(
-        host="squid.clam.whelk",
-        channel=channel,
-    )
-    assert transport.grpc_channel == channel
-    assert transport._host == "squid.clam.whelk:443"
-    assert transport._ssl_channel_credentials is None
-
-
-# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are
-# removed from grpc/grpc_asyncio transport constructor.
-@pytest.mark.parametrize("transport_class", [transports.DataTaxonomyServiceGrpcTransport, transports.DataTaxonomyServiceGrpcAsyncIOTransport])
-def test_data_taxonomy_service_transport_channel_mtls_with_client_cert_source(
-    transport_class
-):
-    with mock.patch("grpc.ssl_channel_credentials", autospec=True) as grpc_ssl_channel_cred:
-        with mock.patch.object(transport_class, "create_channel") as grpc_create_channel:
-            mock_ssl_cred = mock.Mock()
-            grpc_ssl_channel_cred.return_value = mock_ssl_cred
-
-            mock_grpc_channel = mock.Mock()
-            grpc_create_channel.return_value = mock_grpc_channel
-
-            cred = ga_credentials.AnonymousCredentials()
-            with pytest.warns(DeprecationWarning):
-                with mock.patch.object(google.auth, 'default') as adc:
-                    adc.return_value = (cred, None)
-                    transport = transport_class(
-                        host="squid.clam.whelk",
-                        api_mtls_endpoint="mtls.squid.clam.whelk",
-                        client_cert_source=client_cert_source_callback,
-                    )
-                    adc.assert_called_once()
-
-            grpc_ssl_channel_cred.assert_called_once_with(
-                certificate_chain=b"cert bytes", private_key=b"key bytes"
-            )
-            grpc_create_channel.assert_called_once_with(
-                "mtls.squid.clam.whelk:443",
-                credentials=cred,
-                credentials_file=None,
-                scopes=None,
-                ssl_credentials=mock_ssl_cred,
-                quota_project_id=None,
-                options=[
-                    ("grpc.max_send_message_length", -1),
-                    ("grpc.max_receive_message_length", -1),
-                ],
-            )
-            assert transport.grpc_channel == mock_grpc_channel
-            assert transport._ssl_channel_credentials == mock_ssl_cred
-
-
-# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are
-# removed from grpc/grpc_asyncio transport constructor.
-@pytest.mark.parametrize("transport_class", [transports.DataTaxonomyServiceGrpcTransport, transports.DataTaxonomyServiceGrpcAsyncIOTransport])
-def test_data_taxonomy_service_transport_channel_mtls_with_adc(
-    transport_class
-):
-    mock_ssl_cred = mock.Mock()
-    with mock.patch.multiple(
-        "google.auth.transport.grpc.SslCredentials",
-        __init__=mock.Mock(return_value=None),
-        ssl_credentials=mock.PropertyMock(return_value=mock_ssl_cred),
-    ):
-        with mock.patch.object(transport_class, "create_channel") as grpc_create_channel:
-            mock_grpc_channel = mock.Mock()
-            grpc_create_channel.return_value = mock_grpc_channel
-            mock_cred = mock.Mock()
-
-            with pytest.warns(DeprecationWarning):
-                transport = transport_class(
-                    host="squid.clam.whelk",
-                    credentials=mock_cred,
-                    api_mtls_endpoint="mtls.squid.clam.whelk",
-                    client_cert_source=None,
-                )
-
-            grpc_create_channel.assert_called_once_with(
-                "mtls.squid.clam.whelk:443",
-                credentials=mock_cred,
-                credentials_file=None,
-                scopes=None,
-                ssl_credentials=mock_ssl_cred,
-                quota_project_id=None,
-                options=[
-                    ("grpc.max_send_message_length", -1),
-                    ("grpc.max_receive_message_length", -1),
-                ],
-            )
-            assert transport.grpc_channel == mock_grpc_channel
-
-
-def test_data_taxonomy_service_grpc_lro_client():
-    client = DataTaxonomyServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport='grpc',
-    )
-    transport = client.transport
-
-    # Ensure that we have an api-core operations client.
-    assert isinstance(
-        transport.operations_client,
-        operations_v1.OperationsClient,
-    )
-
-    # Ensure that subsequent calls to the property return the exact same object.
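-    # (The transport memoizes the operations client on first access, so the
-    # identity check below holds across repeated property reads.)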
-    assert transport.operations_client is transport.operations_client
-
-
-def test_data_taxonomy_service_grpc_lro_async_client():
-    client = DataTaxonomyServiceAsyncClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport='grpc_asyncio',
-    )
-    transport = client.transport
-
-    # Ensure that we have an api-core operations client.
-    assert isinstance(
-        transport.operations_client,
-        operations_v1.OperationsAsyncClient,
-    )
-
-    # Ensure that subsequent calls to the property return the exact same object.
-    assert transport.operations_client is transport.operations_client
-
-
-def test_data_attribute_path():
-    project = "squid"
-    location = "clam"
-    dataTaxonomy = "whelk"
-    data_attribute_id = "octopus"
-    expected = "projects/{project}/locations/{location}/dataTaxonomies/{dataTaxonomy}/attributes/{data_attribute_id}".format(project=project, location=location, dataTaxonomy=dataTaxonomy, data_attribute_id=data_attribute_id, )
-    actual = DataTaxonomyServiceClient.data_attribute_path(project, location, dataTaxonomy, data_attribute_id)
-    assert expected == actual
-
-
-def test_parse_data_attribute_path():
-    expected = {
-        "project": "oyster",
-        "location": "nudibranch",
-        "dataTaxonomy": "cuttlefish",
-        "data_attribute_id": "mussel",
-    }
-    path = DataTaxonomyServiceClient.data_attribute_path(**expected)
-
-    # Check that the path construction is reversible.
-    actual = DataTaxonomyServiceClient.parse_data_attribute_path(path)
-    assert expected == actual
-
-def test_data_attribute_binding_path():
-    project = "winkle"
-    location = "nautilus"
-    data_attribute_binding_id = "scallop"
-    expected = "projects/{project}/locations/{location}/dataAttributeBindings/{data_attribute_binding_id}".format(project=project, location=location, data_attribute_binding_id=data_attribute_binding_id, )
-    actual = DataTaxonomyServiceClient.data_attribute_binding_path(project, location, data_attribute_binding_id)
-    assert expected == actual
-
-
-def test_parse_data_attribute_binding_path():
-    expected = {
-        "project": "abalone",
-        "location": "squid",
-        "data_attribute_binding_id": "clam",
-    }
-    path = DataTaxonomyServiceClient.data_attribute_binding_path(**expected)
-
-    # Check that the path construction is reversible.
-    actual = DataTaxonomyServiceClient.parse_data_attribute_binding_path(path)
-    assert expected == actual
-
-def test_data_taxonomy_path():
-    project = "whelk"
-    location = "octopus"
-    data_taxonomy_id = "oyster"
-    expected = "projects/{project}/locations/{location}/dataTaxonomies/{data_taxonomy_id}".format(project=project, location=location, data_taxonomy_id=data_taxonomy_id, )
-    actual = DataTaxonomyServiceClient.data_taxonomy_path(project, location, data_taxonomy_id)
-    assert expected == actual
-
-
-def test_parse_data_taxonomy_path():
-    expected = {
-        "project": "nudibranch",
-        "location": "cuttlefish",
-        "data_taxonomy_id": "mussel",
-    }
-    path = DataTaxonomyServiceClient.data_taxonomy_path(**expected)
-
-    # Check that the path construction is reversible.
- actual = DataTaxonomyServiceClient.parse_data_taxonomy_path(path) - assert expected == actual - -def test_common_billing_account_path(): - billing_account = "winkle" - expected = "billingAccounts/{billing_account}".format(billing_account=billing_account, ) - actual = DataTaxonomyServiceClient.common_billing_account_path(billing_account) - assert expected == actual - - -def test_parse_common_billing_account_path(): - expected = { - "billing_account": "nautilus", - } - path = DataTaxonomyServiceClient.common_billing_account_path(**expected) - - # Check that the path construction is reversible. - actual = DataTaxonomyServiceClient.parse_common_billing_account_path(path) - assert expected == actual - -def test_common_folder_path(): - folder = "scallop" - expected = "folders/{folder}".format(folder=folder, ) - actual = DataTaxonomyServiceClient.common_folder_path(folder) - assert expected == actual - - -def test_parse_common_folder_path(): - expected = { - "folder": "abalone", - } - path = DataTaxonomyServiceClient.common_folder_path(**expected) - - # Check that the path construction is reversible. - actual = DataTaxonomyServiceClient.parse_common_folder_path(path) - assert expected == actual - -def test_common_organization_path(): - organization = "squid" - expected = "organizations/{organization}".format(organization=organization, ) - actual = DataTaxonomyServiceClient.common_organization_path(organization) - assert expected == actual - - -def test_parse_common_organization_path(): - expected = { - "organization": "clam", - } - path = DataTaxonomyServiceClient.common_organization_path(**expected) - - # Check that the path construction is reversible. - actual = DataTaxonomyServiceClient.parse_common_organization_path(path) - assert expected == actual - -def test_common_project_path(): - project = "whelk" - expected = "projects/{project}".format(project=project, ) - actual = DataTaxonomyServiceClient.common_project_path(project) - assert expected == actual - - -def test_parse_common_project_path(): - expected = { - "project": "octopus", - } - path = DataTaxonomyServiceClient.common_project_path(**expected) - - # Check that the path construction is reversible. - actual = DataTaxonomyServiceClient.parse_common_project_path(path) - assert expected == actual - -def test_common_location_path(): - project = "oyster" - location = "nudibranch" - expected = "projects/{project}/locations/{location}".format(project=project, location=location, ) - actual = DataTaxonomyServiceClient.common_location_path(project, location) - assert expected == actual - - -def test_parse_common_location_path(): - expected = { - "project": "cuttlefish", - "location": "mussel", - } - path = DataTaxonomyServiceClient.common_location_path(**expected) - - # Check that the path construction is reversible. 
- actual = DataTaxonomyServiceClient.parse_common_location_path(path) - assert expected == actual - - -def test_client_with_default_client_info(): - client_info = gapic_v1.client_info.ClientInfo() - - with mock.patch.object(transports.DataTaxonomyServiceTransport, '_prep_wrapped_messages') as prep: - client = DataTaxonomyServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - client_info=client_info, - ) - prep.assert_called_once_with(client_info) - - with mock.patch.object(transports.DataTaxonomyServiceTransport, '_prep_wrapped_messages') as prep: - transport_class = DataTaxonomyServiceClient.get_transport_class() - transport = transport_class( - credentials=ga_credentials.AnonymousCredentials(), - client_info=client_info, - ) - prep.assert_called_once_with(client_info) - - -def test_delete_operation(transport: str = "grpc"): - client = DataTaxonomyServiceClient( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = operations_pb2.DeleteOperationRequest() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.delete_operation), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = None - response = client.delete_operation(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the response is the type that we expect. - assert response is None -@pytest.mark.asyncio -async def test_delete_operation_async(transport: str = "grpc_asyncio"): - client = DataTaxonomyServiceAsyncClient( - credentials=async_anonymous_credentials(), transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = operations_pb2.DeleteOperationRequest() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.delete_operation), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - None - ) - response = await client.delete_operation(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the response is the type that we expect. - assert response is None - -def test_delete_operation_field_headers(): - client = DataTaxonomyServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = operations_pb2.DeleteOperationRequest() - request.name = "locations" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.delete_operation), "__call__") as call: - call.return_value = None - - client.delete_operation(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. 
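-    # (The routing parameters are URL-encoded into a single
-    # "x-goog-request-params" metadata entry, which the backend uses to route
-    # the request; hence the exact tuple match below.)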
- _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "name=locations",) in kw["metadata"] -@pytest.mark.asyncio -async def test_delete_operation_field_headers_async(): - client = DataTaxonomyServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = operations_pb2.DeleteOperationRequest() - request.name = "locations" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.delete_operation), "__call__") as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - None - ) - await client.delete_operation(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "name=locations",) in kw["metadata"] - -def test_delete_operation_from_dict(): - client = DataTaxonomyServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.delete_operation), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = None - - response = client.delete_operation( - request={ - "name": "locations", - } - ) - call.assert_called() -@pytest.mark.asyncio -async def test_delete_operation_from_dict_async(): - client = DataTaxonomyServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.delete_operation), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - None - ) - response = await client.delete_operation( - request={ - "name": "locations", - } - ) - call.assert_called() - - -def test_cancel_operation(transport: str = "grpc"): - client = DataTaxonomyServiceClient( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = operations_pb2.CancelOperationRequest() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = None - response = client.cancel_operation(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the response is the type that we expect. - assert response is None -@pytest.mark.asyncio -async def test_cancel_operation_async(transport: str = "grpc_asyncio"): - client = DataTaxonomyServiceAsyncClient( - credentials=async_anonymous_credentials(), transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = operations_pb2.CancelOperationRequest() - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - None - ) - response = await client.cancel_operation(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the response is the type that we expect. - assert response is None - -def test_cancel_operation_field_headers(): - client = DataTaxonomyServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = operations_pb2.CancelOperationRequest() - request.name = "locations" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: - call.return_value = None - - client.cancel_operation(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "name=locations",) in kw["metadata"] -@pytest.mark.asyncio -async def test_cancel_operation_field_headers_async(): - client = DataTaxonomyServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = operations_pb2.CancelOperationRequest() - request.name = "locations" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - None - ) - await client.cancel_operation(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "name=locations",) in kw["metadata"] - -def test_cancel_operation_from_dict(): - client = DataTaxonomyServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = None - - response = client.cancel_operation( - request={ - "name": "locations", - } - ) - call.assert_called() -@pytest.mark.asyncio -async def test_cancel_operation_from_dict_async(): - client = DataTaxonomyServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: - # Designate an appropriate return value for the call. 
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - None - ) - response = await client.cancel_operation( - request={ - "name": "locations", - } - ) - call.assert_called() - - -def test_get_operation(transport: str = "grpc"): - client = DataTaxonomyServiceClient( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = operations_pb2.GetOperationRequest() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_operation), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = operations_pb2.Operation() - response = client.get_operation(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, operations_pb2.Operation) -@pytest.mark.asyncio -async def test_get_operation_async(transport: str = "grpc_asyncio"): - client = DataTaxonomyServiceAsyncClient( - credentials=async_anonymous_credentials(), transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = operations_pb2.GetOperationRequest() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_operation), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation() - ) - response = await client.get_operation(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, operations_pb2.Operation) - -def test_get_operation_field_headers(): - client = DataTaxonomyServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = operations_pb2.GetOperationRequest() - request.name = "locations" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_operation), "__call__") as call: - call.return_value = operations_pb2.Operation() - - client.get_operation(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "name=locations",) in kw["metadata"] -@pytest.mark.asyncio -async def test_get_operation_field_headers_async(): - client = DataTaxonomyServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = operations_pb2.GetOperationRequest() - request.name = "locations" - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object(type(client.transport.get_operation), "__call__") as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation() - ) - await client.get_operation(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "name=locations",) in kw["metadata"] - -def test_get_operation_from_dict(): - client = DataTaxonomyServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_operation), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = operations_pb2.Operation() - - response = client.get_operation( - request={ - "name": "locations", - } - ) - call.assert_called() -@pytest.mark.asyncio -async def test_get_operation_from_dict_async(): - client = DataTaxonomyServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_operation), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation() - ) - response = await client.get_operation( - request={ - "name": "locations", - } - ) - call.assert_called() - - -def test_list_operations(transport: str = "grpc"): - client = DataTaxonomyServiceClient( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = operations_pb2.ListOperationsRequest() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_operations), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = operations_pb2.ListOperationsResponse() - response = client.list_operations(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, operations_pb2.ListOperationsResponse) -@pytest.mark.asyncio -async def test_list_operations_async(transport: str = "grpc_asyncio"): - client = DataTaxonomyServiceAsyncClient( - credentials=async_anonymous_credentials(), transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = operations_pb2.ListOperationsRequest() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_operations), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.ListOperationsResponse() - ) - response = await client.list_operations(request) - # Establish that the underlying gRPC stub method was called. 
- assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, operations_pb2.ListOperationsResponse) - -def test_list_operations_field_headers(): - client = DataTaxonomyServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = operations_pb2.ListOperationsRequest() - request.name = "locations" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_operations), "__call__") as call: - call.return_value = operations_pb2.ListOperationsResponse() - - client.list_operations(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "name=locations",) in kw["metadata"] -@pytest.mark.asyncio -async def test_list_operations_field_headers_async(): - client = DataTaxonomyServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = operations_pb2.ListOperationsRequest() - request.name = "locations" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_operations), "__call__") as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.ListOperationsResponse() - ) - await client.list_operations(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "name=locations",) in kw["metadata"] - -def test_list_operations_from_dict(): - client = DataTaxonomyServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_operations), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = operations_pb2.ListOperationsResponse() - - response = client.list_operations( - request={ - "name": "locations", - } - ) - call.assert_called() -@pytest.mark.asyncio -async def test_list_operations_from_dict_async(): - client = DataTaxonomyServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_operations), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.ListOperationsResponse() - ) - response = await client.list_operations( - request={ - "name": "locations", - } - ) - call.assert_called() - - -def test_list_locations(transport: str = "grpc"): - client = DataTaxonomyServiceClient( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. 
- request = locations_pb2.ListLocationsRequest() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_locations), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = locations_pb2.ListLocationsResponse() - response = client.list_locations(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, locations_pb2.ListLocationsResponse) -@pytest.mark.asyncio -async def test_list_locations_async(transport: str = "grpc_asyncio"): - client = DataTaxonomyServiceAsyncClient( - credentials=async_anonymous_credentials(), transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = locations_pb2.ListLocationsRequest() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_locations), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - locations_pb2.ListLocationsResponse() - ) - response = await client.list_locations(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, locations_pb2.ListLocationsResponse) - -def test_list_locations_field_headers(): - client = DataTaxonomyServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = locations_pb2.ListLocationsRequest() - request.name = "locations" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_locations), "__call__") as call: - call.return_value = locations_pb2.ListLocationsResponse() - - client.list_locations(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "name=locations",) in kw["metadata"] -@pytest.mark.asyncio -async def test_list_locations_field_headers_async(): - client = DataTaxonomyServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = locations_pb2.ListLocationsRequest() - request.name = "locations" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_locations), "__call__") as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - locations_pb2.ListLocationsResponse() - ) - await client.list_locations(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. 
- _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "name=locations",) in kw["metadata"] - -def test_list_locations_from_dict(): - client = DataTaxonomyServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_locations), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = locations_pb2.ListLocationsResponse() - - response = client.list_locations( - request={ - "name": "locations", - } - ) - call.assert_called() -@pytest.mark.asyncio -async def test_list_locations_from_dict_async(): - client = DataTaxonomyServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_locations), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - locations_pb2.ListLocationsResponse() - ) - response = await client.list_locations( - request={ - "name": "locations", - } - ) - call.assert_called() - - -def test_get_location(transport: str = "grpc"): - client = DataTaxonomyServiceClient( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = locations_pb2.GetLocationRequest() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_location), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = locations_pb2.Location() - response = client.get_location(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, locations_pb2.Location) -@pytest.mark.asyncio -async def test_get_location_async(transport: str = "grpc_asyncio"): - client = DataTaxonomyServiceAsyncClient( - credentials=async_anonymous_credentials(), transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = locations_pb2.GetLocationRequest() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_location), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - locations_pb2.Location() - ) - response = await client.get_location(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, locations_pb2.Location) - -def test_get_location_field_headers(): - client = DataTaxonomyServiceClient( - credentials=ga_credentials.AnonymousCredentials()) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. 
-    request = locations_pb2.GetLocationRequest()
-    request.name = "locations/abc"
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(type(client.transport.get_location), "__call__") as call:
-        call.return_value = locations_pb2.Location()
-
-        client.get_location(request)
-        # Establish that the underlying gRPC stub method was called.
-        assert len(call.mock_calls) == 1
-        _, args, _ = call.mock_calls[0]
-        assert args[0] == request
-
-    # Establish that the field header was sent.
-    _, _, kw = call.mock_calls[0]
-    assert ("x-goog-request-params", "name=locations/abc",) in kw["metadata"]
-@pytest.mark.asyncio
-async def test_get_location_field_headers_async():
-    client = DataTaxonomyServiceAsyncClient(
-        credentials=async_anonymous_credentials()
-    )
-
-    # Any value that is part of the HTTP/1.1 URI should be sent as
-    # a field header. Set these to a non-empty value.
-    request = locations_pb2.GetLocationRequest()
-    request.name = "locations/abc"
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(type(client.transport.get_location), "__call__") as call:
-        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
-            locations_pb2.Location()
-        )
-        await client.get_location(request)
-        # Establish that the underlying gRPC stub method was called.
-        assert len(call.mock_calls) == 1
-        _, args, _ = call.mock_calls[0]
-        assert args[0] == request
-
-    # Establish that the field header was sent.
-    _, _, kw = call.mock_calls[0]
-    assert ("x-goog-request-params", "name=locations/abc",) in kw["metadata"]
-
-def test_get_location_from_dict():
-    client = DataTaxonomyServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-    )
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(type(client.transport.get_location), "__call__") as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = locations_pb2.Location()
-
-        response = client.get_location(
-            request={
-                "name": "locations/abc",
-            }
-        )
-        call.assert_called()
-@pytest.mark.asyncio
-async def test_get_location_from_dict_async():
-    client = DataTaxonomyServiceAsyncClient(
-        credentials=async_anonymous_credentials(),
-    )
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(type(client.transport.get_location), "__call__") as call:
-        # Designate an appropriate return value for the call.
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - locations_pb2.Location() - ) - response = await client.get_location( - request={ - "name": "locations", - } - ) - call.assert_called() - - -def test_transport_close_grpc(): - client = DataTaxonomyServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc" - ) - with mock.patch.object(type(getattr(client.transport, "_grpc_channel")), "close") as close: - with client: - close.assert_not_called() - close.assert_called_once() - - -@pytest.mark.asyncio -async def test_transport_close_grpc_asyncio(): - client = DataTaxonomyServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio" - ) - with mock.patch.object(type(getattr(client.transport, "_grpc_channel")), "close") as close: - async with client: - close.assert_not_called() - close.assert_called_once() - - -def test_client_ctx(): - transports = [ - 'grpc', - ] - for transport in transports: - client = DataTaxonomyServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport - ) - # Test client calls underlying transport. - with mock.patch.object(type(client.transport), "close") as close: - close.assert_not_called() - with client: - pass - close.assert_called() - -@pytest.mark.parametrize("client_class,transport_class", [ - (DataTaxonomyServiceClient, transports.DataTaxonomyServiceGrpcTransport), - (DataTaxonomyServiceAsyncClient, transports.DataTaxonomyServiceGrpcAsyncIOTransport), -]) -def test_api_key_credentials(client_class, transport_class): - with mock.patch.object( - google.auth._default, "get_api_key_credentials", create=True - ) as get_api_key_credentials: - mock_cred = mock.Mock() - get_api_key_credentials.return_value = mock_cred - options = client_options.ClientOptions() - options.api_key = "api_key" - with mock.patch.object(transport_class, "__init__") as patched: - patched.return_value = None - client = client_class(client_options=options) - patched.assert_called_once_with( - credentials=mock_cred, - credentials_file=None, - host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE), - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) diff --git a/owl-bot-staging/google-cloud-dataplex/v1/tests/unit/gapic/dataplex_v1/test_dataplex_service.py b/owl-bot-staging/google-cloud-dataplex/v1/tests/unit/gapic/dataplex_v1/test_dataplex_service.py deleted file mode 100644 index 40bd69b6232e..000000000000 --- a/owl-bot-staging/google-cloud-dataplex/v1/tests/unit/gapic/dataplex_v1/test_dataplex_service.py +++ /dev/null @@ -1,16649 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
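The 16,649-line module deleted next, test_dataplex_service.py, repeats the same generated scaffolding for the DataplexService client. For readers skimming the diff, the recurring "empty call" failsafe that dominates these modules reduces to roughly the following sketch; the helper name and the generic client/method handling are illustrative only, not code from the deleted file:

    from unittest import mock

    def assert_empty_call(client, method_name, expected_request):
        # Patch the transport's bound callable so no real RPC is made.
        with mock.patch.object(
                type(getattr(client.transport, method_name)), '__call__') as call:
            call.return_value = None
            # Calling with request=None must still build the default
            # request message and hand it to the stub.
            getattr(client, method_name)(request=None)
            call.assert_called()
            _, args, _ = call.mock_calls[0]
            assert args[0] == expected_request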
-# -import os -# try/except added for compatibility with python < 3.8 -try: - from unittest import mock - from unittest.mock import AsyncMock # pragma: NO COVER -except ImportError: # pragma: NO COVER - import mock - -import grpc -from grpc.experimental import aio -import math -import pytest -from google.api_core import api_core_version -from proto.marshal.rules.dates import DurationRule, TimestampRule -from proto.marshal.rules import wrappers - -try: - from google.auth.aio import credentials as ga_credentials_async - HAS_GOOGLE_AUTH_AIO = True -except ImportError: # pragma: NO COVER - HAS_GOOGLE_AUTH_AIO = False - -from google.api_core import client_options -from google.api_core import exceptions as core_exceptions -from google.api_core import future -from google.api_core import gapic_v1 -from google.api_core import grpc_helpers -from google.api_core import grpc_helpers_async -from google.api_core import operation -from google.api_core import operation_async # type: ignore -from google.api_core import operations_v1 -from google.api_core import path_template -from google.api_core import retry as retries -from google.auth import credentials as ga_credentials -from google.auth.exceptions import MutualTLSChannelError -from google.cloud.dataplex_v1.services.dataplex_service import DataplexServiceAsyncClient -from google.cloud.dataplex_v1.services.dataplex_service import DataplexServiceClient -from google.cloud.dataplex_v1.services.dataplex_service import pagers -from google.cloud.dataplex_v1.services.dataplex_service import transports -from google.cloud.dataplex_v1.types import analyze -from google.cloud.dataplex_v1.types import resources -from google.cloud.dataplex_v1.types import service -from google.cloud.dataplex_v1.types import tasks -from google.cloud.location import locations_pb2 -from google.iam.v1 import iam_policy_pb2 # type: ignore -from google.iam.v1 import options_pb2 # type: ignore -from google.iam.v1 import policy_pb2 # type: ignore -from google.longrunning import operations_pb2 # type: ignore -from google.oauth2 import service_account -from google.protobuf import duration_pb2 # type: ignore -from google.protobuf import empty_pb2 # type: ignore -from google.protobuf import field_mask_pb2 # type: ignore -from google.protobuf import timestamp_pb2 # type: ignore -import google.auth - - -async def mock_async_gen(data, chunk_size=1): - for i in range(0, len(data)): # pragma: NO COVER - chunk = data[i : i + chunk_size] - yield chunk.encode("utf-8") - -def client_cert_source_callback(): - return b"cert bytes", b"key bytes" - -# TODO: use async auth anon credentials by default once the minimum version of google-auth is upgraded. -# See related issue: https://github.com/googleapis/gapic-generator-python/issues/2107. -def async_anonymous_credentials(): - if HAS_GOOGLE_AUTH_AIO: - return ga_credentials_async.AnonymousCredentials() - return ga_credentials.AnonymousCredentials() - -# If default endpoint is localhost, then default mtls endpoint will be the same. -# This method modifies the default endpoint so the client can produce a different -# mtls endpoint for endpoint testing purposes. -def modify_default_endpoint(client): - return "foo.googleapis.com" if ("localhost" in client.DEFAULT_ENDPOINT) else client.DEFAULT_ENDPOINT - -# If default endpoint template is localhost, then default mtls endpoint will be the same. -# This method modifies the default endpoint template so the client can produce a different -# mtls endpoint for endpoint testing purposes. 
-def modify_default_endpoint_template(client):
- return "test.{UNIVERSE_DOMAIN}" if ("localhost" in client._DEFAULT_ENDPOINT_TEMPLATE) else client._DEFAULT_ENDPOINT_TEMPLATE
-
-
-def test__get_default_mtls_endpoint():
- api_endpoint = "example.googleapis.com"
- api_mtls_endpoint = "example.mtls.googleapis.com"
- sandbox_endpoint = "example.sandbox.googleapis.com"
- sandbox_mtls_endpoint = "example.mtls.sandbox.googleapis.com"
- non_googleapi = "api.example.com"
-
- assert DataplexServiceClient._get_default_mtls_endpoint(None) is None
- assert DataplexServiceClient._get_default_mtls_endpoint(api_endpoint) == api_mtls_endpoint
- assert DataplexServiceClient._get_default_mtls_endpoint(api_mtls_endpoint) == api_mtls_endpoint
- assert DataplexServiceClient._get_default_mtls_endpoint(sandbox_endpoint) == sandbox_mtls_endpoint
- assert DataplexServiceClient._get_default_mtls_endpoint(sandbox_mtls_endpoint) == sandbox_mtls_endpoint
- assert DataplexServiceClient._get_default_mtls_endpoint(non_googleapi) == non_googleapi
-
-def test__read_environment_variables():
- assert DataplexServiceClient._read_environment_variables() == (False, "auto", None)
-
- with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}):
- assert DataplexServiceClient._read_environment_variables() == (True, "auto", None)
-
- with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "false"}):
- assert DataplexServiceClient._read_environment_variables() == (False, "auto", None)
-
- with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"}):
- with pytest.raises(ValueError) as excinfo:
- DataplexServiceClient._read_environment_variables()
- assert str(excinfo.value) == "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`"
-
- with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}):
- assert DataplexServiceClient._read_environment_variables() == (False, "never", None)
-
- with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}):
- assert DataplexServiceClient._read_environment_variables() == (False, "always", None)
-
- with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"}):
- assert DataplexServiceClient._read_environment_variables() == (False, "auto", None)
-
- with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}):
- with pytest.raises(MutualTLSChannelError) as excinfo:
- DataplexServiceClient._read_environment_variables()
- assert str(excinfo.value) == "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`"
-
- with mock.patch.dict(os.environ, {"GOOGLE_CLOUD_UNIVERSE_DOMAIN": "foo.com"}):
- assert DataplexServiceClient._read_environment_variables() == (False, "auto", "foo.com")
-
-def test__get_client_cert_source():
- mock_provided_cert_source = mock.Mock()
- mock_default_cert_source = mock.Mock()
-
- assert DataplexServiceClient._get_client_cert_source(None, False) is None
- assert DataplexServiceClient._get_client_cert_source(mock_provided_cert_source, False) is None
- assert DataplexServiceClient._get_client_cert_source(mock_provided_cert_source, True) == mock_provided_cert_source
-
- with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=True):
- with mock.patch('google.auth.transport.mtls.default_client_cert_source', return_value=mock_default_cert_source):
- assert DataplexServiceClient._get_client_cert_source(None, True) is mock_default_cert_source
- assert DataplexServiceClient._get_client_cert_source(mock_provided_cert_source, "true") is mock_provided_cert_source
-
-@mock.patch.object(DataplexServiceClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(DataplexServiceClient))
-@mock.patch.object(DataplexServiceAsyncClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(DataplexServiceAsyncClient))
-def test__get_api_endpoint():
- api_override = "foo.com"
- mock_client_cert_source = mock.Mock()
- default_universe = DataplexServiceClient._DEFAULT_UNIVERSE
- default_endpoint = DataplexServiceClient._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=default_universe)
- mock_universe = "bar.com"
- mock_endpoint = DataplexServiceClient._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=mock_universe)
-
- assert DataplexServiceClient._get_api_endpoint(api_override, mock_client_cert_source, default_universe, "always") == api_override
- assert DataplexServiceClient._get_api_endpoint(None, mock_client_cert_source, default_universe, "auto") == DataplexServiceClient.DEFAULT_MTLS_ENDPOINT
- assert DataplexServiceClient._get_api_endpoint(None, None, default_universe, "auto") == default_endpoint
- assert DataplexServiceClient._get_api_endpoint(None, None, default_universe, "always") == DataplexServiceClient.DEFAULT_MTLS_ENDPOINT
- assert DataplexServiceClient._get_api_endpoint(None, mock_client_cert_source, default_universe, "always") == DataplexServiceClient.DEFAULT_MTLS_ENDPOINT
- assert DataplexServiceClient._get_api_endpoint(None, None, mock_universe, "never") == mock_endpoint
- assert DataplexServiceClient._get_api_endpoint(None, None, default_universe, "never") == default_endpoint
-
- with pytest.raises(MutualTLSChannelError) as excinfo:
- DataplexServiceClient._get_api_endpoint(None, mock_client_cert_source, mock_universe, "auto")
- assert str(excinfo.value) == "mTLS is not supported in any universe other than googleapis.com."
-
-
-def test__get_universe_domain():
- client_universe_domain = "foo.com"
- universe_domain_env = "bar.com"
-
- assert DataplexServiceClient._get_universe_domain(client_universe_domain, universe_domain_env) == client_universe_domain
- assert DataplexServiceClient._get_universe_domain(None, universe_domain_env) == universe_domain_env
- assert DataplexServiceClient._get_universe_domain(None, None) == DataplexServiceClient._DEFAULT_UNIVERSE
-
- with pytest.raises(ValueError) as excinfo:
- DataplexServiceClient._get_universe_domain("", None)
- assert str(excinfo.value) == "Universe Domain cannot be an empty string."
- - -@pytest.mark.parametrize("client_class,transport_name", [ - (DataplexServiceClient, "grpc"), - (DataplexServiceAsyncClient, "grpc_asyncio"), -]) -def test_dataplex_service_client_from_service_account_info(client_class, transport_name): - creds = ga_credentials.AnonymousCredentials() - with mock.patch.object(service_account.Credentials, 'from_service_account_info') as factory: - factory.return_value = creds - info = {"valid": True} - client = client_class.from_service_account_info(info, transport=transport_name) - assert client.transport._credentials == creds - assert isinstance(client, client_class) - - assert client.transport._host == ( - 'dataplex.googleapis.com:443' - ) - - -@pytest.mark.parametrize("transport_class,transport_name", [ - (transports.DataplexServiceGrpcTransport, "grpc"), - (transports.DataplexServiceGrpcAsyncIOTransport, "grpc_asyncio"), -]) -def test_dataplex_service_client_service_account_always_use_jwt(transport_class, transport_name): - with mock.patch.object(service_account.Credentials, 'with_always_use_jwt_access', create=True) as use_jwt: - creds = service_account.Credentials(None, None, None) - transport = transport_class(credentials=creds, always_use_jwt_access=True) - use_jwt.assert_called_once_with(True) - - with mock.patch.object(service_account.Credentials, 'with_always_use_jwt_access', create=True) as use_jwt: - creds = service_account.Credentials(None, None, None) - transport = transport_class(credentials=creds, always_use_jwt_access=False) - use_jwt.assert_not_called() - - -@pytest.mark.parametrize("client_class,transport_name", [ - (DataplexServiceClient, "grpc"), - (DataplexServiceAsyncClient, "grpc_asyncio"), -]) -def test_dataplex_service_client_from_service_account_file(client_class, transport_name): - creds = ga_credentials.AnonymousCredentials() - with mock.patch.object(service_account.Credentials, 'from_service_account_file') as factory: - factory.return_value = creds - client = client_class.from_service_account_file("dummy/file/path.json", transport=transport_name) - assert client.transport._credentials == creds - assert isinstance(client, client_class) - - client = client_class.from_service_account_json("dummy/file/path.json", transport=transport_name) - assert client.transport._credentials == creds - assert isinstance(client, client_class) - - assert client.transport._host == ( - 'dataplex.googleapis.com:443' - ) - - -def test_dataplex_service_client_get_transport_class(): - transport = DataplexServiceClient.get_transport_class() - available_transports = [ - transports.DataplexServiceGrpcTransport, - ] - assert transport in available_transports - - transport = DataplexServiceClient.get_transport_class("grpc") - assert transport == transports.DataplexServiceGrpcTransport - - -@pytest.mark.parametrize("client_class,transport_class,transport_name", [ - (DataplexServiceClient, transports.DataplexServiceGrpcTransport, "grpc"), - (DataplexServiceAsyncClient, transports.DataplexServiceGrpcAsyncIOTransport, "grpc_asyncio"), -]) -@mock.patch.object(DataplexServiceClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(DataplexServiceClient)) -@mock.patch.object(DataplexServiceAsyncClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(DataplexServiceAsyncClient)) -def test_dataplex_service_client_client_options(client_class, transport_class, transport_name): - # Check that if channel is provided we won't create a new one. 
- with mock.patch.object(DataplexServiceClient, 'get_transport_class') as gtc: - transport = transport_class( - credentials=ga_credentials.AnonymousCredentials() - ) - client = client_class(transport=transport) - gtc.assert_not_called() - - # Check that if channel is provided via str we will create a new one. - with mock.patch.object(DataplexServiceClient, 'get_transport_class') as gtc: - client = client_class(transport=transport_name) - gtc.assert_called() - - # Check the case api_endpoint is provided. - options = client_options.ClientOptions(api_endpoint="squid.clam.whelk") - with mock.patch.object(transport_class, '__init__') as patched: - patched.return_value = None - client = client_class(transport=transport_name, client_options=options) - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host="squid.clam.whelk", - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - - # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is - # "never". - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): - with mock.patch.object(transport_class, '__init__') as patched: - patched.return_value = None - client = client_class(transport=transport_name) - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE), - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - - # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is - # "always". - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): - with mock.patch.object(transport_class, '__init__') as patched: - patched.return_value = None - client = client_class(transport=transport_name) - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host=client.DEFAULT_MTLS_ENDPOINT, - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - - # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has - # unsupported value. - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): - with pytest.raises(MutualTLSChannelError) as excinfo: - client = client_class(transport=transport_name) - assert str(excinfo.value) == "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" - - # Check the case GOOGLE_API_USE_CLIENT_CERTIFICATE has unsupported value. 
- with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"}): - with pytest.raises(ValueError) as excinfo: - client = client_class(transport=transport_name) - assert str(excinfo.value) == "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" - - # Check the case quota_project_id is provided - options = client_options.ClientOptions(quota_project_id="octopus") - with mock.patch.object(transport_class, '__init__') as patched: - patched.return_value = None - client = client_class(client_options=options, transport=transport_name) - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE), - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id="octopus", - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - # Check the case api_endpoint is provided - options = client_options.ClientOptions(api_audience="https://language.googleapis.com") - with mock.patch.object(transport_class, '__init__') as patched: - patched.return_value = None - client = client_class(client_options=options, transport=transport_name) - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE), - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience="https://language.googleapis.com" - ) - -@pytest.mark.parametrize("client_class,transport_class,transport_name,use_client_cert_env", [ - (DataplexServiceClient, transports.DataplexServiceGrpcTransport, "grpc", "true"), - (DataplexServiceAsyncClient, transports.DataplexServiceGrpcAsyncIOTransport, "grpc_asyncio", "true"), - (DataplexServiceClient, transports.DataplexServiceGrpcTransport, "grpc", "false"), - (DataplexServiceAsyncClient, transports.DataplexServiceGrpcAsyncIOTransport, "grpc_asyncio", "false"), -]) -@mock.patch.object(DataplexServiceClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(DataplexServiceClient)) -@mock.patch.object(DataplexServiceAsyncClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(DataplexServiceAsyncClient)) -@mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"}) -def test_dataplex_service_client_mtls_env_auto(client_class, transport_class, transport_name, use_client_cert_env): - # This tests the endpoint autoswitch behavior. Endpoint is autoswitched to the default - # mtls endpoint, if GOOGLE_API_USE_CLIENT_CERTIFICATE is "true" and client cert exists. - - # Check the case client_cert_source is provided. Whether client cert is used depends on - # GOOGLE_API_USE_CLIENT_CERTIFICATE value. 
- with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}): - options = client_options.ClientOptions(client_cert_source=client_cert_source_callback) - with mock.patch.object(transport_class, '__init__') as patched: - patched.return_value = None - client = client_class(client_options=options, transport=transport_name) - - if use_client_cert_env == "false": - expected_client_cert_source = None - expected_host = client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE) - else: - expected_client_cert_source = client_cert_source_callback - expected_host = client.DEFAULT_MTLS_ENDPOINT - - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host=expected_host, - scopes=None, - client_cert_source_for_mtls=expected_client_cert_source, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - - # Check the case ADC client cert is provided. Whether client cert is used depends on - # GOOGLE_API_USE_CLIENT_CERTIFICATE value. - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}): - with mock.patch.object(transport_class, '__init__') as patched: - with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=True): - with mock.patch('google.auth.transport.mtls.default_client_cert_source', return_value=client_cert_source_callback): - if use_client_cert_env == "false": - expected_host = client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE) - expected_client_cert_source = None - else: - expected_host = client.DEFAULT_MTLS_ENDPOINT - expected_client_cert_source = client_cert_source_callback - - patched.return_value = None - client = client_class(transport=transport_name) - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host=expected_host, - scopes=None, - client_cert_source_for_mtls=expected_client_cert_source, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - - # Check the case client_cert_source and ADC client cert are not provided. - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}): - with mock.patch.object(transport_class, '__init__') as patched: - with mock.patch("google.auth.transport.mtls.has_default_client_cert_source", return_value=False): - patched.return_value = None - client = client_class(transport=transport_name) - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE), - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - - -@pytest.mark.parametrize("client_class", [ - DataplexServiceClient, DataplexServiceAsyncClient -]) -@mock.patch.object(DataplexServiceClient, "DEFAULT_ENDPOINT", modify_default_endpoint(DataplexServiceClient)) -@mock.patch.object(DataplexServiceAsyncClient, "DEFAULT_ENDPOINT", modify_default_endpoint(DataplexServiceAsyncClient)) -def test_dataplex_service_client_get_mtls_endpoint_and_cert_source(client_class): - mock_client_cert_source = mock.Mock() - - # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "true". 
- with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): - mock_api_endpoint = "foo" - options = client_options.ClientOptions(client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint) - api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source(options) - assert api_endpoint == mock_api_endpoint - assert cert_source == mock_client_cert_source - - # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "false". - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "false"}): - mock_client_cert_source = mock.Mock() - mock_api_endpoint = "foo" - options = client_options.ClientOptions(client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint) - api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source(options) - assert api_endpoint == mock_api_endpoint - assert cert_source is None - - # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "never". - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): - api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() - assert api_endpoint == client_class.DEFAULT_ENDPOINT - assert cert_source is None - - # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "always". - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): - api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() - assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT - assert cert_source is None - - # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert doesn't exist. - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): - with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=False): - api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() - assert api_endpoint == client_class.DEFAULT_ENDPOINT - assert cert_source is None - - # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert exists. - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): - with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=True): - with mock.patch('google.auth.transport.mtls.default_client_cert_source', return_value=mock_client_cert_source): - api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() - assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT - assert cert_source == mock_client_cert_source - - # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has - # unsupported value. - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): - with pytest.raises(MutualTLSChannelError) as excinfo: - client_class.get_mtls_endpoint_and_cert_source() - - assert str(excinfo.value) == "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" - - # Check the case GOOGLE_API_USE_CLIENT_CERTIFICATE has unsupported value. 
- with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"}): - with pytest.raises(ValueError) as excinfo: - client_class.get_mtls_endpoint_and_cert_source() - - assert str(excinfo.value) == "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" - -@pytest.mark.parametrize("client_class", [ - DataplexServiceClient, DataplexServiceAsyncClient -]) -@mock.patch.object(DataplexServiceClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(DataplexServiceClient)) -@mock.patch.object(DataplexServiceAsyncClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(DataplexServiceAsyncClient)) -def test_dataplex_service_client_client_api_endpoint(client_class): - mock_client_cert_source = client_cert_source_callback - api_override = "foo.com" - default_universe = DataplexServiceClient._DEFAULT_UNIVERSE - default_endpoint = DataplexServiceClient._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=default_universe) - mock_universe = "bar.com" - mock_endpoint = DataplexServiceClient._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=mock_universe) - - # If ClientOptions.api_endpoint is set and GOOGLE_API_USE_CLIENT_CERTIFICATE="true", - # use ClientOptions.api_endpoint as the api endpoint regardless. - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): - with mock.patch("google.auth.transport.requests.AuthorizedSession.configure_mtls_channel"): - options = client_options.ClientOptions(client_cert_source=mock_client_cert_source, api_endpoint=api_override) - client = client_class(client_options=options, credentials=ga_credentials.AnonymousCredentials()) - assert client.api_endpoint == api_override - - # If ClientOptions.api_endpoint is not set and GOOGLE_API_USE_MTLS_ENDPOINT="never", - # use the _DEFAULT_ENDPOINT_TEMPLATE populated with GDU as the api endpoint. - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): - client = client_class(credentials=ga_credentials.AnonymousCredentials()) - assert client.api_endpoint == default_endpoint - - # If ClientOptions.api_endpoint is not set and GOOGLE_API_USE_MTLS_ENDPOINT="always", - # use the DEFAULT_MTLS_ENDPOINT as the api endpoint. - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): - client = client_class(credentials=ga_credentials.AnonymousCredentials()) - assert client.api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT - - # If ClientOptions.api_endpoint is not set, GOOGLE_API_USE_MTLS_ENDPOINT="auto" (default), - # GOOGLE_API_USE_CLIENT_CERTIFICATE="false" (default), default cert source doesn't exist, - # and ClientOptions.universe_domain="bar.com", - # use the _DEFAULT_ENDPOINT_TEMPLATE populated with universe domain as the api endpoint. 
- options = client_options.ClientOptions() - universe_exists = hasattr(options, "universe_domain") - if universe_exists: - options = client_options.ClientOptions(universe_domain=mock_universe) - client = client_class(client_options=options, credentials=ga_credentials.AnonymousCredentials()) - else: - client = client_class(client_options=options, credentials=ga_credentials.AnonymousCredentials()) - assert client.api_endpoint == (mock_endpoint if universe_exists else default_endpoint) - assert client.universe_domain == (mock_universe if universe_exists else default_universe) - - # If ClientOptions does not have a universe domain attribute and GOOGLE_API_USE_MTLS_ENDPOINT="never", - # use the _DEFAULT_ENDPOINT_TEMPLATE populated with GDU as the api endpoint. - options = client_options.ClientOptions() - if hasattr(options, "universe_domain"): - delattr(options, "universe_domain") - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): - client = client_class(client_options=options, credentials=ga_credentials.AnonymousCredentials()) - assert client.api_endpoint == default_endpoint - - -@pytest.mark.parametrize("client_class,transport_class,transport_name", [ - (DataplexServiceClient, transports.DataplexServiceGrpcTransport, "grpc"), - (DataplexServiceAsyncClient, transports.DataplexServiceGrpcAsyncIOTransport, "grpc_asyncio"), -]) -def test_dataplex_service_client_client_options_scopes(client_class, transport_class, transport_name): - # Check the case scopes are provided. - options = client_options.ClientOptions( - scopes=["1", "2"], - ) - with mock.patch.object(transport_class, '__init__') as patched: - patched.return_value = None - client = client_class(client_options=options, transport=transport_name) - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE), - scopes=["1", "2"], - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - -@pytest.mark.parametrize("client_class,transport_class,transport_name,grpc_helpers", [ - (DataplexServiceClient, transports.DataplexServiceGrpcTransport, "grpc", grpc_helpers), - (DataplexServiceAsyncClient, transports.DataplexServiceGrpcAsyncIOTransport, "grpc_asyncio", grpc_helpers_async), -]) -def test_dataplex_service_client_client_options_credentials_file(client_class, transport_class, transport_name, grpc_helpers): - # Check the case credentials file is provided. 
- options = client_options.ClientOptions( - credentials_file="credentials.json" - ) - - with mock.patch.object(transport_class, '__init__') as patched: - patched.return_value = None - client = client_class(client_options=options, transport=transport_name) - patched.assert_called_once_with( - credentials=None, - credentials_file="credentials.json", - host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE), - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - -def test_dataplex_service_client_client_options_from_dict(): - with mock.patch('google.cloud.dataplex_v1.services.dataplex_service.transports.DataplexServiceGrpcTransport.__init__') as grpc_transport: - grpc_transport.return_value = None - client = DataplexServiceClient( - client_options={'api_endpoint': 'squid.clam.whelk'} - ) - grpc_transport.assert_called_once_with( - credentials=None, - credentials_file=None, - host="squid.clam.whelk", - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - - -@pytest.mark.parametrize("client_class,transport_class,transport_name,grpc_helpers", [ - (DataplexServiceClient, transports.DataplexServiceGrpcTransport, "grpc", grpc_helpers), - (DataplexServiceAsyncClient, transports.DataplexServiceGrpcAsyncIOTransport, "grpc_asyncio", grpc_helpers_async), -]) -def test_dataplex_service_client_create_channel_credentials_file(client_class, transport_class, transport_name, grpc_helpers): - # Check the case credentials file is provided. - options = client_options.ClientOptions( - credentials_file="credentials.json" - ) - - with mock.patch.object(transport_class, '__init__') as patched: - patched.return_value = None - client = client_class(client_options=options, transport=transport_name) - patched.assert_called_once_with( - credentials=None, - credentials_file="credentials.json", - host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE), - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - - # test that the credentials from file are saved and used as the credentials. 
- with mock.patch.object( - google.auth, "load_credentials_from_file", autospec=True - ) as load_creds, mock.patch.object( - google.auth, "default", autospec=True - ) as adc, mock.patch.object( - grpc_helpers, "create_channel" - ) as create_channel: - creds = ga_credentials.AnonymousCredentials() - file_creds = ga_credentials.AnonymousCredentials() - load_creds.return_value = (file_creds, None) - adc.return_value = (creds, None) - client = client_class(client_options=options, transport=transport_name) - create_channel.assert_called_with( - "dataplex.googleapis.com:443", - credentials=file_creds, - credentials_file=None, - quota_project_id=None, - default_scopes=( - 'https://www.googleapis.com/auth/cloud-platform', -), - scopes=None, - default_host="dataplex.googleapis.com", - ssl_credentials=None, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ], - ) - - -@pytest.mark.parametrize("request_type", [ - service.CreateLakeRequest, - dict, -]) -def test_create_lake(request_type, transport: str = 'grpc'): - client = DataplexServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_lake), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = operations_pb2.Operation(name='operations/spam') - response = client.create_lake(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - request = service.CreateLakeRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, future.Future) - - -def test_create_lake_non_empty_request_with_auto_populated_field(): - # This test is a coverage failsafe to make sure that UUID4 fields are - # automatically populated, according to AIP-4235, with non-empty requests. - client = DataplexServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. - request = service.CreateLakeRequest( - parent='parent_value', - lake_id='lake_id_value', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_lake), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. 
- client.create_lake(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == service.CreateLakeRequest( - parent='parent_value', - lake_id='lake_id_value', - ) - -def test_create_lake_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = DataplexServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.create_lake in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.create_lake] = mock_rpc - request = {} - client.create_lake(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - # Operation methods call wrapper_fn to build a cached - # client._transport.operations_client instance on first rpc call. - # Subsequent calls should use the cached wrapper - wrapper_fn.reset_mock() - - client.create_lake(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_create_lake_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: - client = DataplexServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._client._transport.create_lake in client._client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.AsyncMock() - mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[client._client._transport.create_lake] = mock_rpc - - request = {} - await client.create_lake(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - # Operation methods call wrapper_fn to build a cached - # client._transport.operations_client instance on first rpc call. - # Subsequent calls should use the cached wrapper - wrapper_fn.reset_mock() - - await client.create_lake(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_create_lake_async(transport: str = 'grpc_asyncio', request_type=service.CreateLakeRequest): - client = DataplexServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.create_lake), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name='operations/spam') - ) - response = await client.create_lake(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - request = service.CreateLakeRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, future.Future) - - -@pytest.mark.asyncio -async def test_create_lake_async_from_dict(): - await test_create_lake_async(request_type=dict) - -def test_create_lake_field_headers(): - client = DataplexServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = service.CreateLakeRequest() - - request.parent = 'parent_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_lake), - '__call__') as call: - call.return_value = operations_pb2.Operation(name='operations/op') - client.create_lake(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'parent=parent_value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_create_lake_field_headers_async(): - client = DataplexServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = service.CreateLakeRequest() - - request.parent = 'parent_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_lake), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(operations_pb2.Operation(name='operations/op')) - await client.create_lake(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'parent=parent_value', - ) in kw['metadata'] - - -def test_create_lake_flattened(): - client = DataplexServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_lake), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = operations_pb2.Operation(name='operations/op') - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.create_lake( - parent='parent_value', - lake=resources.Lake(name='name_value'), - lake_id='lake_id_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. 
- assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - arg = args[0].parent - mock_val = 'parent_value' - assert arg == mock_val - arg = args[0].lake - mock_val = resources.Lake(name='name_value') - assert arg == mock_val - arg = args[0].lake_id - mock_val = 'lake_id_value' - assert arg == mock_val - - -def test_create_lake_flattened_error(): - client = DataplexServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.create_lake( - service.CreateLakeRequest(), - parent='parent_value', - lake=resources.Lake(name='name_value'), - lake_id='lake_id_value', - ) - -@pytest.mark.asyncio -async def test_create_lake_flattened_async(): - client = DataplexServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_lake), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = operations_pb2.Operation(name='operations/op') - - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name='operations/spam') - ) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.create_lake( - parent='parent_value', - lake=resources.Lake(name='name_value'), - lake_id='lake_id_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].parent - mock_val = 'parent_value' - assert arg == mock_val - arg = args[0].lake - mock_val = resources.Lake(name='name_value') - assert arg == mock_val - arg = args[0].lake_id - mock_val = 'lake_id_value' - assert arg == mock_val - -@pytest.mark.asyncio -async def test_create_lake_flattened_error_async(): - client = DataplexServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - await client.create_lake( - service.CreateLakeRequest(), - parent='parent_value', - lake=resources.Lake(name='name_value'), - lake_id='lake_id_value', - ) - - -@pytest.mark.parametrize("request_type", [ - service.UpdateLakeRequest, - dict, -]) -def test_update_lake(request_type, transport: str = 'grpc'): - client = DataplexServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_lake), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = operations_pb2.Operation(name='operations/spam') - response = client.update_lake(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - request = service.UpdateLakeRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. 
- assert isinstance(response, future.Future) - - -def test_update_lake_non_empty_request_with_auto_populated_field(): - # This test is a coverage failsafe to make sure that UUID4 fields are - # automatically populated, according to AIP-4235, with non-empty requests. - client = DataplexServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. - request = service.UpdateLakeRequest( - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_lake), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client.update_lake(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == service.UpdateLakeRequest( - ) - -def test_update_lake_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = DataplexServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.update_lake in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.update_lake] = mock_rpc - request = {} - client.update_lake(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - # Operation methods call wrapper_fn to build a cached - # client._transport.operations_client instance on first rpc call. - # Subsequent calls should use the cached wrapper - wrapper_fn.reset_mock() - - client.update_lake(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_update_lake_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: - client = DataplexServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._client._transport.update_lake in client._client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.AsyncMock() - mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[client._client._transport.update_lake] = mock_rpc - - request = {} - await client.update_lake(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - # Operation methods call wrapper_fn to build a cached - # client._transport.operations_client instance on first rpc call. 
- # Subsequent calls should use the cached wrapper - wrapper_fn.reset_mock() - - await client.update_lake(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_update_lake_async(transport: str = 'grpc_asyncio', request_type=service.UpdateLakeRequest): - client = DataplexServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_lake), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name='operations/spam') - ) - response = await client.update_lake(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - request = service.UpdateLakeRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, future.Future) - - -@pytest.mark.asyncio -async def test_update_lake_async_from_dict(): - await test_update_lake_async(request_type=dict) - -def test_update_lake_field_headers(): - client = DataplexServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = service.UpdateLakeRequest() - - request.lake.name = 'name_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_lake), - '__call__') as call: - call.return_value = operations_pb2.Operation(name='operations/op') - client.update_lake(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'lake.name=name_value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_update_lake_field_headers_async(): - client = DataplexServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = service.UpdateLakeRequest() - - request.lake.name = 'name_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_lake), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(operations_pb2.Operation(name='operations/op')) - await client.update_lake(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. 
- _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'lake.name=name_value', - ) in kw['metadata'] - - -def test_update_lake_flattened(): - client = DataplexServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_lake), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = operations_pb2.Operation(name='operations/op') - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.update_lake( - lake=resources.Lake(name='name_value'), - update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - arg = args[0].lake - mock_val = resources.Lake(name='name_value') - assert arg == mock_val - arg = args[0].update_mask - mock_val = field_mask_pb2.FieldMask(paths=['paths_value']) - assert arg == mock_val - - -def test_update_lake_flattened_error(): - client = DataplexServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.update_lake( - service.UpdateLakeRequest(), - lake=resources.Lake(name='name_value'), - update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), - ) - -@pytest.mark.asyncio -async def test_update_lake_flattened_async(): - client = DataplexServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_lake), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = operations_pb2.Operation(name='operations/op') - - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name='operations/spam') - ) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.update_lake( - lake=resources.Lake(name='name_value'), - update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].lake - mock_val = resources.Lake(name='name_value') - assert arg == mock_val - arg = args[0].update_mask - mock_val = field_mask_pb2.FieldMask(paths=['paths_value']) - assert arg == mock_val - -@pytest.mark.asyncio -async def test_update_lake_flattened_error_async(): - client = DataplexServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. 
- with pytest.raises(ValueError): - await client.update_lake( - service.UpdateLakeRequest(), - lake=resources.Lake(name='name_value'), - update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), - ) - - -@pytest.mark.parametrize("request_type", [ - service.DeleteLakeRequest, - dict, -]) -def test_delete_lake(request_type, transport: str = 'grpc'): - client = DataplexServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_lake), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = operations_pb2.Operation(name='operations/spam') - response = client.delete_lake(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - request = service.DeleteLakeRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, future.Future) - - -def test_delete_lake_non_empty_request_with_auto_populated_field(): - # This test is a coverage failsafe to make sure that UUID4 fields are - # automatically populated, according to AIP-4235, with non-empty requests. - client = DataplexServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. - request = service.DeleteLakeRequest( - name='name_value', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_lake), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client.delete_lake(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == service.DeleteLakeRequest( - name='name_value', - ) - -def test_delete_lake_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = DataplexServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.delete_lake in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.delete_lake] = mock_rpc - request = {} - client.delete_lake(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - # Operation methods call wrapper_fn to build a cached - # client._transport.operations_client instance on first rpc call. 
-        # Subsequent calls should use the cached wrapper
-        wrapper_fn.reset_mock()
-
-        client.delete_lake(request)
-
-        # Establish that a new wrapper was not created for this call
-        assert wrapper_fn.call_count == 0
-        assert mock_rpc.call_count == 2
-
-@pytest.mark.asyncio
-async def test_delete_lake_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"):
-    # Clients should use _prep_wrapped_messages to create cached wrapped rpcs,
-    # instead of constructing them on each call
-    with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn:
-        client = DataplexServiceAsyncClient(
-            credentials=async_anonymous_credentials(),
-            transport=transport,
-        )
-
-        # Should wrap all calls on client creation
-        assert wrapper_fn.call_count > 0
-        wrapper_fn.reset_mock()
-
-        # Ensure method has been cached
-        assert client._client._transport.delete_lake in client._client._transport._wrapped_methods
-
-        # Replace cached wrapped function with mock
-        mock_rpc = mock.AsyncMock()
-        mock_rpc.return_value = mock.Mock()
-        client._client._transport._wrapped_methods[client._client._transport.delete_lake] = mock_rpc
-
-        request = {}
-        await client.delete_lake(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert mock_rpc.call_count == 1
-
-        # Operation methods call wrapper_fn to build a cached
-        # client._transport.operations_client instance on first rpc call.
-        # Subsequent calls should use the cached wrapper
-        wrapper_fn.reset_mock()
-
-        await client.delete_lake(request)
-
-        # Establish that a new wrapper was not created for this call
-        assert wrapper_fn.call_count == 0
-        assert mock_rpc.call_count == 2
-
-@pytest.mark.asyncio
-async def test_delete_lake_async(transport: str = 'grpc_asyncio', request_type=service.DeleteLakeRequest):
-    client = DataplexServiceAsyncClient(
-        credentials=async_anonymous_credentials(),
-        transport=transport,
-    )
-
-    # Everything is optional in proto3 as far as the runtime is concerned,
-    # and we are mocking out the actual API, so just send an empty request.
-    request = request_type()
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.delete_lake),
-            '__call__') as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
-            operations_pb2.Operation(name='operations/spam')
-        )
-        response = await client.delete_lake(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert len(call.mock_calls)
-        _, args, _ = call.mock_calls[0]
-        request = service.DeleteLakeRequest()
-        assert args[0] == request
-
-    # Establish that the response is the type that we expect.
-    assert isinstance(response, future.Future)
-
-
-@pytest.mark.asyncio
-async def test_delete_lake_async_from_dict():
-    await test_delete_lake_async(request_type=dict)
-
-def test_delete_lake_field_headers():
-    client = DataplexServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-    )
-
-    # Any value that is part of the HTTP/1.1 URI should be sent as
-    # a field header. Set these to a non-empty value.
-    request = service.DeleteLakeRequest()
-
-    request.name = 'name_value'
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.delete_lake),
-            '__call__') as call:
-        call.return_value = operations_pb2.Operation(name='operations/op')
-        client.delete_lake(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert len(call.mock_calls) == 1
-        _, args, _ = call.mock_calls[0]
-        assert args[0] == request
-
-    # Establish that the field header was sent.
-    _, _, kw = call.mock_calls[0]
-    assert (
-        'x-goog-request-params',
-        'name=name_value',
-    ) in kw['metadata']
-
-
-@pytest.mark.asyncio
-async def test_delete_lake_field_headers_async():
-    client = DataplexServiceAsyncClient(
-        credentials=async_anonymous_credentials(),
-    )
-
-    # Any value that is part of the HTTP/1.1 URI should be sent as
-    # a field header. Set these to a non-empty value.
-    request = service.DeleteLakeRequest()
-
-    request.name = 'name_value'
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.delete_lake),
-            '__call__') as call:
-        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(operations_pb2.Operation(name='operations/op'))
-        await client.delete_lake(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert len(call.mock_calls)
-        _, args, _ = call.mock_calls[0]
-        assert args[0] == request
-
-    # Establish that the field header was sent.
-    _, _, kw = call.mock_calls[0]
-    assert (
-        'x-goog-request-params',
-        'name=name_value',
-    ) in kw['metadata']
-
-
-def test_delete_lake_flattened():
-    client = DataplexServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-    )
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.delete_lake),
-            '__call__') as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = operations_pb2.Operation(name='operations/op')
-        # Call the method with a truthy value for each flattened field,
-        # using the keyword arguments to the method.
-        client.delete_lake(
-            name='name_value',
-        )
-
-        # Establish that the underlying call was made with the expected
-        # request object values.
-        assert len(call.mock_calls) == 1
-        _, args, _ = call.mock_calls[0]
-        arg = args[0].name
-        mock_val = 'name_value'
-        assert arg == mock_val
-
-
-def test_delete_lake_flattened_error():
-    client = DataplexServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-    )
-
-    # Attempting to call a method with both a request object and flattened
-    # fields is an error.
-    with pytest.raises(ValueError):
-        client.delete_lake(
-            service.DeleteLakeRequest(),
-            name='name_value',
-        )
-
-@pytest.mark.asyncio
-async def test_delete_lake_flattened_async():
-    client = DataplexServiceAsyncClient(
-        credentials=async_anonymous_credentials(),
-    )
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.delete_lake),
-            '__call__') as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
-            operations_pb2.Operation(name='operations/spam')
-        )
-        # Call the method with a truthy value for each flattened field,
-        # using the keyword arguments to the method.
-        response = await client.delete_lake(
-            name='name_value',
-        )
-
-        # Establish that the underlying call was made with the expected
-        # request object values.
-        assert len(call.mock_calls)
-        _, args, _ = call.mock_calls[0]
-        arg = args[0].name
-        mock_val = 'name_value'
-        assert arg == mock_val
-
-@pytest.mark.asyncio
-async def test_delete_lake_flattened_error_async():
-    client = DataplexServiceAsyncClient(
-        credentials=async_anonymous_credentials(),
-    )
-
-    # Attempting to call a method with both a request object and flattened
-    # fields is an error.
-    with pytest.raises(ValueError):
-        await client.delete_lake(
-            service.DeleteLakeRequest(),
-            name='name_value',
-        )
-
-
-@pytest.mark.parametrize("request_type", [
-    service.ListLakesRequest,
-    dict,
-])
-def test_list_lakes(request_type, transport: str = 'grpc'):
-    client = DataplexServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport=transport,
-    )
-
-    # Everything is optional in proto3 as far as the runtime is concerned,
-    # and we are mocking out the actual API, so just send an empty request.
-    request = request_type()
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.list_lakes),
-            '__call__') as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = service.ListLakesResponse(
-            next_page_token='next_page_token_value',
-            unreachable_locations=['unreachable_locations_value'],
-        )
-        response = client.list_lakes(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert len(call.mock_calls) == 1
-        _, args, _ = call.mock_calls[0]
-        request = service.ListLakesRequest()
-        assert args[0] == request
-
-    # Establish that the response is the type that we expect.
-    assert isinstance(response, pagers.ListLakesPager)
-    assert response.next_page_token == 'next_page_token_value'
-    assert response.unreachable_locations == ['unreachable_locations_value']
-
-
-def test_list_lakes_non_empty_request_with_auto_populated_field():
-    # This test is a coverage failsafe to make sure that UUID4 fields are
-    # automatically populated, according to AIP-4235, with non-empty requests.
-    client = DataplexServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport='grpc',
-    )
-
-    # Populate all string fields in the request which are not UUID4
-    # since we want to check that UUID4 are populated automatically
-    # if they meet the requirements of AIP 4235.
-    request = service.ListLakesRequest(
-        parent='parent_value',
-        page_token='page_token_value',
-        filter='filter_value',
-        order_by='order_by_value',
-    )
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.list_lakes),
-            '__call__') as call:
-        call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string.
-        client.list_lakes(request=request)
-        call.assert_called()
-        _, args, _ = call.mock_calls[0]
-        assert args[0] == service.ListLakesRequest(
-            parent='parent_value',
-            page_token='page_token_value',
-            filter='filter_value',
-            order_by='order_by_value',
-        )
-
-def test_list_lakes_use_cached_wrapped_rpc():
-    # Clients should use _prep_wrapped_messages to create cached wrapped rpcs,
-    # instead of constructing them on each call
-    with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn:
-        client = DataplexServiceClient(
-            credentials=ga_credentials.AnonymousCredentials(),
-            transport="grpc",
-        )
-
-        # Should wrap all calls on client creation
-        assert wrapper_fn.call_count > 0
-        wrapper_fn.reset_mock()
-
-        # Ensure method has been cached
-        assert client._transport.list_lakes in client._transport._wrapped_methods
-
-        # Replace cached wrapped function with mock
-        mock_rpc = mock.Mock()
-        mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string.
-        client._transport._wrapped_methods[client._transport.list_lakes] = mock_rpc
-        request = {}
-        client.list_lakes(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert mock_rpc.call_count == 1
-
-        client.list_lakes(request)
-
-        # Establish that a new wrapper was not created for this call
-        assert wrapper_fn.call_count == 0
-        assert mock_rpc.call_count == 2
-
-@pytest.mark.asyncio
-async def test_list_lakes_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"):
-    # Clients should use _prep_wrapped_messages to create cached wrapped rpcs,
-    # instead of constructing them on each call
-    with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn:
-        client = DataplexServiceAsyncClient(
-            credentials=async_anonymous_credentials(),
-            transport=transport,
-        )
-
-        # Should wrap all calls on client creation
-        assert wrapper_fn.call_count > 0
-        wrapper_fn.reset_mock()
-
-        # Ensure method has been cached
-        assert client._client._transport.list_lakes in client._client._transport._wrapped_methods
-
-        # Replace cached wrapped function with mock
-        mock_rpc = mock.AsyncMock()
-        mock_rpc.return_value = mock.Mock()
-        client._client._transport._wrapped_methods[client._client._transport.list_lakes] = mock_rpc
-
-        request = {}
-        await client.list_lakes(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert mock_rpc.call_count == 1
-
-        await client.list_lakes(request)
-
-        # Establish that a new wrapper was not created for this call
-        assert wrapper_fn.call_count == 0
-        assert mock_rpc.call_count == 2
-
-@pytest.mark.asyncio
-async def test_list_lakes_async(transport: str = 'grpc_asyncio', request_type=service.ListLakesRequest):
-    client = DataplexServiceAsyncClient(
-        credentials=async_anonymous_credentials(),
-        transport=transport,
-    )
-
-    # Everything is optional in proto3 as far as the runtime is concerned,
-    # and we are mocking out the actual API, so just send an empty request.
-    request = request_type()
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.list_lakes),
-            '__call__') as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(service.ListLakesResponse(
-            next_page_token='next_page_token_value',
-            unreachable_locations=['unreachable_locations_value'],
-        ))
-        response = await client.list_lakes(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert len(call.mock_calls)
-        _, args, _ = call.mock_calls[0]
-        request = service.ListLakesRequest()
-        assert args[0] == request
-
-    # Establish that the response is the type that we expect.
-    assert isinstance(response, pagers.ListLakesAsyncPager)
-    assert response.next_page_token == 'next_page_token_value'
-    assert response.unreachable_locations == ['unreachable_locations_value']
-
-
-@pytest.mark.asyncio
-async def test_list_lakes_async_from_dict():
-    await test_list_lakes_async(request_type=dict)
-
-def test_list_lakes_field_headers():
-    client = DataplexServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-    )
-
-    # Any value that is part of the HTTP/1.1 URI should be sent as
-    # a field header. Set these to a non-empty value.
-    request = service.ListLakesRequest()
-
-    request.parent = 'parent_value'
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.list_lakes),
-            '__call__') as call:
-        call.return_value = service.ListLakesResponse()
-        client.list_lakes(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert len(call.mock_calls) == 1
-        _, args, _ = call.mock_calls[0]
-        assert args[0] == request
-
-    # Establish that the field header was sent.
-    _, _, kw = call.mock_calls[0]
-    assert (
-        'x-goog-request-params',
-        'parent=parent_value',
-    ) in kw['metadata']
-
-
-@pytest.mark.asyncio
-async def test_list_lakes_field_headers_async():
-    client = DataplexServiceAsyncClient(
-        credentials=async_anonymous_credentials(),
-    )
-
-    # Any value that is part of the HTTP/1.1 URI should be sent as
-    # a field header. Set these to a non-empty value.
-    request = service.ListLakesRequest()
-
-    request.parent = 'parent_value'
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.list_lakes),
-            '__call__') as call:
-        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(service.ListLakesResponse())
-        await client.list_lakes(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert len(call.mock_calls)
-        _, args, _ = call.mock_calls[0]
-        assert args[0] == request
-
-    # Establish that the field header was sent.
-    _, _, kw = call.mock_calls[0]
-    assert (
-        'x-goog-request-params',
-        'parent=parent_value',
-    ) in kw['metadata']
-
-
-def test_list_lakes_flattened():
-    client = DataplexServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-    )
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.list_lakes),
-            '__call__') as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = service.ListLakesResponse()
-        # Call the method with a truthy value for each flattened field,
-        # using the keyword arguments to the method.
-        client.list_lakes(
-            parent='parent_value',
-        )
-
-        # Establish that the underlying call was made with the expected
-        # request object values.
-        assert len(call.mock_calls) == 1
-        _, args, _ = call.mock_calls[0]
-        arg = args[0].parent
-        mock_val = 'parent_value'
-        assert arg == mock_val
-
-
-def test_list_lakes_flattened_error():
-    client = DataplexServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-    )
-
-    # Attempting to call a method with both a request object and flattened
-    # fields is an error.
-    with pytest.raises(ValueError):
-        client.list_lakes(
-            service.ListLakesRequest(),
-            parent='parent_value',
-        )
-
-@pytest.mark.asyncio
-async def test_list_lakes_flattened_async():
-    client = DataplexServiceAsyncClient(
-        credentials=async_anonymous_credentials(),
-    )
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.list_lakes),
-            '__call__') as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(service.ListLakesResponse())
-        # Call the method with a truthy value for each flattened field,
-        # using the keyword arguments to the method.
-        response = await client.list_lakes(
-            parent='parent_value',
-        )
-
-        # Establish that the underlying call was made with the expected
-        # request object values.
-        assert len(call.mock_calls)
-        _, args, _ = call.mock_calls[0]
-        arg = args[0].parent
-        mock_val = 'parent_value'
-        assert arg == mock_val
-
-@pytest.mark.asyncio
-async def test_list_lakes_flattened_error_async():
-    client = DataplexServiceAsyncClient(
-        credentials=async_anonymous_credentials(),
-    )
-
-    # Attempting to call a method with both a request object and flattened
-    # fields is an error.
-    with pytest.raises(ValueError):
-        await client.list_lakes(
-            service.ListLakesRequest(),
-            parent='parent_value',
-        )
-
-
-def test_list_lakes_pager(transport_name: str = "grpc"):
-    client = DataplexServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport=transport_name,
-    )
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.list_lakes),
-            '__call__') as call:
-        # Set the response to a series of pages.
-        call.side_effect = (
-            service.ListLakesResponse(
-                lakes=[
-                    resources.Lake(),
-                    resources.Lake(),
-                    resources.Lake(),
-                ],
-                next_page_token='abc',
-            ),
-            service.ListLakesResponse(
-                lakes=[],
-                next_page_token='def',
-            ),
-            service.ListLakesResponse(
-                lakes=[
-                    resources.Lake(),
-                ],
-                next_page_token='ghi',
-            ),
-            service.ListLakesResponse(
-                lakes=[
-                    resources.Lake(),
-                    resources.Lake(),
-                ],
-            ),
-            RuntimeError,
-        )
-
-        expected_metadata = ()
-        retry = retries.Retry()
-        timeout = 5
-        expected_metadata = tuple(expected_metadata) + (
-            gapic_v1.routing_header.to_grpc_metadata((
-                ('parent', ''),
-            )),
-        )
-        pager = client.list_lakes(request={}, retry=retry, timeout=timeout)
-
-        assert pager._metadata == expected_metadata
-        assert pager._retry == retry
-        assert pager._timeout == timeout
-
-        results = list(pager)
-        assert len(results) == 6
-        assert all(isinstance(i, resources.Lake)
-                   for i in results)
-
-
-def test_list_lakes_pages(transport_name: str = "grpc"):
-    client = DataplexServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport=transport_name,
-    )
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.list_lakes),
-            '__call__') as call:
-        # Set the response to a series of pages.
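-        # Four pages are staged (3, 0, 1 and 2 lakes); the trailing RuntimeError
-        # makes any fetch past the final page fail loudly.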
-        call.side_effect = (
-            service.ListLakesResponse(
-                lakes=[
-                    resources.Lake(),
-                    resources.Lake(),
-                    resources.Lake(),
-                ],
-                next_page_token='abc',
-            ),
-            service.ListLakesResponse(
-                lakes=[],
-                next_page_token='def',
-            ),
-            service.ListLakesResponse(
-                lakes=[
-                    resources.Lake(),
-                ],
-                next_page_token='ghi',
-            ),
-            service.ListLakesResponse(
-                lakes=[
-                    resources.Lake(),
-                    resources.Lake(),
-                ],
-            ),
-            RuntimeError,
-        )
-        pages = list(client.list_lakes(request={}).pages)
-        for page_, token in zip(pages, ['abc', 'def', 'ghi', '']):
-            assert page_.raw_page.next_page_token == token
-
-@pytest.mark.asyncio
-async def test_list_lakes_async_pager():
-    client = DataplexServiceAsyncClient(
-        credentials=async_anonymous_credentials(),
-    )
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.list_lakes),
-            '__call__', new_callable=mock.AsyncMock) as call:
-        # Set the response to a series of pages.
-        call.side_effect = (
-            service.ListLakesResponse(
-                lakes=[
-                    resources.Lake(),
-                    resources.Lake(),
-                    resources.Lake(),
-                ],
-                next_page_token='abc',
-            ),
-            service.ListLakesResponse(
-                lakes=[],
-                next_page_token='def',
-            ),
-            service.ListLakesResponse(
-                lakes=[
-                    resources.Lake(),
-                ],
-                next_page_token='ghi',
-            ),
-            service.ListLakesResponse(
-                lakes=[
-                    resources.Lake(),
-                    resources.Lake(),
-                ],
-            ),
-            RuntimeError,
-        )
-        async_pager = await client.list_lakes(request={},)
-        assert async_pager.next_page_token == 'abc'
-        responses = []
-        async for response in async_pager: # pragma: no branch
-            responses.append(response)
-
-        assert len(responses) == 6
-        assert all(isinstance(i, resources.Lake)
-                   for i in responses)
-
-
-@pytest.mark.asyncio
-async def test_list_lakes_async_pages():
-    client = DataplexServiceAsyncClient(
-        credentials=async_anonymous_credentials(),
-    )
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.list_lakes),
-            '__call__', new_callable=mock.AsyncMock) as call:
-        # Set the response to a series of pages.
-        call.side_effect = (
-            service.ListLakesResponse(
-                lakes=[
-                    resources.Lake(),
-                    resources.Lake(),
-                    resources.Lake(),
-                ],
-                next_page_token='abc',
-            ),
-            service.ListLakesResponse(
-                lakes=[],
-                next_page_token='def',
-            ),
-            service.ListLakesResponse(
-                lakes=[
-                    resources.Lake(),
-                ],
-                next_page_token='ghi',
-            ),
-            service.ListLakesResponse(
-                lakes=[
-                    resources.Lake(),
-                    resources.Lake(),
-                ],
-            ),
-            RuntimeError,
-        )
-        pages = []
-        # Workaround issue in python 3.9 related to code coverage by adding `# pragma: no branch`
-        # See https://github.com/googleapis/gapic-generator-python/pull/1174#issuecomment-1025132372
-        async for page_ in ( # pragma: no branch
-            await client.list_lakes(request={})
-        ).pages:
-            pages.append(page_)
-        for page_, token in zip(pages, ['abc', 'def', 'ghi', '']):
-            assert page_.raw_page.next_page_token == token
-
-@pytest.mark.parametrize("request_type", [
-    service.GetLakeRequest,
-    dict,
-])
-def test_get_lake(request_type, transport: str = 'grpc'):
-    client = DataplexServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport=transport,
-    )
-
-    # Everything is optional in proto3 as far as the runtime is concerned,
-    # and we are mocking out the actual API, so just send an empty request.
-    request = request_type()
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.get_lake),
-            '__call__') as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = resources.Lake(
-            name='name_value',
-            display_name='display_name_value',
-            uid='uid_value',
-            description='description_value',
-            state=resources.State.ACTIVE,
-            service_account='service_account_value',
-        )
-        response = client.get_lake(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert len(call.mock_calls) == 1
-        _, args, _ = call.mock_calls[0]
-        request = service.GetLakeRequest()
-        assert args[0] == request
-
-    # Establish that the response is the type that we expect.
-    assert isinstance(response, resources.Lake)
-    assert response.name == 'name_value'
-    assert response.display_name == 'display_name_value'
-    assert response.uid == 'uid_value'
-    assert response.description == 'description_value'
-    assert response.state == resources.State.ACTIVE
-    assert response.service_account == 'service_account_value'
-
-
-def test_get_lake_non_empty_request_with_auto_populated_field():
-    # This test is a coverage failsafe to make sure that UUID4 fields are
-    # automatically populated, according to AIP-4235, with non-empty requests.
-    client = DataplexServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport='grpc',
-    )
-
-    # Populate all string fields in the request which are not UUID4
-    # since we want to check that UUID4 are populated automatically
-    # if they meet the requirements of AIP 4235.
-    request = service.GetLakeRequest(
-        name='name_value',
-    )
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.get_lake),
-            '__call__') as call:
-        call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string.
-        client.get_lake(request=request)
-        call.assert_called()
-        _, args, _ = call.mock_calls[0]
-        assert args[0] == service.GetLakeRequest(
-            name='name_value',
-        )
-
-def test_get_lake_use_cached_wrapped_rpc():
-    # Clients should use _prep_wrapped_messages to create cached wrapped rpcs,
-    # instead of constructing them on each call
-    with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn:
-        client = DataplexServiceClient(
-            credentials=ga_credentials.AnonymousCredentials(),
-            transport="grpc",
-        )
-
-        # Should wrap all calls on client creation
-        assert wrapper_fn.call_count > 0
-        wrapper_fn.reset_mock()
-
-        # Ensure method has been cached
-        assert client._transport.get_lake in client._transport._wrapped_methods
-
-        # Replace cached wrapped function with mock
-        mock_rpc = mock.Mock()
-        mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string.
-        client._transport._wrapped_methods[client._transport.get_lake] = mock_rpc
-        request = {}
-        client.get_lake(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert mock_rpc.call_count == 1
-
-        client.get_lake(request)
-
-        # Establish that a new wrapper was not created for this call
-        assert wrapper_fn.call_count == 0
-        assert mock_rpc.call_count == 2
-
-@pytest.mark.asyncio
-async def test_get_lake_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"):
-    # Clients should use _prep_wrapped_messages to create cached wrapped rpcs,
-    # instead of constructing them on each call
-    with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn:
-        client = DataplexServiceAsyncClient(
-            credentials=async_anonymous_credentials(),
-            transport=transport,
-        )
-
-        # Should wrap all calls on client creation
-        assert wrapper_fn.call_count > 0
-        wrapper_fn.reset_mock()
-
-        # Ensure method has been cached
-        assert client._client._transport.get_lake in client._client._transport._wrapped_methods
-
-        # Replace cached wrapped function with mock
-        mock_rpc = mock.AsyncMock()
-        mock_rpc.return_value = mock.Mock()
-        client._client._transport._wrapped_methods[client._client._transport.get_lake] = mock_rpc
-
-        request = {}
-        await client.get_lake(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert mock_rpc.call_count == 1
-
-        await client.get_lake(request)
-
-        # Establish that a new wrapper was not created for this call
-        assert wrapper_fn.call_count == 0
-        assert mock_rpc.call_count == 2
-
-@pytest.mark.asyncio
-async def test_get_lake_async(transport: str = 'grpc_asyncio', request_type=service.GetLakeRequest):
-    client = DataplexServiceAsyncClient(
-        credentials=async_anonymous_credentials(),
-        transport=transport,
-    )
-
-    # Everything is optional in proto3 as far as the runtime is concerned,
-    # and we are mocking out the actual API, so just send an empty request.
-    request = request_type()
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.get_lake),
-            '__call__') as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(resources.Lake(
-            name='name_value',
-            display_name='display_name_value',
-            uid='uid_value',
-            description='description_value',
-            state=resources.State.ACTIVE,
-            service_account='service_account_value',
-        ))
-        response = await client.get_lake(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert len(call.mock_calls)
-        _, args, _ = call.mock_calls[0]
-        request = service.GetLakeRequest()
-        assert args[0] == request
-
-    # Establish that the response is the type that we expect.
-    assert isinstance(response, resources.Lake)
-    assert response.name == 'name_value'
-    assert response.display_name == 'display_name_value'
-    assert response.uid == 'uid_value'
-    assert response.description == 'description_value'
-    assert response.state == resources.State.ACTIVE
-    assert response.service_account == 'service_account_value'
-
-
-@pytest.mark.asyncio
-async def test_get_lake_async_from_dict():
-    await test_get_lake_async(request_type=dict)
-
-def test_get_lake_field_headers():
-    client = DataplexServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-    )
-
-    # Any value that is part of the HTTP/1.1 URI should be sent as
-    # a field header. Set these to a non-empty value.
-    request = service.GetLakeRequest()
-
-    request.name = 'name_value'
-
-    # Mock the actual call within the gRPC stub, and fake the request.
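-    # Patching __call__ on the multicallable's type lets the test capture the
-    # request object and metadata (including routing headers) that get sent.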
-    with mock.patch.object(
-            type(client.transport.get_lake),
-            '__call__') as call:
-        call.return_value = resources.Lake()
-        client.get_lake(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert len(call.mock_calls) == 1
-        _, args, _ = call.mock_calls[0]
-        assert args[0] == request
-
-    # Establish that the field header was sent.
-    _, _, kw = call.mock_calls[0]
-    assert (
-        'x-goog-request-params',
-        'name=name_value',
-    ) in kw['metadata']
-
-
-@pytest.mark.asyncio
-async def test_get_lake_field_headers_async():
-    client = DataplexServiceAsyncClient(
-        credentials=async_anonymous_credentials(),
-    )
-
-    # Any value that is part of the HTTP/1.1 URI should be sent as
-    # a field header. Set these to a non-empty value.
-    request = service.GetLakeRequest()
-
-    request.name = 'name_value'
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.get_lake),
-            '__call__') as call:
-        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(resources.Lake())
-        await client.get_lake(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert len(call.mock_calls)
-        _, args, _ = call.mock_calls[0]
-        assert args[0] == request
-
-    # Establish that the field header was sent.
-    _, _, kw = call.mock_calls[0]
-    assert (
-        'x-goog-request-params',
-        'name=name_value',
-    ) in kw['metadata']
-
-
-def test_get_lake_flattened():
-    client = DataplexServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-    )
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.get_lake),
-            '__call__') as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = resources.Lake()
-        # Call the method with a truthy value for each flattened field,
-        # using the keyword arguments to the method.
-        client.get_lake(
-            name='name_value',
-        )
-
-        # Establish that the underlying call was made with the expected
-        # request object values.
-        assert len(call.mock_calls) == 1
-        _, args, _ = call.mock_calls[0]
-        arg = args[0].name
-        mock_val = 'name_value'
-        assert arg == mock_val
-
-
-def test_get_lake_flattened_error():
-    client = DataplexServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-    )
-
-    # Attempting to call a method with both a request object and flattened
-    # fields is an error.
-    with pytest.raises(ValueError):
-        client.get_lake(
-            service.GetLakeRequest(),
-            name='name_value',
-        )
-
-@pytest.mark.asyncio
-async def test_get_lake_flattened_async():
-    client = DataplexServiceAsyncClient(
-        credentials=async_anonymous_credentials(),
-    )
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.get_lake),
-            '__call__') as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(resources.Lake())
-        # Call the method with a truthy value for each flattened field,
-        # using the keyword arguments to the method.
-        response = await client.get_lake(
-            name='name_value',
-        )
-
-        # Establish that the underlying call was made with the expected
-        # request object values.
-        assert len(call.mock_calls)
-        _, args, _ = call.mock_calls[0]
-        arg = args[0].name
-        mock_val = 'name_value'
-        assert arg == mock_val
-
-@pytest.mark.asyncio
-async def test_get_lake_flattened_error_async():
-    client = DataplexServiceAsyncClient(
-        credentials=async_anonymous_credentials(),
-    )
-
-    # Attempting to call a method with both a request object and flattened
-    # fields is an error.
-    with pytest.raises(ValueError):
-        await client.get_lake(
-            service.GetLakeRequest(),
-            name='name_value',
-        )
-
-
-@pytest.mark.parametrize("request_type", [
-    service.ListLakeActionsRequest,
-    dict,
-])
-def test_list_lake_actions(request_type, transport: str = 'grpc'):
-    client = DataplexServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport=transport,
-    )
-
-    # Everything is optional in proto3 as far as the runtime is concerned,
-    # and we are mocking out the actual API, so just send an empty request.
-    request = request_type()
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.list_lake_actions),
-            '__call__') as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = service.ListActionsResponse(
-            next_page_token='next_page_token_value',
-        )
-        response = client.list_lake_actions(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert len(call.mock_calls) == 1
-        _, args, _ = call.mock_calls[0]
-        request = service.ListLakeActionsRequest()
-        assert args[0] == request
-
-    # Establish that the response is the type that we expect.
-    assert isinstance(response, pagers.ListLakeActionsPager)
-    assert response.next_page_token == 'next_page_token_value'
-
-
-def test_list_lake_actions_non_empty_request_with_auto_populated_field():
-    # This test is a coverage failsafe to make sure that UUID4 fields are
-    # automatically populated, according to AIP-4235, with non-empty requests.
-    client = DataplexServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport='grpc',
-    )
-
-    # Populate all string fields in the request which are not UUID4
-    # since we want to check that UUID4 are populated automatically
-    # if they meet the requirements of AIP 4235.
-    request = service.ListLakeActionsRequest(
-        parent='parent_value',
-        page_token='page_token_value',
-    )
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.list_lake_actions),
-            '__call__') as call:
-        call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string.
-        client.list_lake_actions(request=request)
-        call.assert_called()
-        _, args, _ = call.mock_calls[0]
-        assert args[0] == service.ListLakeActionsRequest(
-            parent='parent_value',
-            page_token='page_token_value',
-        )
-
-def test_list_lake_actions_use_cached_wrapped_rpc():
-    # Clients should use _prep_wrapped_messages to create cached wrapped rpcs,
-    # instead of constructing them on each call
-    with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn:
-        client = DataplexServiceClient(
-            credentials=ga_credentials.AnonymousCredentials(),
-            transport="grpc",
-        )
-
-        # Should wrap all calls on client creation
-        assert wrapper_fn.call_count > 0
-        wrapper_fn.reset_mock()
-
-        # Ensure method has been cached
-        assert client._transport.list_lake_actions in client._transport._wrapped_methods
-
-        # Replace cached wrapped function with mock
-        mock_rpc = mock.Mock()
-        mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string.
-        client._transport._wrapped_methods[client._transport.list_lake_actions] = mock_rpc
-        request = {}
-        client.list_lake_actions(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert mock_rpc.call_count == 1
-
-        client.list_lake_actions(request)
-
-        # Establish that a new wrapper was not created for this call
-        assert wrapper_fn.call_count == 0
-        assert mock_rpc.call_count == 2
-
-@pytest.mark.asyncio
-async def test_list_lake_actions_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"):
-    # Clients should use _prep_wrapped_messages to create cached wrapped rpcs,
-    # instead of constructing them on each call
-    with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn:
-        client = DataplexServiceAsyncClient(
-            credentials=async_anonymous_credentials(),
-            transport=transport,
-        )
-
-        # Should wrap all calls on client creation
-        assert wrapper_fn.call_count > 0
-        wrapper_fn.reset_mock()
-
-        # Ensure method has been cached
-        assert client._client._transport.list_lake_actions in client._client._transport._wrapped_methods
-
-        # Replace cached wrapped function with mock
-        mock_rpc = mock.AsyncMock()
-        mock_rpc.return_value = mock.Mock()
-        client._client._transport._wrapped_methods[client._client._transport.list_lake_actions] = mock_rpc
-
-        request = {}
-        await client.list_lake_actions(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert mock_rpc.call_count == 1
-
-        await client.list_lake_actions(request)
-
-        # Establish that a new wrapper was not created for this call
-        assert wrapper_fn.call_count == 0
-        assert mock_rpc.call_count == 2
-
-@pytest.mark.asyncio
-async def test_list_lake_actions_async(transport: str = 'grpc_asyncio', request_type=service.ListLakeActionsRequest):
-    client = DataplexServiceAsyncClient(
-        credentials=async_anonymous_credentials(),
-        transport=transport,
-    )
-
-    # Everything is optional in proto3 as far as the runtime is concerned,
-    # and we are mocking out the actual API, so just send an empty request.
-    request = request_type()
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.list_lake_actions),
-            '__call__') as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(service.ListActionsResponse(
-            next_page_token='next_page_token_value',
-        ))
-        response = await client.list_lake_actions(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert len(call.mock_calls)
-        _, args, _ = call.mock_calls[0]
-        request = service.ListLakeActionsRequest()
-        assert args[0] == request
-
-    # Establish that the response is the type that we expect.
-    assert isinstance(response, pagers.ListLakeActionsAsyncPager)
-    assert response.next_page_token == 'next_page_token_value'
-
-
-@pytest.mark.asyncio
-async def test_list_lake_actions_async_from_dict():
-    await test_list_lake_actions_async(request_type=dict)
-
-def test_list_lake_actions_field_headers():
-    client = DataplexServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-    )
-
-    # Any value that is part of the HTTP/1.1 URI should be sent as
-    # a field header. Set these to a non-empty value.
-    request = service.ListLakeActionsRequest()
-
-    request.parent = 'parent_value'
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.list_lake_actions),
-            '__call__') as call:
-        call.return_value = service.ListActionsResponse()
-        client.list_lake_actions(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert len(call.mock_calls) == 1
-        _, args, _ = call.mock_calls[0]
-        assert args[0] == request
-
-    # Establish that the field header was sent.
-    _, _, kw = call.mock_calls[0]
-    assert (
-        'x-goog-request-params',
-        'parent=parent_value',
-    ) in kw['metadata']
-
-
-@pytest.mark.asyncio
-async def test_list_lake_actions_field_headers_async():
-    client = DataplexServiceAsyncClient(
-        credentials=async_anonymous_credentials(),
-    )
-
-    # Any value that is part of the HTTP/1.1 URI should be sent as
-    # a field header. Set these to a non-empty value.
-    request = service.ListLakeActionsRequest()
-
-    request.parent = 'parent_value'
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.list_lake_actions),
-            '__call__') as call:
-        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(service.ListActionsResponse())
-        await client.list_lake_actions(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert len(call.mock_calls)
-        _, args, _ = call.mock_calls[0]
-        assert args[0] == request
-
-    # Establish that the field header was sent.
-    _, _, kw = call.mock_calls[0]
-    assert (
-        'x-goog-request-params',
-        'parent=parent_value',
-    ) in kw['metadata']
-
-
-def test_list_lake_actions_flattened():
-    client = DataplexServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-    )
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.list_lake_actions),
-            '__call__') as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = service.ListActionsResponse()
-        # Call the method with a truthy value for each flattened field,
-        # using the keyword arguments to the method.
-        client.list_lake_actions(
-            parent='parent_value',
-        )
-
-        # Establish that the underlying call was made with the expected
-        # request object values.
-        assert len(call.mock_calls) == 1
-        _, args, _ = call.mock_calls[0]
-        arg = args[0].parent
-        mock_val = 'parent_value'
-        assert arg == mock_val
-
-
-def test_list_lake_actions_flattened_error():
-    client = DataplexServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-    )
-
-    # Attempting to call a method with both a request object and flattened
-    # fields is an error.
-    with pytest.raises(ValueError):
-        client.list_lake_actions(
-            service.ListLakeActionsRequest(),
-            parent='parent_value',
-        )
-
-@pytest.mark.asyncio
-async def test_list_lake_actions_flattened_async():
-    client = DataplexServiceAsyncClient(
-        credentials=async_anonymous_credentials(),
-    )
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.list_lake_actions),
-            '__call__') as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(service.ListActionsResponse())
-        # Call the method with a truthy value for each flattened field,
-        # using the keyword arguments to the method.
-        response = await client.list_lake_actions(
-            parent='parent_value',
-        )
-
-        # Establish that the underlying call was made with the expected
-        # request object values.
-        assert len(call.mock_calls)
-        _, args, _ = call.mock_calls[0]
-        arg = args[0].parent
-        mock_val = 'parent_value'
-        assert arg == mock_val
-
-@pytest.mark.asyncio
-async def test_list_lake_actions_flattened_error_async():
-    client = DataplexServiceAsyncClient(
-        credentials=async_anonymous_credentials(),
-    )
-
-    # Attempting to call a method with both a request object and flattened
-    # fields is an error.
-    with pytest.raises(ValueError):
-        await client.list_lake_actions(
-            service.ListLakeActionsRequest(),
-            parent='parent_value',
-        )
-
-
-def test_list_lake_actions_pager(transport_name: str = "grpc"):
-    client = DataplexServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport=transport_name,
-    )
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.list_lake_actions),
-            '__call__') as call:
-        # Set the response to a series of pages.
-        call.side_effect = (
-            service.ListActionsResponse(
-                actions=[
-                    resources.Action(),
-                    resources.Action(),
-                    resources.Action(),
-                ],
-                next_page_token='abc',
-            ),
-            service.ListActionsResponse(
-                actions=[],
-                next_page_token='def',
-            ),
-            service.ListActionsResponse(
-                actions=[
-                    resources.Action(),
-                ],
-                next_page_token='ghi',
-            ),
-            service.ListActionsResponse(
-                actions=[
-                    resources.Action(),
-                    resources.Action(),
-                ],
-            ),
-            RuntimeError,
-        )
-
-        expected_metadata = ()
-        retry = retries.Retry()
-        timeout = 5
-        expected_metadata = tuple(expected_metadata) + (
-            gapic_v1.routing_header.to_grpc_metadata((
-                ('parent', ''),
-            )),
-        )
-        pager = client.list_lake_actions(request={}, retry=retry, timeout=timeout)
-
-        assert pager._metadata == expected_metadata
-        assert pager._retry == retry
-        assert pager._timeout == timeout
-
-        results = list(pager)
-        assert len(results) == 6
-        assert all(isinstance(i, resources.Action)
-                   for i in results)
-
-
-def test_list_lake_actions_pages(transport_name: str = "grpc"):
-    client = DataplexServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport=transport_name,
-    )
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.list_lake_actions),
-            '__call__') as call:
-        # Set the response to a series of pages.
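-        # Four pages are staged (3, 0, 1 and 2 actions); the trailing RuntimeError
-        # makes any fetch past the final page fail loudly.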
-        call.side_effect = (
-            service.ListActionsResponse(
-                actions=[
-                    resources.Action(),
-                    resources.Action(),
-                    resources.Action(),
-                ],
-                next_page_token='abc',
-            ),
-            service.ListActionsResponse(
-                actions=[],
-                next_page_token='def',
-            ),
-            service.ListActionsResponse(
-                actions=[
-                    resources.Action(),
-                ],
-                next_page_token='ghi',
-            ),
-            service.ListActionsResponse(
-                actions=[
-                    resources.Action(),
-                    resources.Action(),
-                ],
-            ),
-            RuntimeError,
-        )
-        pages = list(client.list_lake_actions(request={}).pages)
-        for page_, token in zip(pages, ['abc', 'def', 'ghi', '']):
-            assert page_.raw_page.next_page_token == token
-
-@pytest.mark.asyncio
-async def test_list_lake_actions_async_pager():
-    client = DataplexServiceAsyncClient(
-        credentials=async_anonymous_credentials(),
-    )
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.list_lake_actions),
-            '__call__', new_callable=mock.AsyncMock) as call:
-        # Set the response to a series of pages.
-        call.side_effect = (
-            service.ListActionsResponse(
-                actions=[
-                    resources.Action(),
-                    resources.Action(),
-                    resources.Action(),
-                ],
-                next_page_token='abc',
-            ),
-            service.ListActionsResponse(
-                actions=[],
-                next_page_token='def',
-            ),
-            service.ListActionsResponse(
-                actions=[
-                    resources.Action(),
-                ],
-                next_page_token='ghi',
-            ),
-            service.ListActionsResponse(
-                actions=[
-                    resources.Action(),
-                    resources.Action(),
-                ],
-            ),
-            RuntimeError,
-        )
-        async_pager = await client.list_lake_actions(request={},)
-        assert async_pager.next_page_token == 'abc'
-        responses = []
-        async for response in async_pager: # pragma: no branch
-            responses.append(response)
-
-        assert len(responses) == 6
-        assert all(isinstance(i, resources.Action)
-                   for i in responses)
-
-
-@pytest.mark.asyncio
-async def test_list_lake_actions_async_pages():
-    client = DataplexServiceAsyncClient(
-        credentials=async_anonymous_credentials(),
-    )
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.list_lake_actions),
-            '__call__', new_callable=mock.AsyncMock) as call:
-        # Set the response to a series of pages.
-        call.side_effect = (
-            service.ListActionsResponse(
-                actions=[
-                    resources.Action(),
-                    resources.Action(),
-                    resources.Action(),
-                ],
-                next_page_token='abc',
-            ),
-            service.ListActionsResponse(
-                actions=[],
-                next_page_token='def',
-            ),
-            service.ListActionsResponse(
-                actions=[
-                    resources.Action(),
-                ],
-                next_page_token='ghi',
-            ),
-            service.ListActionsResponse(
-                actions=[
-                    resources.Action(),
-                    resources.Action(),
-                ],
-            ),
-            RuntimeError,
-        )
-        pages = []
-        # Workaround issue in python 3.9 related to code coverage by adding `# pragma: no branch`
-        # See https://github.com/googleapis/gapic-generator-python/pull/1174#issuecomment-1025132372
-        async for page_ in ( # pragma: no branch
-            await client.list_lake_actions(request={})
-        ).pages:
-            pages.append(page_)
-        for page_, token in zip(pages, ['abc', 'def', 'ghi', '']):
-            assert page_.raw_page.next_page_token == token
-
-@pytest.mark.parametrize("request_type", [
-    service.CreateZoneRequest,
-    dict,
-])
-def test_create_zone(request_type, transport: str = 'grpc'):
-    client = DataplexServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport=transport,
-    )
-
-    # Everything is optional in proto3 as far as the runtime is concerned,
-    # and we are mocking out the actual API, so just send an empty request.
-    request = request_type()
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.create_zone),
-            '__call__') as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = operations_pb2.Operation(name='operations/spam')
-        response = client.create_zone(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert len(call.mock_calls) == 1
-        _, args, _ = call.mock_calls[0]
-        request = service.CreateZoneRequest()
-        assert args[0] == request
-
-    # Establish that the response is the type that we expect.
-    assert isinstance(response, future.Future)
-
-
-def test_create_zone_non_empty_request_with_auto_populated_field():
-    # This test is a coverage failsafe to make sure that UUID4 fields are
-    # automatically populated, according to AIP-4235, with non-empty requests.
-    client = DataplexServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport='grpc',
-    )
-
-    # Populate all string fields in the request which are not UUID4
-    # since we want to check that UUID4 are populated automatically
-    # if they meet the requirements of AIP 4235.
-    request = service.CreateZoneRequest(
-        parent='parent_value',
-        zone_id='zone_id_value',
-    )
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.create_zone),
-            '__call__') as call:
-        call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string.
-        client.create_zone(request=request)
-        call.assert_called()
-        _, args, _ = call.mock_calls[0]
-        assert args[0] == service.CreateZoneRequest(
-            parent='parent_value',
-            zone_id='zone_id_value',
-        )
-
-def test_create_zone_use_cached_wrapped_rpc():
-    # Clients should use _prep_wrapped_messages to create cached wrapped rpcs,
-    # instead of constructing them on each call
-    with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn:
-        client = DataplexServiceClient(
-            credentials=ga_credentials.AnonymousCredentials(),
-            transport="grpc",
-        )
-
-        # Should wrap all calls on client creation
-        assert wrapper_fn.call_count > 0
-        wrapper_fn.reset_mock()
-
-        # Ensure method has been cached
-        assert client._transport.create_zone in client._transport._wrapped_methods
-
-        # Replace cached wrapped function with mock
-        mock_rpc = mock.Mock()
-        mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string.
-        client._transport._wrapped_methods[client._transport.create_zone] = mock_rpc
-        request = {}
-        client.create_zone(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert mock_rpc.call_count == 1
-
-        # Operation methods call wrapper_fn to build a cached
-        # client._transport.operations_client instance on first rpc call.
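-        # (The cached operations client is then reused when polling the returned operation.)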
-        # Subsequent calls should use the cached wrapper
-        wrapper_fn.reset_mock()
-
-        client.create_zone(request)
-
-        # Establish that a new wrapper was not created for this call
-        assert wrapper_fn.call_count == 0
-        assert mock_rpc.call_count == 2
-
-@pytest.mark.asyncio
-async def test_create_zone_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"):
-    # Clients should use _prep_wrapped_messages to create cached wrapped rpcs,
-    # instead of constructing them on each call
-    with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn:
-        client = DataplexServiceAsyncClient(
-            credentials=async_anonymous_credentials(),
-            transport=transport,
-        )
-
-        # Should wrap all calls on client creation
-        assert wrapper_fn.call_count > 0
-        wrapper_fn.reset_mock()
-
-        # Ensure method has been cached
-        assert client._client._transport.create_zone in client._client._transport._wrapped_methods
-
-        # Replace cached wrapped function with mock
-        mock_rpc = mock.AsyncMock()
-        mock_rpc.return_value = mock.Mock()
-        client._client._transport._wrapped_methods[client._client._transport.create_zone] = mock_rpc
-
-        request = {}
-        await client.create_zone(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert mock_rpc.call_count == 1
-
-        # Operation methods call wrapper_fn to build a cached
-        # client._transport.operations_client instance on first rpc call.
-        # Subsequent calls should use the cached wrapper
-        wrapper_fn.reset_mock()
-
-        await client.create_zone(request)
-
-        # Establish that a new wrapper was not created for this call
-        assert wrapper_fn.call_count == 0
-        assert mock_rpc.call_count == 2
-
-@pytest.mark.asyncio
-async def test_create_zone_async(transport: str = 'grpc_asyncio', request_type=service.CreateZoneRequest):
-    client = DataplexServiceAsyncClient(
-        credentials=async_anonymous_credentials(),
-        transport=transport,
-    )
-
-    # Everything is optional in proto3 as far as the runtime is concerned,
-    # and we are mocking out the actual API, so just send an empty request.
-    request = request_type()
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.create_zone),
-            '__call__') as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
-            operations_pb2.Operation(name='operations/spam')
-        )
-        response = await client.create_zone(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert len(call.mock_calls)
-        _, args, _ = call.mock_calls[0]
-        request = service.CreateZoneRequest()
-        assert args[0] == request
-
-    # Establish that the response is the type that we expect.
-    assert isinstance(response, future.Future)
-
-
-@pytest.mark.asyncio
-async def test_create_zone_async_from_dict():
-    await test_create_zone_async(request_type=dict)
-
-def test_create_zone_field_headers():
-    client = DataplexServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-    )
-
-    # Any value that is part of the HTTP/1.1 URI should be sent as
-    # a field header. Set these to a non-empty value.
-    request = service.CreateZoneRequest()
-
-    request.parent = 'parent_value'
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.create_zone),
-            '__call__') as call:
-        call.return_value = operations_pb2.Operation(name='operations/op')
-        client.create_zone(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert len(call.mock_calls) == 1
-        _, args, _ = call.mock_calls[0]
-        assert args[0] == request
-
-    # Establish that the field header was sent.
-    _, _, kw = call.mock_calls[0]
-    assert (
-        'x-goog-request-params',
-        'parent=parent_value',
-    ) in kw['metadata']
-
-
-@pytest.mark.asyncio
-async def test_create_zone_field_headers_async():
-    client = DataplexServiceAsyncClient(
-        credentials=async_anonymous_credentials(),
-    )
-
-    # Any value that is part of the HTTP/1.1 URI should be sent as
-    # a field header. Set these to a non-empty value.
-    request = service.CreateZoneRequest()
-
-    request.parent = 'parent_value'
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.create_zone),
-            '__call__') as call:
-        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(operations_pb2.Operation(name='operations/op'))
-        await client.create_zone(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert len(call.mock_calls)
-        _, args, _ = call.mock_calls[0]
-        assert args[0] == request
-
-    # Establish that the field header was sent.
-    _, _, kw = call.mock_calls[0]
-    assert (
-        'x-goog-request-params',
-        'parent=parent_value',
-    ) in kw['metadata']
-
-
-def test_create_zone_flattened():
-    client = DataplexServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-    )
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.create_zone),
-            '__call__') as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = operations_pb2.Operation(name='operations/op')
-        # Call the method with a truthy value for each flattened field,
-        # using the keyword arguments to the method.
-        client.create_zone(
-            parent='parent_value',
-            zone=resources.Zone(name='name_value'),
-            zone_id='zone_id_value',
-        )
-
-        # Establish that the underlying call was made with the expected
-        # request object values.
-        assert len(call.mock_calls) == 1
-        _, args, _ = call.mock_calls[0]
-        arg = args[0].parent
-        mock_val = 'parent_value'
-        assert arg == mock_val
-        arg = args[0].zone
-        mock_val = resources.Zone(name='name_value')
-        assert arg == mock_val
-        arg = args[0].zone_id
-        mock_val = 'zone_id_value'
-        assert arg == mock_val
-
-
-def test_create_zone_flattened_error():
-    client = DataplexServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-    )
-
-    # Attempting to call a method with both a request object and flattened
-    # fields is an error.
-    with pytest.raises(ValueError):
-        client.create_zone(
-            service.CreateZoneRequest(),
-            parent='parent_value',
-            zone=resources.Zone(name='name_value'),
-            zone_id='zone_id_value',
-        )
-
-@pytest.mark.asyncio
-async def test_create_zone_flattened_async():
-    client = DataplexServiceAsyncClient(
-        credentials=async_anonymous_credentials(),
-    )
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.create_zone),
-            '__call__') as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
-            operations_pb2.Operation(name='operations/spam')
-        )
-        # Call the method with a truthy value for each flattened field,
-        # using the keyword arguments to the method.
- response = await client.create_zone( - parent='parent_value', - zone=resources.Zone(name='name_value'), - zone_id='zone_id_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].parent - mock_val = 'parent_value' - assert arg == mock_val - arg = args[0].zone - mock_val = resources.Zone(name='name_value') - assert arg == mock_val - arg = args[0].zone_id - mock_val = 'zone_id_value' - assert arg == mock_val - -@pytest.mark.asyncio -async def test_create_zone_flattened_error_async(): - client = DataplexServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - await client.create_zone( - service.CreateZoneRequest(), - parent='parent_value', - zone=resources.Zone(name='name_value'), - zone_id='zone_id_value', - ) - - -@pytest.mark.parametrize("request_type", [ - service.UpdateZoneRequest, - dict, -]) -def test_update_zone(request_type, transport: str = 'grpc'): - client = DataplexServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_zone), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = operations_pb2.Operation(name='operations/spam') - response = client.update_zone(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - request = service.UpdateZoneRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, future.Future) - - -def test_update_zone_non_empty_request_with_auto_populated_field(): - # This test is a coverage failsafe to make sure that UUID4 fields are - # automatically populated, according to AIP-4235, with non-empty requests. - client = DataplexServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. - request = service.UpdateZoneRequest( - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_zone), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. 
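- # (The string `name` above guards code paths that may introspect
- # `response.name`, e.g. the compute-style operation plumbing noted in
- # the inline comment; a bare Mock attribute there could break string
- # handling.)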
- client.update_zone(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == service.UpdateZoneRequest( - ) - -def test_update_zone_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = DataplexServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.update_zone in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.update_zone] = mock_rpc - request = {} - client.update_zone(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - # Operation methods call wrapper_fn to build a cached - # client._transport.operations_client instance on first rpc call. - # Subsequent calls should use the cached wrapper - wrapper_fn.reset_mock() - - client.update_zone(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_update_zone_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: - client = DataplexServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._client._transport.update_zone in client._client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.AsyncMock() - mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[client._client._transport.update_zone] = mock_rpc - - request = {} - await client.update_zone(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - # Operation methods call wrapper_fn to build a cached - # client._transport.operations_client instance on first rpc call. - # Subsequent calls should use the cached wrapper - wrapper_fn.reset_mock() - - await client.update_zone(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_update_zone_async(transport: str = 'grpc_asyncio', request_type=service.UpdateZoneRequest): - client = DataplexServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_zone), - '__call__') as call: - # Designate an appropriate return value for the call. 
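- # FakeUnaryUnaryCall (google.api_core.grpc_helpers_async) wraps a plain
- # message in an awaitable stand-in for the grpc.aio call object, so the
- # `await` below resolves without a real channel; a minimal sketch of
- # the idea (illustrative only):
- #   fake = grpc_helpers_async.FakeUnaryUnaryCall(operations_pb2.Operation(name='operations/spam'))
- #   assert (await fake).name == 'operations/spam'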
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name='operations/spam') - ) - response = await client.update_zone(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - request = service.UpdateZoneRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, future.Future) - - -@pytest.mark.asyncio -async def test_update_zone_async_from_dict(): - await test_update_zone_async(request_type=dict) - -def test_update_zone_field_headers(): - client = DataplexServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = service.UpdateZoneRequest() - - request.zone.name = 'name_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_zone), - '__call__') as call: - call.return_value = operations_pb2.Operation(name='operations/op') - client.update_zone(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'zone.name=name_value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_update_zone_field_headers_async(): - client = DataplexServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = service.UpdateZoneRequest() - - request.zone.name = 'name_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_zone), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(operations_pb2.Operation(name='operations/op')) - await client.update_zone(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'zone.name=name_value', - ) in kw['metadata'] - - -def test_update_zone_flattened(): - client = DataplexServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_zone), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = operations_pb2.Operation(name='operations/op') - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.update_zone( - zone=resources.Zone(name='name_value'), - update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), - ) - - # Establish that the underlying call was made with the expected - # request object values. 
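- # (FieldMask semantics: only the fields listed in `update_mask.paths`
- # are intended to be overwritten server-side, so the zone payload and
- # the mask should both appear on the outgoing request, as asserted
- # below.)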
- assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - arg = args[0].zone - mock_val = resources.Zone(name='name_value') - assert arg == mock_val - arg = args[0].update_mask - mock_val = field_mask_pb2.FieldMask(paths=['paths_value']) - assert arg == mock_val - - -def test_update_zone_flattened_error(): - client = DataplexServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.update_zone( - service.UpdateZoneRequest(), - zone=resources.Zone(name='name_value'), - update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), - ) - -@pytest.mark.asyncio -async def test_update_zone_flattened_async(): - client = DataplexServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_zone), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = operations_pb2.Operation(name='operations/op') - - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name='operations/spam') - ) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.update_zone( - zone=resources.Zone(name='name_value'), - update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].zone - mock_val = resources.Zone(name='name_value') - assert arg == mock_val - arg = args[0].update_mask - mock_val = field_mask_pb2.FieldMask(paths=['paths_value']) - assert arg == mock_val - -@pytest.mark.asyncio -async def test_update_zone_flattened_error_async(): - client = DataplexServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - await client.update_zone( - service.UpdateZoneRequest(), - zone=resources.Zone(name='name_value'), - update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), - ) - - -@pytest.mark.parametrize("request_type", [ - service.DeleteZoneRequest, - dict, -]) -def test_delete_zone(request_type, transport: str = 'grpc'): - client = DataplexServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_zone), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = operations_pb2.Operation(name='operations/spam') - response = client.delete_zone(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - request = service.DeleteZoneRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. 
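- # delete_zone is a long-running operation, so the returned value is an
- # operation wrapper implementing google.api_core's future.Future
- # interface rather than the final response; a hedged usage sketch with
- # a hypothetical resource name:
- #   op = client.delete_zone(name='projects/p/locations/l/lakes/lk/zones/z')
- #   op.result()  # would block on LRO completion (moot here, the stub is mocked)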
- assert isinstance(response, future.Future) - - -def test_delete_zone_non_empty_request_with_auto_populated_field(): - # This test is a coverage failsafe to make sure that UUID4 fields are - # automatically populated, according to AIP-4235, with non-empty requests. - client = DataplexServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. - request = service.DeleteZoneRequest( - name='name_value', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_zone), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client.delete_zone(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == service.DeleteZoneRequest( - name='name_value', - ) - -def test_delete_zone_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = DataplexServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.delete_zone in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.delete_zone] = mock_rpc - request = {} - client.delete_zone(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - # Operation methods call wrapper_fn to build a cached - # client._transport.operations_client instance on first rpc call. - # Subsequent calls should use the cached wrapper - wrapper_fn.reset_mock() - - client.delete_zone(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_delete_zone_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: - client = DataplexServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._client._transport.delete_zone in client._client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.AsyncMock() - mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[client._client._transport.delete_zone] = mock_rpc - - request = {} - await client.delete_zone(request) - - # Establish that the underlying gRPC stub method was called. 
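- # The transport holds one wrapped callable per RPC in `_wrapped_methods`
- # (built once by _prep_wrapped_messages with the default retry, timeout,
- # and metadata applied), so swapping that dict entry above intercepts
- # every subsequent delete_zone call without any re-wrapping.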
- assert mock_rpc.call_count == 1 - - # Operation methods call wrapper_fn to build a cached - # client._transport.operations_client instance on first rpc call. - # Subsequent calls should use the cached wrapper - wrapper_fn.reset_mock() - - await client.delete_zone(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_delete_zone_async(transport: str = 'grpc_asyncio', request_type=service.DeleteZoneRequest): - client = DataplexServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_zone), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name='operations/spam') - ) - response = await client.delete_zone(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - request = service.DeleteZoneRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, future.Future) - - -@pytest.mark.asyncio -async def test_delete_zone_async_from_dict(): - await test_delete_zone_async(request_type=dict) - -def test_delete_zone_field_headers(): - client = DataplexServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = service.DeleteZoneRequest() - - request.name = 'name_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_zone), - '__call__') as call: - call.return_value = operations_pb2.Operation(name='operations/op') - client.delete_zone(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'name=name_value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_delete_zone_field_headers_async(): - client = DataplexServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = service.DeleteZoneRequest() - - request.name = 'name_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_zone), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(operations_pb2.Operation(name='operations/op')) - await client.delete_zone(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. 
- _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'name=name_value', - ) in kw['metadata'] - - -def test_delete_zone_flattened(): - client = DataplexServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_zone), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = operations_pb2.Operation(name='operations/op') - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.delete_zone( - name='name_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = 'name_value' - assert arg == mock_val - - -def test_delete_zone_flattened_error(): - client = DataplexServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.delete_zone( - service.DeleteZoneRequest(), - name='name_value', - ) - -@pytest.mark.asyncio -async def test_delete_zone_flattened_async(): - client = DataplexServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_zone), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = operations_pb2.Operation(name='operations/op') - - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name='operations/spam') - ) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.delete_zone( - name='name_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = 'name_value' - assert arg == mock_val - -@pytest.mark.asyncio -async def test_delete_zone_flattened_error_async(): - client = DataplexServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - await client.delete_zone( - service.DeleteZoneRequest(), - name='name_value', - ) - - -@pytest.mark.parametrize("request_type", [ - service.ListZonesRequest, - dict, -]) -def test_list_zones(request_type, transport: str = 'grpc'): - client = DataplexServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_zones), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = service.ListZonesResponse( - next_page_token='next_page_token_value', - ) - response = client.list_zones(request) - - # Establish that the underlying gRPC stub method was called. 
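- # Note: the response checked below is not the raw ListZonesResponse but
- # a ListZonesPager, which holds the first page and lazily re-issues the
- # RPC with each next_page_token as it is iterated.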
- assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - request = service.ListZonesRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, pagers.ListZonesPager) - assert response.next_page_token == 'next_page_token_value' - - -def test_list_zones_non_empty_request_with_auto_populated_field(): - # This test is a coverage failsafe to make sure that UUID4 fields are - # automatically populated, according to AIP-4235, with non-empty requests. - client = DataplexServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. - request = service.ListZonesRequest( - parent='parent_value', - page_token='page_token_value', - filter='filter_value', - order_by='order_by_value', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_zones), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client.list_zones(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == service.ListZonesRequest( - parent='parent_value', - page_token='page_token_value', - filter='filter_value', - order_by='order_by_value', - ) - -def test_list_zones_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = DataplexServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.list_zones in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.list_zones] = mock_rpc - request = {} - client.list_zones(request) - - # Establish that the underlying gRPC stub method was called. 
- assert mock_rpc.call_count == 1
-
- client.list_zones(request)
-
- # Establish that a new wrapper was not created for this call
- assert wrapper_fn.call_count == 0
- assert mock_rpc.call_count == 2
-
-@pytest.mark.asyncio
-async def test_list_zones_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"):
- # Clients should use _prep_wrapped_messages to create cached wrapped rpcs,
- # instead of constructing them on each call
- with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn:
- client = DataplexServiceAsyncClient(
- credentials=async_anonymous_credentials(),
- transport=transport,
- )
-
- # Should wrap all calls on client creation
- assert wrapper_fn.call_count > 0
- wrapper_fn.reset_mock()
-
- # Ensure method has been cached
- assert client._client._transport.list_zones in client._client._transport._wrapped_methods
-
- # Replace cached wrapped function with mock
- mock_rpc = mock.AsyncMock()
- mock_rpc.return_value = mock.Mock()
- client._client._transport._wrapped_methods[client._client._transport.list_zones] = mock_rpc
-
- request = {}
- await client.list_zones(request)
-
- # Establish that the underlying gRPC stub method was called.
- assert mock_rpc.call_count == 1
-
- await client.list_zones(request)
-
- # Establish that a new wrapper was not created for this call
- assert wrapper_fn.call_count == 0
- assert mock_rpc.call_count == 2
-
-@pytest.mark.asyncio
-async def test_list_zones_async(transport: str = 'grpc_asyncio', request_type=service.ListZonesRequest):
- client = DataplexServiceAsyncClient(
- credentials=async_anonymous_credentials(),
- transport=transport,
- )
-
- # Everything is optional in proto3 as far as the runtime is concerned,
- # and we are mocking out the actual API, so just send an empty request.
- request = request_type()
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client.transport.list_zones),
- '__call__') as call:
- # Designate an appropriate return value for the call.
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(service.ListZonesResponse(
- next_page_token='next_page_token_value',
- ))
- response = await client.list_zones(request)
-
- # Establish that the underlying gRPC stub method was called.
- assert len(call.mock_calls)
- _, args, _ = call.mock_calls[0]
- request = service.ListZonesRequest()
- assert args[0] == request
-
- # Establish that the response is the type that we expect.
- assert isinstance(response, pagers.ListZonesAsyncPager)
- assert response.next_page_token == 'next_page_token_value'
-
-
-@pytest.mark.asyncio
-async def test_list_zones_async_from_dict():
- await test_list_zones_async(request_type=dict)
-
-def test_list_zones_field_headers():
- client = DataplexServiceClient(
- credentials=ga_credentials.AnonymousCredentials(),
- )
-
- # Any value that is part of the HTTP/1.1 URI should be sent as
- # a field header. Set these to a non-empty value.
- request = service.ListZonesRequest()
-
- request.parent = 'parent_value'
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client.transport.list_zones),
- '__call__') as call:
- call.return_value = service.ListZonesResponse()
- client.list_zones(request)
-
- # Establish that the underlying gRPC stub method was called.
- assert len(call.mock_calls) == 1
- _, args, _ = call.mock_calls[0]
- assert args[0] == request
-
- # Establish that the field header was sent.
- _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'parent=parent_value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_list_zones_field_headers_async(): - client = DataplexServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = service.ListZonesRequest() - - request.parent = 'parent_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_zones), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(service.ListZonesResponse()) - await client.list_zones(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'parent=parent_value', - ) in kw['metadata'] - - -def test_list_zones_flattened(): - client = DataplexServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_zones), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = service.ListZonesResponse() - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.list_zones( - parent='parent_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - arg = args[0].parent - mock_val = 'parent_value' - assert arg == mock_val - - -def test_list_zones_flattened_error(): - client = DataplexServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.list_zones( - service.ListZonesRequest(), - parent='parent_value', - ) - -@pytest.mark.asyncio -async def test_list_zones_flattened_async(): - client = DataplexServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_zones), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = service.ListZonesResponse() - - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(service.ListZonesResponse()) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.list_zones( - parent='parent_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].parent - mock_val = 'parent_value' - assert arg == mock_val - -@pytest.mark.asyncio -async def test_list_zones_flattened_error_async(): - client = DataplexServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. 
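- # Illustrative sketch of the rejected call shape: a positional request
- # object combined with flattened kwargs is ambiguous, so the client
- # raises ValueError before any RPC is attempted, e.g.:
- #   await client.list_zones(service.ListZonesRequest(), parent='parent_value')  # ValueError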
- with pytest.raises(ValueError): - await client.list_zones( - service.ListZonesRequest(), - parent='parent_value', - ) - - -def test_list_zones_pager(transport_name: str = "grpc"): - client = DataplexServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport_name, - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_zones), - '__call__') as call: - # Set the response to a series of pages. - call.side_effect = ( - service.ListZonesResponse( - zones=[ - resources.Zone(), - resources.Zone(), - resources.Zone(), - ], - next_page_token='abc', - ), - service.ListZonesResponse( - zones=[], - next_page_token='def', - ), - service.ListZonesResponse( - zones=[ - resources.Zone(), - ], - next_page_token='ghi', - ), - service.ListZonesResponse( - zones=[ - resources.Zone(), - resources.Zone(), - ], - ), - RuntimeError, - ) - - expected_metadata = () - retry = retries.Retry() - timeout = 5 - expected_metadata = tuple(expected_metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ('parent', ''), - )), - ) - pager = client.list_zones(request={}, retry=retry, timeout=timeout) - - assert pager._metadata == expected_metadata - assert pager._retry == retry - assert pager._timeout == timeout - - results = list(pager) - assert len(results) == 6 - assert all(isinstance(i, resources.Zone) - for i in results) -def test_list_zones_pages(transport_name: str = "grpc"): - client = DataplexServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport_name, - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_zones), - '__call__') as call: - # Set the response to a series of pages. - call.side_effect = ( - service.ListZonesResponse( - zones=[ - resources.Zone(), - resources.Zone(), - resources.Zone(), - ], - next_page_token='abc', - ), - service.ListZonesResponse( - zones=[], - next_page_token='def', - ), - service.ListZonesResponse( - zones=[ - resources.Zone(), - ], - next_page_token='ghi', - ), - service.ListZonesResponse( - zones=[ - resources.Zone(), - resources.Zone(), - ], - ), - RuntimeError, - ) - pages = list(client.list_zones(request={}).pages) - for page_, token in zip(pages, ['abc','def','ghi', '']): - assert page_.raw_page.next_page_token == token - -@pytest.mark.asyncio -async def test_list_zones_async_pager(): - client = DataplexServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_zones), - '__call__', new_callable=mock.AsyncMock) as call: - # Set the response to a series of pages. 
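- # Each element of `side_effect` below is consumed by one successive
- # stub invocation, simulating four pages; the trailing RuntimeError is
- # a sentinel that would only surface if the pager fetched past the
- # final page.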
- call.side_effect = ( - service.ListZonesResponse( - zones=[ - resources.Zone(), - resources.Zone(), - resources.Zone(), - ], - next_page_token='abc', - ), - service.ListZonesResponse( - zones=[], - next_page_token='def', - ), - service.ListZonesResponse( - zones=[ - resources.Zone(), - ], - next_page_token='ghi', - ), - service.ListZonesResponse( - zones=[ - resources.Zone(), - resources.Zone(), - ], - ), - RuntimeError, - ) - async_pager = await client.list_zones(request={},) - assert async_pager.next_page_token == 'abc' - responses = [] - async for response in async_pager: # pragma: no branch - responses.append(response) - - assert len(responses) == 6 - assert all(isinstance(i, resources.Zone) - for i in responses) - - -@pytest.mark.asyncio -async def test_list_zones_async_pages(): - client = DataplexServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_zones), - '__call__', new_callable=mock.AsyncMock) as call: - # Set the response to a series of pages. - call.side_effect = ( - service.ListZonesResponse( - zones=[ - resources.Zone(), - resources.Zone(), - resources.Zone(), - ], - next_page_token='abc', - ), - service.ListZonesResponse( - zones=[], - next_page_token='def', - ), - service.ListZonesResponse( - zones=[ - resources.Zone(), - ], - next_page_token='ghi', - ), - service.ListZonesResponse( - zones=[ - resources.Zone(), - resources.Zone(), - ], - ), - RuntimeError, - ) - pages = [] - # Workaround issue in python 3.9 related to code coverage by adding `# pragma: no branch` - # See https://github.com/googleapis/gapic-generator-python/pull/1174#issuecomment-1025132372 - async for page_ in ( # pragma: no branch - await client.list_zones(request={}) - ).pages: - pages.append(page_) - for page_, token in zip(pages, ['abc','def','ghi', '']): - assert page_.raw_page.next_page_token == token - -@pytest.mark.parametrize("request_type", [ - service.GetZoneRequest, - dict, -]) -def test_get_zone(request_type, transport: str = 'grpc'): - client = DataplexServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_zone), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = resources.Zone( - name='name_value', - display_name='display_name_value', - uid='uid_value', - description='description_value', - state=resources.State.ACTIVE, - type_=resources.Zone.Type.RAW, - ) - response = client.get_zone(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - request = service.GetZoneRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. 
- assert isinstance(response, resources.Zone) - assert response.name == 'name_value' - assert response.display_name == 'display_name_value' - assert response.uid == 'uid_value' - assert response.description == 'description_value' - assert response.state == resources.State.ACTIVE - assert response.type_ == resources.Zone.Type.RAW - - -def test_get_zone_non_empty_request_with_auto_populated_field(): - # This test is a coverage failsafe to make sure that UUID4 fields are - # automatically populated, according to AIP-4235, with non-empty requests. - client = DataplexServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. - request = service.GetZoneRequest( - name='name_value', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_zone), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client.get_zone(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == service.GetZoneRequest( - name='name_value', - ) - -def test_get_zone_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = DataplexServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.get_zone in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.get_zone] = mock_rpc - request = {} - client.get_zone(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - client.get_zone(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_get_zone_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: - client = DataplexServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._client._transport.get_zone in client._client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.AsyncMock() - mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[client._client._transport.get_zone] = mock_rpc - - request = {} - await client.get_zone(request) - - # Establish that the underlying gRPC stub method was called. 
- assert mock_rpc.call_count == 1
-
- await client.get_zone(request)
-
- # Establish that a new wrapper was not created for this call
- assert wrapper_fn.call_count == 0
- assert mock_rpc.call_count == 2
-
-@pytest.mark.asyncio
-async def test_get_zone_async(transport: str = 'grpc_asyncio', request_type=service.GetZoneRequest):
- client = DataplexServiceAsyncClient(
- credentials=async_anonymous_credentials(),
- transport=transport,
- )
-
- # Everything is optional in proto3 as far as the runtime is concerned,
- # and we are mocking out the actual API, so just send an empty request.
- request = request_type()
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client.transport.get_zone),
- '__call__') as call:
- # Designate an appropriate return value for the call.
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(resources.Zone(
- name='name_value',
- display_name='display_name_value',
- uid='uid_value',
- description='description_value',
- state=resources.State.ACTIVE,
- type_=resources.Zone.Type.RAW,
- ))
- response = await client.get_zone(request)
-
- # Establish that the underlying gRPC stub method was called.
- assert len(call.mock_calls)
- _, args, _ = call.mock_calls[0]
- request = service.GetZoneRequest()
- assert args[0] == request
-
- # Establish that the response is the type that we expect.
- assert isinstance(response, resources.Zone)
- assert response.name == 'name_value'
- assert response.display_name == 'display_name_value'
- assert response.uid == 'uid_value'
- assert response.description == 'description_value'
- assert response.state == resources.State.ACTIVE
- assert response.type_ == resources.Zone.Type.RAW
-
-
-@pytest.mark.asyncio
-async def test_get_zone_async_from_dict():
- await test_get_zone_async(request_type=dict)
-
-def test_get_zone_field_headers():
- client = DataplexServiceClient(
- credentials=ga_credentials.AnonymousCredentials(),
- )
-
- # Any value that is part of the HTTP/1.1 URI should be sent as
- # a field header. Set these to a non-empty value.
- request = service.GetZoneRequest()
-
- request.name = 'name_value'
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client.transport.get_zone),
- '__call__') as call:
- call.return_value = resources.Zone()
- client.get_zone(request)
-
- # Establish that the underlying gRPC stub method was called.
- assert len(call.mock_calls) == 1
- _, args, _ = call.mock_calls[0]
- assert args[0] == request
-
- # Establish that the field header was sent.
- _, _, kw = call.mock_calls[0]
- assert (
- 'x-goog-request-params',
- 'name=name_value',
- ) in kw['metadata']
-
-
-@pytest.mark.asyncio
-async def test_get_zone_field_headers_async():
- client = DataplexServiceAsyncClient(
- credentials=async_anonymous_credentials(),
- )
-
- # Any value that is part of the HTTP/1.1 URI should be sent as
- # a field header. Set these to a non-empty value.
- request = service.GetZoneRequest()
-
- request.name = 'name_value'
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client.transport.get_zone),
- '__call__') as call:
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(resources.Zone())
- await client.get_zone(request)
-
- # Establish that the underlying gRPC stub method was called.
- assert len(call.mock_calls)
- _, args, _ = call.mock_calls[0]
- assert args[0] == request
-
- # Establish that the field header was sent.
- _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'name=name_value', - ) in kw['metadata'] - - -def test_get_zone_flattened(): - client = DataplexServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_zone), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = resources.Zone() - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.get_zone( - name='name_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = 'name_value' - assert arg == mock_val - - -def test_get_zone_flattened_error(): - client = DataplexServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.get_zone( - service.GetZoneRequest(), - name='name_value', - ) - -@pytest.mark.asyncio -async def test_get_zone_flattened_async(): - client = DataplexServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_zone), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = resources.Zone() - - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(resources.Zone()) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.get_zone( - name='name_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = 'name_value' - assert arg == mock_val - -@pytest.mark.asyncio -async def test_get_zone_flattened_error_async(): - client = DataplexServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - await client.get_zone( - service.GetZoneRequest(), - name='name_value', - ) - - -@pytest.mark.parametrize("request_type", [ - service.ListZoneActionsRequest, - dict, -]) -def test_list_zone_actions(request_type, transport: str = 'grpc'): - client = DataplexServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_zone_actions), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = service.ListActionsResponse( - next_page_token='next_page_token_value', - ) - response = client.list_zone_actions(request) - - # Establish that the underlying gRPC stub method was called. 
- assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - request = service.ListZoneActionsRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, pagers.ListZoneActionsPager) - assert response.next_page_token == 'next_page_token_value' - - -def test_list_zone_actions_non_empty_request_with_auto_populated_field(): - # This test is a coverage failsafe to make sure that UUID4 fields are - # automatically populated, according to AIP-4235, with non-empty requests. - client = DataplexServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. - request = service.ListZoneActionsRequest( - parent='parent_value', - page_token='page_token_value', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_zone_actions), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client.list_zone_actions(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == service.ListZoneActionsRequest( - parent='parent_value', - page_token='page_token_value', - ) - -def test_list_zone_actions_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = DataplexServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.list_zone_actions in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.list_zone_actions] = mock_rpc - request = {} - client.list_zone_actions(request) - - # Establish that the underlying gRPC stub method was called. 
- assert mock_rpc.call_count == 1
-
- client.list_zone_actions(request)
-
- # Establish that a new wrapper was not created for this call
- assert wrapper_fn.call_count == 0
- assert mock_rpc.call_count == 2
-
-@pytest.mark.asyncio
-async def test_list_zone_actions_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"):
- # Clients should use _prep_wrapped_messages to create cached wrapped rpcs,
- # instead of constructing them on each call
- with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn:
- client = DataplexServiceAsyncClient(
- credentials=async_anonymous_credentials(),
- transport=transport,
- )
-
- # Should wrap all calls on client creation
- assert wrapper_fn.call_count > 0
- wrapper_fn.reset_mock()
-
- # Ensure method has been cached
- assert client._client._transport.list_zone_actions in client._client._transport._wrapped_methods
-
- # Replace cached wrapped function with mock
- mock_rpc = mock.AsyncMock()
- mock_rpc.return_value = mock.Mock()
- client._client._transport._wrapped_methods[client._client._transport.list_zone_actions] = mock_rpc
-
- request = {}
- await client.list_zone_actions(request)
-
- # Establish that the underlying gRPC stub method was called.
- assert mock_rpc.call_count == 1
-
- await client.list_zone_actions(request)
-
- # Establish that a new wrapper was not created for this call
- assert wrapper_fn.call_count == 0
- assert mock_rpc.call_count == 2
-
-@pytest.mark.asyncio
-async def test_list_zone_actions_async(transport: str = 'grpc_asyncio', request_type=service.ListZoneActionsRequest):
- client = DataplexServiceAsyncClient(
- credentials=async_anonymous_credentials(),
- transport=transport,
- )
-
- # Everything is optional in proto3 as far as the runtime is concerned,
- # and we are mocking out the actual API, so just send an empty request.
- request = request_type()
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client.transport.list_zone_actions),
- '__call__') as call:
- # Designate an appropriate return value for the call.
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(service.ListActionsResponse(
- next_page_token='next_page_token_value',
- ))
- response = await client.list_zone_actions(request)
-
- # Establish that the underlying gRPC stub method was called.
- assert len(call.mock_calls)
- _, args, _ = call.mock_calls[0]
- request = service.ListZoneActionsRequest()
- assert args[0] == request
-
- # Establish that the response is the type that we expect.
- assert isinstance(response, pagers.ListZoneActionsAsyncPager)
- assert response.next_page_token == 'next_page_token_value'
-
-
-@pytest.mark.asyncio
-async def test_list_zone_actions_async_from_dict():
- await test_list_zone_actions_async(request_type=dict)
-
-def test_list_zone_actions_field_headers():
- client = DataplexServiceClient(
- credentials=ga_credentials.AnonymousCredentials(),
- )
-
- # Any value that is part of the HTTP/1.1 URI should be sent as
- # a field header. Set these to a non-empty value.
- request = service.ListZoneActionsRequest()
-
- request.parent = 'parent_value'
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client.transport.list_zone_actions),
- '__call__') as call:
- call.return_value = service.ListActionsResponse()
- client.list_zone_actions(request)
-
- # Establish that the underlying gRPC stub method was called.
- assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'parent=parent_value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_list_zone_actions_field_headers_async(): - client = DataplexServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = service.ListZoneActionsRequest() - - request.parent = 'parent_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_zone_actions), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(service.ListActionsResponse()) - await client.list_zone_actions(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'parent=parent_value', - ) in kw['metadata'] - - -def test_list_zone_actions_flattened(): - client = DataplexServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_zone_actions), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = service.ListActionsResponse() - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.list_zone_actions( - parent='parent_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - arg = args[0].parent - mock_val = 'parent_value' - assert arg == mock_val - - -def test_list_zone_actions_flattened_error(): - client = DataplexServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.list_zone_actions( - service.ListZoneActionsRequest(), - parent='parent_value', - ) - -@pytest.mark.asyncio -async def test_list_zone_actions_flattened_async(): - client = DataplexServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_zone_actions), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = service.ListActionsResponse() - - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(service.ListActionsResponse()) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.list_zone_actions( - parent='parent_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. 
-        assert len(call.mock_calls)
-        _, args, _ = call.mock_calls[0]
-        arg = args[0].parent
-        mock_val = 'parent_value'
-        assert arg == mock_val
-
-@pytest.mark.asyncio
-async def test_list_zone_actions_flattened_error_async():
-    client = DataplexServiceAsyncClient(
-        credentials=async_anonymous_credentials(),
-    )
-
-    # Attempting to call a method with both a request object and flattened
-    # fields is an error.
-    with pytest.raises(ValueError):
-        await client.list_zone_actions(
-            service.ListZoneActionsRequest(),
-            parent='parent_value',
-        )
-
-
-def test_list_zone_actions_pager(transport_name: str = "grpc"):
-    client = DataplexServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport=transport_name,
-    )
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.list_zone_actions),
-            '__call__') as call:
-        # Set the response to a series of pages.
-        call.side_effect = (
-            service.ListActionsResponse(
-                actions=[
-                    resources.Action(),
-                    resources.Action(),
-                    resources.Action(),
-                ],
-                next_page_token='abc',
-            ),
-            service.ListActionsResponse(
-                actions=[],
-                next_page_token='def',
-            ),
-            service.ListActionsResponse(
-                actions=[
-                    resources.Action(),
-                ],
-                next_page_token='ghi',
-            ),
-            service.ListActionsResponse(
-                actions=[
-                    resources.Action(),
-                    resources.Action(),
-                ],
-            ),
-            RuntimeError,
-        )
-
-        retry = retries.Retry()
-        timeout = 5
-        expected_metadata = (
-            gapic_v1.routing_header.to_grpc_metadata((
-                ('parent', ''),
-            )),
-        )
-        pager = client.list_zone_actions(request={}, retry=retry, timeout=timeout)
-
-        assert pager._metadata == expected_metadata
-        assert pager._retry == retry
-        assert pager._timeout == timeout
-
-        results = list(pager)
-        assert len(results) == 6
-        assert all(isinstance(i, resources.Action)
-                   for i in results)
-
-
-def test_list_zone_actions_pages(transport_name: str = "grpc"):
-    client = DataplexServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport=transport_name,
-    )
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.list_zone_actions),
-            '__call__') as call:
-        # Set the response to a series of pages.
-        call.side_effect = (
-            service.ListActionsResponse(
-                actions=[
-                    resources.Action(),
-                    resources.Action(),
-                    resources.Action(),
-                ],
-                next_page_token='abc',
-            ),
-            service.ListActionsResponse(
-                actions=[],
-                next_page_token='def',
-            ),
-            service.ListActionsResponse(
-                actions=[
-                    resources.Action(),
-                ],
-                next_page_token='ghi',
-            ),
-            service.ListActionsResponse(
-                actions=[
-                    resources.Action(),
-                    resources.Action(),
-                ],
-            ),
-            RuntimeError,
-        )
-        pages = list(client.list_zone_actions(request={}).pages)
-        for page_, token in zip(pages, ['abc', 'def', 'ghi', '']):
-            assert page_.raw_page.next_page_token == token
-
-@pytest.mark.asyncio
-async def test_list_zone_actions_async_pager():
-    client = DataplexServiceAsyncClient(
-        credentials=async_anonymous_credentials(),
-    )
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.list_zone_actions),
-            '__call__', new_callable=mock.AsyncMock) as call:
-        # Set the response to a series of pages.
- call.side_effect = ( - service.ListActionsResponse( - actions=[ - resources.Action(), - resources.Action(), - resources.Action(), - ], - next_page_token='abc', - ), - service.ListActionsResponse( - actions=[], - next_page_token='def', - ), - service.ListActionsResponse( - actions=[ - resources.Action(), - ], - next_page_token='ghi', - ), - service.ListActionsResponse( - actions=[ - resources.Action(), - resources.Action(), - ], - ), - RuntimeError, - ) - async_pager = await client.list_zone_actions(request={},) - assert async_pager.next_page_token == 'abc' - responses = [] - async for response in async_pager: # pragma: no branch - responses.append(response) - - assert len(responses) == 6 - assert all(isinstance(i, resources.Action) - for i in responses) - - -@pytest.mark.asyncio -async def test_list_zone_actions_async_pages(): - client = DataplexServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_zone_actions), - '__call__', new_callable=mock.AsyncMock) as call: - # Set the response to a series of pages. - call.side_effect = ( - service.ListActionsResponse( - actions=[ - resources.Action(), - resources.Action(), - resources.Action(), - ], - next_page_token='abc', - ), - service.ListActionsResponse( - actions=[], - next_page_token='def', - ), - service.ListActionsResponse( - actions=[ - resources.Action(), - ], - next_page_token='ghi', - ), - service.ListActionsResponse( - actions=[ - resources.Action(), - resources.Action(), - ], - ), - RuntimeError, - ) - pages = [] - # Workaround issue in python 3.9 related to code coverage by adding `# pragma: no branch` - # See https://github.com/googleapis/gapic-generator-python/pull/1174#issuecomment-1025132372 - async for page_ in ( # pragma: no branch - await client.list_zone_actions(request={}) - ).pages: - pages.append(page_) - for page_, token in zip(pages, ['abc','def','ghi', '']): - assert page_.raw_page.next_page_token == token - -@pytest.mark.parametrize("request_type", [ - service.CreateAssetRequest, - dict, -]) -def test_create_asset(request_type, transport: str = 'grpc'): - client = DataplexServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_asset), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = operations_pb2.Operation(name='operations/spam') - response = client.create_asset(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - request = service.CreateAssetRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, future.Future) - - -def test_create_asset_non_empty_request_with_auto_populated_field(): - # This test is a coverage failsafe to make sure that UUID4 fields are - # automatically populated, according to AIP-4235, with non-empty requests. 
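The pager and pages tests above drive list_zone_actions through four fake pages. For orientation, the same iteration surface against a live client looks roughly like this; a sketch only, where the zone resource name is a hypothetical placeholder and Application Default Credentials are assumed:

    from google.cloud import dataplex_v1

    def collect_zone_actions(zone_name: str):
        # zone_name is a placeholder, e.g.
        # "projects/p/locations/l/lakes/lake/zones/zone".
        client = dataplex_v1.DataplexServiceClient()
        pager = client.list_zone_actions(parent=zone_name)
        actions = []
        # .pages yields one ListActionsResponse per RPC, following
        # next_page_token until it comes back empty -- the behavior the
        # mocked side_effect sequences above simulate.
        for page in pager.pages:
            actions.extend(page.actions)
        return actions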
- client = DataplexServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. - request = service.CreateAssetRequest( - parent='parent_value', - asset_id='asset_id_value', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_asset), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client.create_asset(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == service.CreateAssetRequest( - parent='parent_value', - asset_id='asset_id_value', - ) - -def test_create_asset_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = DataplexServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.create_asset in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.create_asset] = mock_rpc - request = {} - client.create_asset(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - # Operation methods call wrapper_fn to build a cached - # client._transport.operations_client instance on first rpc call. - # Subsequent calls should use the cached wrapper - wrapper_fn.reset_mock() - - client.create_asset(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_create_asset_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: - client = DataplexServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._client._transport.create_asset in client._client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.AsyncMock() - mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[client._client._transport.create_asset] = mock_rpc - - request = {} - await client.create_asset(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - # Operation methods call wrapper_fn to build a cached - # client._transport.operations_client instance on first rpc call. 
- # Subsequent calls should use the cached wrapper - wrapper_fn.reset_mock() - - await client.create_asset(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_create_asset_async(transport: str = 'grpc_asyncio', request_type=service.CreateAssetRequest): - client = DataplexServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_asset), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name='operations/spam') - ) - response = await client.create_asset(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - request = service.CreateAssetRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, future.Future) - - -@pytest.mark.asyncio -async def test_create_asset_async_from_dict(): - await test_create_asset_async(request_type=dict) - -def test_create_asset_field_headers(): - client = DataplexServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = service.CreateAssetRequest() - - request.parent = 'parent_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_asset), - '__call__') as call: - call.return_value = operations_pb2.Operation(name='operations/op') - client.create_asset(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'parent=parent_value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_create_asset_field_headers_async(): - client = DataplexServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = service.CreateAssetRequest() - - request.parent = 'parent_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_asset), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(operations_pb2.Operation(name='operations/op')) - await client.create_asset(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. 
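The *_use_cached_wrapped_rpc tests assert that wrap_method runs only while the client is being constructed. A distilled sketch of that invariant, leaning on the same private generator internals (_transport._wrapped_methods) that the tests themselves touch, so it should be read as illustrative rather than supported API:

    from unittest import mock

    from google.auth import credentials as ga_credentials
    from google.cloud import dataplex_v1

    def wrapper_is_built_once():
        with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn:
            client = dataplex_v1.DataplexServiceClient(
                credentials=ga_credentials.AnonymousCredentials(),
                transport="grpc",
            )
            # All wrappers are created eagerly during client construction...
            assert wrapper_fn.call_count > 0
            wrapper_fn.reset_mock()
            # ...and later lookups hit the cache instead of wrap_method.
            transport = client._transport
            assert transport.create_asset in transport._wrapped_methods
            assert wrapper_fn.call_count == 0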
-    _, _, kw = call.mock_calls[0]
-    assert (
-        'x-goog-request-params',
-        'parent=parent_value',
-    ) in kw['metadata']
-
-
-def test_create_asset_flattened():
-    client = DataplexServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-    )
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.create_asset),
-            '__call__') as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = operations_pb2.Operation(name='operations/op')
-        # Call the method with a truthy value for each flattened field,
-        # using the keyword arguments to the method.
-        client.create_asset(
-            parent='parent_value',
-            asset=resources.Asset(name='name_value'),
-            asset_id='asset_id_value',
-        )
-
-        # Establish that the underlying call was made with the expected
-        # request object values.
-        assert len(call.mock_calls) == 1
-        _, args, _ = call.mock_calls[0]
-        arg = args[0].parent
-        mock_val = 'parent_value'
-        assert arg == mock_val
-        arg = args[0].asset
-        mock_val = resources.Asset(name='name_value')
-        assert arg == mock_val
-        arg = args[0].asset_id
-        mock_val = 'asset_id_value'
-        assert arg == mock_val
-
-
-def test_create_asset_flattened_error():
-    client = DataplexServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-    )
-
-    # Attempting to call a method with both a request object and flattened
-    # fields is an error.
-    with pytest.raises(ValueError):
-        client.create_asset(
-            service.CreateAssetRequest(),
-            parent='parent_value',
-            asset=resources.Asset(name='name_value'),
-            asset_id='asset_id_value',
-        )
-
-@pytest.mark.asyncio
-async def test_create_asset_flattened_async():
-    client = DataplexServiceAsyncClient(
-        credentials=async_anonymous_credentials(),
-    )
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.create_asset),
-            '__call__') as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
-            operations_pb2.Operation(name='operations/spam')
-        )
-        # Call the method with a truthy value for each flattened field,
-        # using the keyword arguments to the method.
-        response = await client.create_asset(
-            parent='parent_value',
-            asset=resources.Asset(name='name_value'),
-            asset_id='asset_id_value',
-        )
-
-        # Establish that the underlying call was made with the expected
-        # request object values.
-        assert len(call.mock_calls)
-        _, args, _ = call.mock_calls[0]
-        arg = args[0].parent
-        mock_val = 'parent_value'
-        assert arg == mock_val
-        arg = args[0].asset
-        mock_val = resources.Asset(name='name_value')
-        assert arg == mock_val
-        arg = args[0].asset_id
-        mock_val = 'asset_id_value'
-        assert arg == mock_val
-
-@pytest.mark.asyncio
-async def test_create_asset_flattened_error_async():
-    client = DataplexServiceAsyncClient(
-        credentials=async_anonymous_credentials(),
-    )
-
-    # Attempting to call a method with both a request object and flattened
-    # fields is an error.
- with pytest.raises(ValueError): - await client.create_asset( - service.CreateAssetRequest(), - parent='parent_value', - asset=resources.Asset(name='name_value'), - asset_id='asset_id_value', - ) - - -@pytest.mark.parametrize("request_type", [ - service.UpdateAssetRequest, - dict, -]) -def test_update_asset(request_type, transport: str = 'grpc'): - client = DataplexServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_asset), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = operations_pb2.Operation(name='operations/spam') - response = client.update_asset(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - request = service.UpdateAssetRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, future.Future) - - -def test_update_asset_non_empty_request_with_auto_populated_field(): - # This test is a coverage failsafe to make sure that UUID4 fields are - # automatically populated, according to AIP-4235, with non-empty requests. - client = DataplexServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. - request = service.UpdateAssetRequest( - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_asset), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client.update_asset(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == service.UpdateAssetRequest( - ) - -def test_update_asset_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = DataplexServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.update_asset in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.update_asset] = mock_rpc - request = {} - client.update_asset(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - # Operation methods call wrapper_fn to build a cached - # client._transport.operations_client instance on first rpc call. 
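create_asset, update_asset, and delete_asset all return long-running operations, which is why these tests stub operations_pb2.Operation and assert future.Future. Against a live service the flow would look roughly like the sketch below; the zone resource name and the STORAGE_BUCKET resource spec are illustrative assumptions, not values taken from this change:

    from google.cloud import dataplex_v1

    def create_asset_and_wait(zone_name: str, asset_id: str):
        # zone_name is a placeholder, e.g.
        # "projects/p/locations/l/lakes/lake/zones/zone".
        client = dataplex_v1.DataplexServiceClient()
        asset = dataplex_v1.Asset(
            resource_spec=dataplex_v1.Asset.ResourceSpec(
                type_=dataplex_v1.Asset.ResourceSpec.Type.STORAGE_BUCKET,
            ),
        )
        operation = client.create_asset(
            parent=zone_name,
            asset=asset,
            asset_id=asset_id,
        )
        # The client returns a future immediately; result() polls the
        # long-running operation until it completes.
        return operation.result()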
- # Subsequent calls should use the cached wrapper - wrapper_fn.reset_mock() - - client.update_asset(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_update_asset_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: - client = DataplexServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._client._transport.update_asset in client._client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.AsyncMock() - mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[client._client._transport.update_asset] = mock_rpc - - request = {} - await client.update_asset(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - # Operation methods call wrapper_fn to build a cached - # client._transport.operations_client instance on first rpc call. - # Subsequent calls should use the cached wrapper - wrapper_fn.reset_mock() - - await client.update_asset(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_update_asset_async(transport: str = 'grpc_asyncio', request_type=service.UpdateAssetRequest): - client = DataplexServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_asset), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name='operations/spam') - ) - response = await client.update_asset(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - request = service.UpdateAssetRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, future.Future) - - -@pytest.mark.asyncio -async def test_update_asset_async_from_dict(): - await test_update_asset_async(request_type=dict) - -def test_update_asset_field_headers(): - client = DataplexServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = service.UpdateAssetRequest() - - request.asset.name = 'name_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_asset), - '__call__') as call: - call.return_value = operations_pb2.Operation(name='operations/op') - client.update_asset(request) - - # Establish that the underlying gRPC stub method was called. 
-        assert len(call.mock_calls) == 1
-        _, args, _ = call.mock_calls[0]
-        assert args[0] == request
-
-    # Establish that the field header was sent.
-    _, _, kw = call.mock_calls[0]
-    assert (
-        'x-goog-request-params',
-        'asset.name=name_value',
-    ) in kw['metadata']
-
-
-@pytest.mark.asyncio
-async def test_update_asset_field_headers_async():
-    client = DataplexServiceAsyncClient(
-        credentials=async_anonymous_credentials(),
-    )
-
-    # Any value that is part of the HTTP/1.1 URI should be sent as
-    # a field header. Set these to a non-empty value.
-    request = service.UpdateAssetRequest()
-
-    request.asset.name = 'name_value'
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.update_asset),
-            '__call__') as call:
-        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(operations_pb2.Operation(name='operations/op'))
-        await client.update_asset(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert len(call.mock_calls)
-        _, args, _ = call.mock_calls[0]
-        assert args[0] == request
-
-    # Establish that the field header was sent.
-    _, _, kw = call.mock_calls[0]
-    assert (
-        'x-goog-request-params',
-        'asset.name=name_value',
-    ) in kw['metadata']
-
-
-def test_update_asset_flattened():
-    client = DataplexServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-    )
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.update_asset),
-            '__call__') as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = operations_pb2.Operation(name='operations/op')
-        # Call the method with a truthy value for each flattened field,
-        # using the keyword arguments to the method.
-        client.update_asset(
-            asset=resources.Asset(name='name_value'),
-            update_mask=field_mask_pb2.FieldMask(paths=['paths_value']),
-        )
-
-        # Establish that the underlying call was made with the expected
-        # request object values.
-        assert len(call.mock_calls) == 1
-        _, args, _ = call.mock_calls[0]
-        arg = args[0].asset
-        mock_val = resources.Asset(name='name_value')
-        assert arg == mock_val
-        arg = args[0].update_mask
-        mock_val = field_mask_pb2.FieldMask(paths=['paths_value'])
-        assert arg == mock_val
-
-
-def test_update_asset_flattened_error():
-    client = DataplexServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-    )
-
-    # Attempting to call a method with both a request object and flattened
-    # fields is an error.
-    with pytest.raises(ValueError):
-        client.update_asset(
-            service.UpdateAssetRequest(),
-            asset=resources.Asset(name='name_value'),
-            update_mask=field_mask_pb2.FieldMask(paths=['paths_value']),
-        )
-
-@pytest.mark.asyncio
-async def test_update_asset_flattened_async():
-    client = DataplexServiceAsyncClient(
-        credentials=async_anonymous_credentials(),
-    )
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.update_asset),
-            '__call__') as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
-            operations_pb2.Operation(name='operations/spam')
-        )
-        # Call the method with a truthy value for each flattened field,
-        # using the keyword arguments to the method.
- response = await client.update_asset( - asset=resources.Asset(name='name_value'), - update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].asset - mock_val = resources.Asset(name='name_value') - assert arg == mock_val - arg = args[0].update_mask - mock_val = field_mask_pb2.FieldMask(paths=['paths_value']) - assert arg == mock_val - -@pytest.mark.asyncio -async def test_update_asset_flattened_error_async(): - client = DataplexServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - await client.update_asset( - service.UpdateAssetRequest(), - asset=resources.Asset(name='name_value'), - update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), - ) - - -@pytest.mark.parametrize("request_type", [ - service.DeleteAssetRequest, - dict, -]) -def test_delete_asset(request_type, transport: str = 'grpc'): - client = DataplexServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_asset), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = operations_pb2.Operation(name='operations/spam') - response = client.delete_asset(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - request = service.DeleteAssetRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, future.Future) - - -def test_delete_asset_non_empty_request_with_auto_populated_field(): - # This test is a coverage failsafe to make sure that UUID4 fields are - # automatically populated, according to AIP-4235, with non-empty requests. - client = DataplexServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. - request = service.DeleteAssetRequest( - name='name_value', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_asset), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. 
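The update_asset tests pair the resource with a FieldMask, which is what scopes the server-side update to specific fields. A minimal sketch of the same call shape, where the asset resource name is a hypothetical placeholder:

    from google.cloud import dataplex_v1
    from google.protobuf import field_mask_pb2

    def update_asset_description(asset_name: str, description: str):
        client = dataplex_v1.DataplexServiceClient()
        asset = dataplex_v1.Asset(name=asset_name, description=description)
        operation = client.update_asset(
            asset=asset,
            # Only the fields listed in the mask are written; everything
            # else on the Asset message is left untouched by the server.
            update_mask=field_mask_pb2.FieldMask(paths=["description"]),
        )
        return operation.result()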
- client.delete_asset(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == service.DeleteAssetRequest( - name='name_value', - ) - -def test_delete_asset_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = DataplexServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.delete_asset in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.delete_asset] = mock_rpc - request = {} - client.delete_asset(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - # Operation methods call wrapper_fn to build a cached - # client._transport.operations_client instance on first rpc call. - # Subsequent calls should use the cached wrapper - wrapper_fn.reset_mock() - - client.delete_asset(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_delete_asset_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: - client = DataplexServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._client._transport.delete_asset in client._client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.AsyncMock() - mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[client._client._transport.delete_asset] = mock_rpc - - request = {} - await client.delete_asset(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - # Operation methods call wrapper_fn to build a cached - # client._transport.operations_client instance on first rpc call. - # Subsequent calls should use the cached wrapper - wrapper_fn.reset_mock() - - await client.delete_asset(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_delete_asset_async(transport: str = 'grpc_asyncio', request_type=service.DeleteAssetRequest): - client = DataplexServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.delete_asset), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name='operations/spam') - ) - response = await client.delete_asset(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - request = service.DeleteAssetRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, future.Future) - - -@pytest.mark.asyncio -async def test_delete_asset_async_from_dict(): - await test_delete_asset_async(request_type=dict) - -def test_delete_asset_field_headers(): - client = DataplexServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = service.DeleteAssetRequest() - - request.name = 'name_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_asset), - '__call__') as call: - call.return_value = operations_pb2.Operation(name='operations/op') - client.delete_asset(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'name=name_value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_delete_asset_field_headers_async(): - client = DataplexServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = service.DeleteAssetRequest() - - request.name = 'name_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_asset), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(operations_pb2.Operation(name='operations/op')) - await client.delete_asset(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'name=name_value', - ) in kw['metadata'] - - -def test_delete_asset_flattened(): - client = DataplexServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_asset), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = operations_pb2.Operation(name='operations/op') - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.delete_asset( - name='name_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. 
-        assert len(call.mock_calls) == 1
-        _, args, _ = call.mock_calls[0]
-        arg = args[0].name
-        mock_val = 'name_value'
-        assert arg == mock_val
-
-
-def test_delete_asset_flattened_error():
-    client = DataplexServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-    )
-
-    # Attempting to call a method with both a request object and flattened
-    # fields is an error.
-    with pytest.raises(ValueError):
-        client.delete_asset(
-            service.DeleteAssetRequest(),
-            name='name_value',
-        )
-
-@pytest.mark.asyncio
-async def test_delete_asset_flattened_async():
-    client = DataplexServiceAsyncClient(
-        credentials=async_anonymous_credentials(),
-    )
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.delete_asset),
-            '__call__') as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
-            operations_pb2.Operation(name='operations/spam')
-        )
-        # Call the method with a truthy value for each flattened field,
-        # using the keyword arguments to the method.
-        response = await client.delete_asset(
-            name='name_value',
-        )
-
-        # Establish that the underlying call was made with the expected
-        # request object values.
-        assert len(call.mock_calls)
-        _, args, _ = call.mock_calls[0]
-        arg = args[0].name
-        mock_val = 'name_value'
-        assert arg == mock_val
-
-@pytest.mark.asyncio
-async def test_delete_asset_flattened_error_async():
-    client = DataplexServiceAsyncClient(
-        credentials=async_anonymous_credentials(),
-    )
-
-    # Attempting to call a method with both a request object and flattened
-    # fields is an error.
-    with pytest.raises(ValueError):
-        await client.delete_asset(
-            service.DeleteAssetRequest(),
-            name='name_value',
-        )
-
-
-@pytest.mark.parametrize("request_type", [
-    service.ListAssetsRequest,
-    dict,
-])
-def test_list_assets(request_type, transport: str = 'grpc'):
-    client = DataplexServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport=transport,
-    )
-
-    # Everything is optional in proto3 as far as the runtime is concerned,
-    # and we are mocking out the actual API, so just send an empty request.
-    request = request_type()
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.list_assets),
-            '__call__') as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = service.ListAssetsResponse(
-            next_page_token='next_page_token_value',
-        )
-        response = client.list_assets(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert len(call.mock_calls) == 1
-        _, args, _ = call.mock_calls[0]
-        request = service.ListAssetsRequest()
-        assert args[0] == request
-
-        # Establish that the response is the type that we expect.
-        assert isinstance(response, pagers.ListAssetsPager)
-        assert response.next_page_token == 'next_page_token_value'
-
-
-def test_list_assets_non_empty_request_with_auto_populated_field():
-    # This test is a coverage failsafe to make sure that UUID4 fields are
-    # automatically populated, according to AIP-4235, with non-empty requests.
-    client = DataplexServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport='grpc',
-    )
-
-    # Populate all string fields in the request which are not UUID4
-    # since we want to check that UUID4 are populated automatically
-    # if they meet the requirements of AIP 4235.
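Every *_field_headers test in this file asserts the same metadata pair, derived from request fields according to the proto's routing annotations. The helper these tests already import builds it like so; a sketch with an illustrative value:

    from google.api_core import gapic_v1

    header = gapic_v1.routing_header.to_grpc_metadata(
        (("parent", "parent_value"),)
    )
    # header == ('x-goog-request-params', 'parent=parent_value'),
    # which is exactly the pair asserted to be in kw['metadata'] above.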
- request = service.ListAssetsRequest( - parent='parent_value', - page_token='page_token_value', - filter='filter_value', - order_by='order_by_value', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_assets), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client.list_assets(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == service.ListAssetsRequest( - parent='parent_value', - page_token='page_token_value', - filter='filter_value', - order_by='order_by_value', - ) - -def test_list_assets_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = DataplexServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.list_assets in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.list_assets] = mock_rpc - request = {} - client.list_assets(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - client.list_assets(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_list_assets_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: - client = DataplexServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._client._transport.list_assets in client._client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.AsyncMock() - mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[client._client._transport.list_assets] = mock_rpc - - request = {} - await client.list_assets(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - await client.list_assets(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_list_assets_async(transport: str = 'grpc_asyncio', request_type=service.ListAssetsRequest): - client = DataplexServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. 
-    with mock.patch.object(
-            type(client.transport.list_assets),
-            '__call__') as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(service.ListAssetsResponse(
-            next_page_token='next_page_token_value',
-        ))
-        response = await client.list_assets(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert len(call.mock_calls)
-        _, args, _ = call.mock_calls[0]
-        request = service.ListAssetsRequest()
-        assert args[0] == request
-
-        # Establish that the response is the type that we expect.
-        assert isinstance(response, pagers.ListAssetsAsyncPager)
-        assert response.next_page_token == 'next_page_token_value'
-
-
-@pytest.mark.asyncio
-async def test_list_assets_async_from_dict():
-    await test_list_assets_async(request_type=dict)
-
-def test_list_assets_field_headers():
-    client = DataplexServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-    )
-
-    # Any value that is part of the HTTP/1.1 URI should be sent as
-    # a field header. Set these to a non-empty value.
-    request = service.ListAssetsRequest()
-
-    request.parent = 'parent_value'
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.list_assets),
-            '__call__') as call:
-        call.return_value = service.ListAssetsResponse()
-        client.list_assets(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert len(call.mock_calls) == 1
-        _, args, _ = call.mock_calls[0]
-        assert args[0] == request
-
-    # Establish that the field header was sent.
-    _, _, kw = call.mock_calls[0]
-    assert (
-        'x-goog-request-params',
-        'parent=parent_value',
-    ) in kw['metadata']
-
-
-@pytest.mark.asyncio
-async def test_list_assets_field_headers_async():
-    client = DataplexServiceAsyncClient(
-        credentials=async_anonymous_credentials(),
-    )
-
-    # Any value that is part of the HTTP/1.1 URI should be sent as
-    # a field header. Set these to a non-empty value.
-    request = service.ListAssetsRequest()
-
-    request.parent = 'parent_value'
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.list_assets),
-            '__call__') as call:
-        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(service.ListAssetsResponse())
-        await client.list_assets(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert len(call.mock_calls)
-        _, args, _ = call.mock_calls[0]
-        assert args[0] == request
-
-    # Establish that the field header was sent.
-    _, _, kw = call.mock_calls[0]
-    assert (
-        'x-goog-request-params',
-        'parent=parent_value',
-    ) in kw['metadata']
-
-
-def test_list_assets_flattened():
-    client = DataplexServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-    )
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.list_assets),
-            '__call__') as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = service.ListAssetsResponse()
-        # Call the method with a truthy value for each flattened field,
-        # using the keyword arguments to the method.
-        client.list_assets(
-            parent='parent_value',
-        )
-
-        # Establish that the underlying call was made with the expected
-        # request object values.
-        assert len(call.mock_calls) == 1
-        _, args, _ = call.mock_calls[0]
-        arg = args[0].parent
-        mock_val = 'parent_value'
-        assert arg == mock_val
-
-
-def test_list_assets_flattened_error():
-    client = DataplexServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-    )
-
-    # Attempting to call a method with both a request object and flattened
-    # fields is an error.
-    with pytest.raises(ValueError):
-        client.list_assets(
-            service.ListAssetsRequest(),
-            parent='parent_value',
-        )
-
-@pytest.mark.asyncio
-async def test_list_assets_flattened_async():
-    client = DataplexServiceAsyncClient(
-        credentials=async_anonymous_credentials(),
-    )
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.list_assets),
-            '__call__') as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(service.ListAssetsResponse())
-        # Call the method with a truthy value for each flattened field,
-        # using the keyword arguments to the method.
-        response = await client.list_assets(
-            parent='parent_value',
-        )
-
-        # Establish that the underlying call was made with the expected
-        # request object values.
-        assert len(call.mock_calls)
-        _, args, _ = call.mock_calls[0]
-        arg = args[0].parent
-        mock_val = 'parent_value'
-        assert arg == mock_val
-
-@pytest.mark.asyncio
-async def test_list_assets_flattened_error_async():
-    client = DataplexServiceAsyncClient(
-        credentials=async_anonymous_credentials(),
-    )
-
-    # Attempting to call a method with both a request object and flattened
-    # fields is an error.
-    with pytest.raises(ValueError):
-        await client.list_assets(
-            service.ListAssetsRequest(),
-            parent='parent_value',
-        )
-
-
-def test_list_assets_pager(transport_name: str = "grpc"):
-    client = DataplexServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport=transport_name,
-    )
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.list_assets),
-            '__call__') as call:
-        # Set the response to a series of pages.
-        call.side_effect = (
-            service.ListAssetsResponse(
-                assets=[
-                    resources.Asset(),
-                    resources.Asset(),
-                    resources.Asset(),
-                ],
-                next_page_token='abc',
-            ),
-            service.ListAssetsResponse(
-                assets=[],
-                next_page_token='def',
-            ),
-            service.ListAssetsResponse(
-                assets=[
-                    resources.Asset(),
-                ],
-                next_page_token='ghi',
-            ),
-            service.ListAssetsResponse(
-                assets=[
-                    resources.Asset(),
-                    resources.Asset(),
-                ],
-            ),
-            RuntimeError,
-        )
-
-        retry = retries.Retry()
-        timeout = 5
-        expected_metadata = (
-            gapic_v1.routing_header.to_grpc_metadata((
-                ('parent', ''),
-            )),
-        )
-        pager = client.list_assets(request={}, retry=retry, timeout=timeout)
-
-        assert pager._metadata == expected_metadata
-        assert pager._retry == retry
-        assert pager._timeout == timeout
-
-        results = list(pager)
-        assert len(results) == 6
-        assert all(isinstance(i, resources.Asset)
-                   for i in results)
-
-
-def test_list_assets_pages(transport_name: str = "grpc"):
-    client = DataplexServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport=transport_name,
-    )
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.list_assets),
-            '__call__') as call:
-        # Set the response to a series of pages.
- call.side_effect = ( - service.ListAssetsResponse( - assets=[ - resources.Asset(), - resources.Asset(), - resources.Asset(), - ], - next_page_token='abc', - ), - service.ListAssetsResponse( - assets=[], - next_page_token='def', - ), - service.ListAssetsResponse( - assets=[ - resources.Asset(), - ], - next_page_token='ghi', - ), - service.ListAssetsResponse( - assets=[ - resources.Asset(), - resources.Asset(), - ], - ), - RuntimeError, - ) - pages = list(client.list_assets(request={}).pages) - for page_, token in zip(pages, ['abc','def','ghi', '']): - assert page_.raw_page.next_page_token == token - -@pytest.mark.asyncio -async def test_list_assets_async_pager(): - client = DataplexServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_assets), - '__call__', new_callable=mock.AsyncMock) as call: - # Set the response to a series of pages. - call.side_effect = ( - service.ListAssetsResponse( - assets=[ - resources.Asset(), - resources.Asset(), - resources.Asset(), - ], - next_page_token='abc', - ), - service.ListAssetsResponse( - assets=[], - next_page_token='def', - ), - service.ListAssetsResponse( - assets=[ - resources.Asset(), - ], - next_page_token='ghi', - ), - service.ListAssetsResponse( - assets=[ - resources.Asset(), - resources.Asset(), - ], - ), - RuntimeError, - ) - async_pager = await client.list_assets(request={},) - assert async_pager.next_page_token == 'abc' - responses = [] - async for response in async_pager: # pragma: no branch - responses.append(response) - - assert len(responses) == 6 - assert all(isinstance(i, resources.Asset) - for i in responses) - - -@pytest.mark.asyncio -async def test_list_assets_async_pages(): - client = DataplexServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_assets), - '__call__', new_callable=mock.AsyncMock) as call: - # Set the response to a series of pages. - call.side_effect = ( - service.ListAssetsResponse( - assets=[ - resources.Asset(), - resources.Asset(), - resources.Asset(), - ], - next_page_token='abc', - ), - service.ListAssetsResponse( - assets=[], - next_page_token='def', - ), - service.ListAssetsResponse( - assets=[ - resources.Asset(), - ], - next_page_token='ghi', - ), - service.ListAssetsResponse( - assets=[ - resources.Asset(), - resources.Asset(), - ], - ), - RuntimeError, - ) - pages = [] - # Workaround issue in python 3.9 related to code coverage by adding `# pragma: no branch` - # See https://github.com/googleapis/gapic-generator-python/pull/1174#issuecomment-1025132372 - async for page_ in ( # pragma: no branch - await client.list_assets(request={}) - ).pages: - pages.append(page_) - for page_, token in zip(pages, ['abc','def','ghi', '']): - assert page_.raw_page.next_page_token == token - -@pytest.mark.parametrize("request_type", [ - service.GetAssetRequest, - dict, -]) -def test_get_asset(request_type, transport: str = 'grpc'): - client = DataplexServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. 
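The async pager tests above mirror the sync ones but consume results with async iteration. A sketch of the equivalent live usage; the zone resource name is a hypothetical placeholder and Application Default Credentials are assumed:

    import asyncio

    from google.cloud import dataplex_v1

    async def collect_assets(zone_name: str):
        client = dataplex_v1.DataplexServiceAsyncClient()
        # The async surface returns its pager once the first RPC resolves;
        # iterating then fetches further pages on demand.
        pager = await client.list_assets(parent=zone_name)
        return [asset async for asset in pager]

    # asyncio.run(collect_assets("projects/p/locations/l/lakes/lake/zones/zone"))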
- with mock.patch.object( - type(client.transport.get_asset), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = resources.Asset( - name='name_value', - display_name='display_name_value', - uid='uid_value', - description='description_value', - state=resources.State.ACTIVE, - ) - response = client.get_asset(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - request = service.GetAssetRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, resources.Asset) - assert response.name == 'name_value' - assert response.display_name == 'display_name_value' - assert response.uid == 'uid_value' - assert response.description == 'description_value' - assert response.state == resources.State.ACTIVE - - -def test_get_asset_non_empty_request_with_auto_populated_field(): - # This test is a coverage failsafe to make sure that UUID4 fields are - # automatically populated, according to AIP-4235, with non-empty requests. - client = DataplexServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. - request = service.GetAssetRequest( - name='name_value', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_asset), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client.get_asset(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == service.GetAssetRequest( - name='name_value', - ) - -def test_get_asset_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = DataplexServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.get_asset in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.get_asset] = mock_rpc - request = {} - client.get_asset(request) - - # Establish that the underlying gRPC stub method was called. 
-        assert mock_rpc.call_count == 1
-
-        client.get_asset(request)
-
-        # Establish that a new wrapper was not created for this call
-        assert wrapper_fn.call_count == 0
-        assert mock_rpc.call_count == 2
-
-@pytest.mark.asyncio
-async def test_get_asset_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"):
-    # Clients should use _prep_wrapped_messages to create cached wrapped rpcs,
-    # instead of constructing them on each call
-    with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn:
-        client = DataplexServiceAsyncClient(
-            credentials=async_anonymous_credentials(),
-            transport=transport,
-        )
-
-        # Should wrap all calls on client creation
-        assert wrapper_fn.call_count > 0
-        wrapper_fn.reset_mock()
-
-        # Ensure method has been cached
-        assert client._client._transport.get_asset in client._client._transport._wrapped_methods
-
-        # Replace cached wrapped function with mock
-        mock_rpc = mock.AsyncMock()
-        mock_rpc.return_value = mock.Mock()
-        client._client._transport._wrapped_methods[client._client._transport.get_asset] = mock_rpc
-
-        request = {}
-        await client.get_asset(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert mock_rpc.call_count == 1
-
-        await client.get_asset(request)
-
-        # Establish that a new wrapper was not created for this call
-        assert wrapper_fn.call_count == 0
-        assert mock_rpc.call_count == 2
-
-@pytest.mark.asyncio
-async def test_get_asset_async(transport: str = 'grpc_asyncio', request_type=service.GetAssetRequest):
-    client = DataplexServiceAsyncClient(
-        credentials=async_anonymous_credentials(),
-        transport=transport,
-    )
-
-    # Everything is optional in proto3 as far as the runtime is concerned,
-    # and we are mocking out the actual API, so just send an empty request.
-    request = request_type()
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.get_asset),
-            '__call__') as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(resources.Asset(
-            name='name_value',
-            display_name='display_name_value',
-            uid='uid_value',
-            description='description_value',
-            state=resources.State.ACTIVE,
-        ))
-        response = await client.get_asset(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert len(call.mock_calls)
-        _, args, _ = call.mock_calls[0]
-        request = service.GetAssetRequest()
-        assert args[0] == request
-
-        # Establish that the response is the type that we expect.
-        assert isinstance(response, resources.Asset)
-        assert response.name == 'name_value'
-        assert response.display_name == 'display_name_value'
-        assert response.uid == 'uid_value'
-        assert response.description == 'description_value'
-        assert response.state == resources.State.ACTIVE
-
-
-@pytest.mark.asyncio
-async def test_get_asset_async_from_dict():
-    await test_get_asset_async(request_type=dict)
-
-def test_get_asset_field_headers():
-    client = DataplexServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-    )
-
-    # Any value that is part of the HTTP/1.1 URI should be sent as
-    # a field header. Set these to a non-empty value.
-    request = service.GetAssetRequest()
-
-    request.name = 'name_value'
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.get_asset),
-            '__call__') as call:
-        call.return_value = resources.Asset()
-        client.get_asset(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert len(call.mock_calls) == 1
-        _, args, _ = call.mock_calls[0]
-        assert args[0] == request
-
-    # Establish that the field header was sent.
-    _, _, kw = call.mock_calls[0]
-    assert (
-        'x-goog-request-params',
-        'name=name_value',
-    ) in kw['metadata']
-
-
-@pytest.mark.asyncio
-async def test_get_asset_field_headers_async():
-    client = DataplexServiceAsyncClient(
-        credentials=async_anonymous_credentials(),
-    )
-
-    # Any value that is part of the HTTP/1.1 URI should be sent as
-    # a field header. Set these to a non-empty value.
-    request = service.GetAssetRequest()
-
-    request.name = 'name_value'
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.get_asset),
-            '__call__') as call:
-        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(resources.Asset())
-        await client.get_asset(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert len(call.mock_calls)
-        _, args, _ = call.mock_calls[0]
-        assert args[0] == request
-
-    # Establish that the field header was sent.
-    _, _, kw = call.mock_calls[0]
-    assert (
-        'x-goog-request-params',
-        'name=name_value',
-    ) in kw['metadata']
-
-
-def test_get_asset_flattened():
-    client = DataplexServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-    )
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.get_asset),
-            '__call__') as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = resources.Asset()
-        # Call the method with a truthy value for each flattened field,
-        # using the keyword arguments to the method.
-        client.get_asset(
-            name='name_value',
-        )
-
-        # Establish that the underlying call was made with the expected
-        # request object values.
-        assert len(call.mock_calls) == 1
-        _, args, _ = call.mock_calls[0]
-        arg = args[0].name
-        mock_val = 'name_value'
-        assert arg == mock_val
-
-
-def test_get_asset_flattened_error():
-    client = DataplexServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-    )
-
-    # Attempting to call a method with both a request object and flattened
-    # fields is an error.
-    with pytest.raises(ValueError):
-        client.get_asset(
-            service.GetAssetRequest(),
-            name='name_value',
-        )
-
-@pytest.mark.asyncio
-async def test_get_asset_flattened_async():
-    client = DataplexServiceAsyncClient(
-        credentials=async_anonymous_credentials(),
-    )
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.get_asset),
-            '__call__') as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(resources.Asset())
-        # Call the method with a truthy value for each flattened field,
-        # using the keyword arguments to the method.
-        response = await client.get_asset(
-            name='name_value',
-        )
-
-        # Establish that the underlying call was made with the expected
-        # request object values.
- assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = 'name_value' - assert arg == mock_val - -@pytest.mark.asyncio -async def test_get_asset_flattened_error_async(): - client = DataplexServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - await client.get_asset( - service.GetAssetRequest(), - name='name_value', - ) - - -@pytest.mark.parametrize("request_type", [ - service.ListAssetActionsRequest, - dict, -]) -def test_list_asset_actions(request_type, transport: str = 'grpc'): - client = DataplexServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_asset_actions), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = service.ListActionsResponse( - next_page_token='next_page_token_value', - ) - response = client.list_asset_actions(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - request = service.ListAssetActionsRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, pagers.ListAssetActionsPager) - assert response.next_page_token == 'next_page_token_value' - - -def test_list_asset_actions_non_empty_request_with_auto_populated_field(): - # This test is a coverage failsafe to make sure that UUID4 fields are - # automatically populated, according to AIP-4235, with non-empty requests. - client = DataplexServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. - request = service.ListAssetActionsRequest( - parent='parent_value', - page_token='page_token_value', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_asset_actions), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. 
- client.list_asset_actions(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == service.ListAssetActionsRequest( - parent='parent_value', - page_token='page_token_value', - ) - -def test_list_asset_actions_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = DataplexServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.list_asset_actions in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.list_asset_actions] = mock_rpc - request = {} - client.list_asset_actions(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - client.list_asset_actions(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_list_asset_actions_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: - client = DataplexServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._client._transport.list_asset_actions in client._client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.AsyncMock() - mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[client._client._transport.list_asset_actions] = mock_rpc - - request = {} - await client.list_asset_actions(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - await client.list_asset_actions(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_list_asset_actions_async(transport: str = 'grpc_asyncio', request_type=service.ListAssetActionsRequest): - client = DataplexServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_asset_actions), - '__call__') as call: - # Designate an appropriate return value for the call. 
-        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(service.ListActionsResponse(
-            next_page_token='next_page_token_value',
-        ))
-        response = await client.list_asset_actions(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert len(call.mock_calls)
-        _, args, _ = call.mock_calls[0]
-        request = service.ListAssetActionsRequest()
-        assert args[0] == request
-
-    # Establish that the response is the type that we expect.
-    assert isinstance(response, pagers.ListAssetActionsAsyncPager)
-    assert response.next_page_token == 'next_page_token_value'
-
-
-@pytest.mark.asyncio
-async def test_list_asset_actions_async_from_dict():
-    await test_list_asset_actions_async(request_type=dict)
-
-def test_list_asset_actions_field_headers():
-    client = DataplexServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-    )
-
-    # Any value that is part of the HTTP/1.1 URI should be sent as
-    # a field header. Set these to a non-empty value.
-    request = service.ListAssetActionsRequest()
-
-    request.parent = 'parent_value'
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.list_asset_actions),
-            '__call__') as call:
-        call.return_value = service.ListActionsResponse()
-        client.list_asset_actions(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert len(call.mock_calls) == 1
-        _, args, _ = call.mock_calls[0]
-        assert args[0] == request
-
-    # Establish that the field header was sent.
-    _, _, kw = call.mock_calls[0]
-    assert (
-        'x-goog-request-params',
-        'parent=parent_value',
-    ) in kw['metadata']
-
-
-@pytest.mark.asyncio
-async def test_list_asset_actions_field_headers_async():
-    client = DataplexServiceAsyncClient(
-        credentials=async_anonymous_credentials(),
-    )
-
-    # Any value that is part of the HTTP/1.1 URI should be sent as
-    # a field header. Set these to a non-empty value.
-    request = service.ListAssetActionsRequest()
-
-    request.parent = 'parent_value'
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.list_asset_actions),
-            '__call__') as call:
-        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(service.ListActionsResponse())
-        await client.list_asset_actions(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert len(call.mock_calls)
-        _, args, _ = call.mock_calls[0]
-        assert args[0] == request
-
-    # Establish that the field header was sent.
-    _, _, kw = call.mock_calls[0]
-    assert (
-        'x-goog-request-params',
-        'parent=parent_value',
-    ) in kw['metadata']
-
-
-def test_list_asset_actions_flattened():
-    client = DataplexServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-    )
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.list_asset_actions),
-            '__call__') as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = service.ListActionsResponse()
-        # Call the method with a truthy value for each flattened field,
-        # using the keyword arguments to the method.
-        client.list_asset_actions(
-            parent='parent_value',
-        )
-
-        # Establish that the underlying call was made with the expected
-        # request object values.
-        assert len(call.mock_calls) == 1
-        _, args, _ = call.mock_calls[0]
-        arg = args[0].parent
-        mock_val = 'parent_value'
-        assert arg == mock_val
-
-
-def test_list_asset_actions_flattened_error():
-    client = DataplexServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-    )
-
-    # Attempting to call a method with both a request object and flattened
-    # fields is an error.
-    with pytest.raises(ValueError):
-        client.list_asset_actions(
-            service.ListAssetActionsRequest(),
-            parent='parent_value',
-        )
-
-@pytest.mark.asyncio
-async def test_list_asset_actions_flattened_async():
-    client = DataplexServiceAsyncClient(
-        credentials=async_anonymous_credentials(),
-    )
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.list_asset_actions),
-            '__call__') as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(service.ListActionsResponse())
-        # Call the method with a truthy value for each flattened field,
-        # using the keyword arguments to the method.
-        response = await client.list_asset_actions(
-            parent='parent_value',
-        )
-
-        # Establish that the underlying call was made with the expected
-        # request object values.
-        assert len(call.mock_calls)
-        _, args, _ = call.mock_calls[0]
-        arg = args[0].parent
-        mock_val = 'parent_value'
-        assert arg == mock_val
-
-@pytest.mark.asyncio
-async def test_list_asset_actions_flattened_error_async():
-    client = DataplexServiceAsyncClient(
-        credentials=async_anonymous_credentials(),
-    )
-
-    # Attempting to call a method with both a request object and flattened
-    # fields is an error.
-    with pytest.raises(ValueError):
-        await client.list_asset_actions(
-            service.ListAssetActionsRequest(),
-            parent='parent_value',
-        )
-
-
-def test_list_asset_actions_pager(transport_name: str = "grpc"):
-    client = DataplexServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport=transport_name,
-    )
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.list_asset_actions),
-            '__call__') as call:
-        # Set the response to a series of pages.
-        call.side_effect = (
-            service.ListActionsResponse(
-                actions=[
-                    resources.Action(),
-                    resources.Action(),
-                    resources.Action(),
-                ],
-                next_page_token='abc',
-            ),
-            service.ListActionsResponse(
-                actions=[],
-                next_page_token='def',
-            ),
-            service.ListActionsResponse(
-                actions=[
-                    resources.Action(),
-                ],
-                next_page_token='ghi',
-            ),
-            service.ListActionsResponse(
-                actions=[
-                    resources.Action(),
-                    resources.Action(),
-                ],
-            ),
-            RuntimeError,
-        )
-
-        expected_metadata = ()
-        retry = retries.Retry()
-        timeout = 5
-        expected_metadata = tuple(expected_metadata) + (
-            gapic_v1.routing_header.to_grpc_metadata((
-                ('parent', ''),
-            )),
-        )
-        pager = client.list_asset_actions(request={}, retry=retry, timeout=timeout)
-
-        assert pager._metadata == expected_metadata
-        assert pager._retry == retry
-        assert pager._timeout == timeout
-
-        results = list(pager)
-        assert len(results) == 6
-        assert all(isinstance(i, resources.Action)
-                   for i in results)
-
-
-def test_list_asset_actions_pages(transport_name: str = "grpc"):
-    client = DataplexServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport=transport_name,
-    )
-
-    # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object( - type(client.transport.list_asset_actions), - '__call__') as call: - # Set the response to a series of pages. - call.side_effect = ( - service.ListActionsResponse( - actions=[ - resources.Action(), - resources.Action(), - resources.Action(), - ], - next_page_token='abc', - ), - service.ListActionsResponse( - actions=[], - next_page_token='def', - ), - service.ListActionsResponse( - actions=[ - resources.Action(), - ], - next_page_token='ghi', - ), - service.ListActionsResponse( - actions=[ - resources.Action(), - resources.Action(), - ], - ), - RuntimeError, - ) - pages = list(client.list_asset_actions(request={}).pages) - for page_, token in zip(pages, ['abc','def','ghi', '']): - assert page_.raw_page.next_page_token == token - -@pytest.mark.asyncio -async def test_list_asset_actions_async_pager(): - client = DataplexServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_asset_actions), - '__call__', new_callable=mock.AsyncMock) as call: - # Set the response to a series of pages. - call.side_effect = ( - service.ListActionsResponse( - actions=[ - resources.Action(), - resources.Action(), - resources.Action(), - ], - next_page_token='abc', - ), - service.ListActionsResponse( - actions=[], - next_page_token='def', - ), - service.ListActionsResponse( - actions=[ - resources.Action(), - ], - next_page_token='ghi', - ), - service.ListActionsResponse( - actions=[ - resources.Action(), - resources.Action(), - ], - ), - RuntimeError, - ) - async_pager = await client.list_asset_actions(request={},) - assert async_pager.next_page_token == 'abc' - responses = [] - async for response in async_pager: # pragma: no branch - responses.append(response) - - assert len(responses) == 6 - assert all(isinstance(i, resources.Action) - for i in responses) - - -@pytest.mark.asyncio -async def test_list_asset_actions_async_pages(): - client = DataplexServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_asset_actions), - '__call__', new_callable=mock.AsyncMock) as call: - # Set the response to a series of pages. 
- call.side_effect = ( - service.ListActionsResponse( - actions=[ - resources.Action(), - resources.Action(), - resources.Action(), - ], - next_page_token='abc', - ), - service.ListActionsResponse( - actions=[], - next_page_token='def', - ), - service.ListActionsResponse( - actions=[ - resources.Action(), - ], - next_page_token='ghi', - ), - service.ListActionsResponse( - actions=[ - resources.Action(), - resources.Action(), - ], - ), - RuntimeError, - ) - pages = [] - # Workaround issue in python 3.9 related to code coverage by adding `# pragma: no branch` - # See https://github.com/googleapis/gapic-generator-python/pull/1174#issuecomment-1025132372 - async for page_ in ( # pragma: no branch - await client.list_asset_actions(request={}) - ).pages: - pages.append(page_) - for page_, token in zip(pages, ['abc','def','ghi', '']): - assert page_.raw_page.next_page_token == token - -@pytest.mark.parametrize("request_type", [ - service.CreateTaskRequest, - dict, -]) -def test_create_task(request_type, transport: str = 'grpc'): - client = DataplexServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_task), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = operations_pb2.Operation(name='operations/spam') - response = client.create_task(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - request = service.CreateTaskRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, future.Future) - - -def test_create_task_non_empty_request_with_auto_populated_field(): - # This test is a coverage failsafe to make sure that UUID4 fields are - # automatically populated, according to AIP-4235, with non-empty requests. - client = DataplexServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. - request = service.CreateTaskRequest( - parent='parent_value', - task_id='task_id_value', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_task), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. 
- client.create_task(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == service.CreateTaskRequest( - parent='parent_value', - task_id='task_id_value', - ) - -def test_create_task_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = DataplexServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.create_task in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.create_task] = mock_rpc - request = {} - client.create_task(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - # Operation methods call wrapper_fn to build a cached - # client._transport.operations_client instance on first rpc call. - # Subsequent calls should use the cached wrapper - wrapper_fn.reset_mock() - - client.create_task(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_create_task_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: - client = DataplexServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._client._transport.create_task in client._client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.AsyncMock() - mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[client._client._transport.create_task] = mock_rpc - - request = {} - await client.create_task(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - # Operation methods call wrapper_fn to build a cached - # client._transport.operations_client instance on first rpc call. - # Subsequent calls should use the cached wrapper - wrapper_fn.reset_mock() - - await client.create_task(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_create_task_async(transport: str = 'grpc_asyncio', request_type=service.CreateTaskRequest): - client = DataplexServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.create_task), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name='operations/spam') - ) - response = await client.create_task(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - request = service.CreateTaskRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, future.Future) - - -@pytest.mark.asyncio -async def test_create_task_async_from_dict(): - await test_create_task_async(request_type=dict) - -def test_create_task_field_headers(): - client = DataplexServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = service.CreateTaskRequest() - - request.parent = 'parent_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_task), - '__call__') as call: - call.return_value = operations_pb2.Operation(name='operations/op') - client.create_task(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'parent=parent_value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_create_task_field_headers_async(): - client = DataplexServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = service.CreateTaskRequest() - - request.parent = 'parent_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_task), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(operations_pb2.Operation(name='operations/op')) - await client.create_task(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'parent=parent_value', - ) in kw['metadata'] - - -def test_create_task_flattened(): - client = DataplexServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_task), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = operations_pb2.Operation(name='operations/op') - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.create_task( - parent='parent_value', - task=tasks.Task(name='name_value'), - task_id='task_id_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. 
-        assert len(call.mock_calls) == 1
-        _, args, _ = call.mock_calls[0]
-        arg = args[0].parent
-        mock_val = 'parent_value'
-        assert arg == mock_val
-        arg = args[0].task
-        mock_val = tasks.Task(name='name_value')
-        assert arg == mock_val
-        arg = args[0].task_id
-        mock_val = 'task_id_value'
-        assert arg == mock_val
-
-
-def test_create_task_flattened_error():
-    client = DataplexServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-    )
-
-    # Attempting to call a method with both a request object and flattened
-    # fields is an error.
-    with pytest.raises(ValueError):
-        client.create_task(
-            service.CreateTaskRequest(),
-            parent='parent_value',
-            task=tasks.Task(name='name_value'),
-            task_id='task_id_value',
-        )
-
-@pytest.mark.asyncio
-async def test_create_task_flattened_async():
-    client = DataplexServiceAsyncClient(
-        credentials=async_anonymous_credentials(),
-    )
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.create_task),
-            '__call__') as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
-            operations_pb2.Operation(name='operations/spam')
-        )
-        # Call the method with a truthy value for each flattened field,
-        # using the keyword arguments to the method.
-        response = await client.create_task(
-            parent='parent_value',
-            task=tasks.Task(name='name_value'),
-            task_id='task_id_value',
-        )
-
-        # Establish that the underlying call was made with the expected
-        # request object values.
-        assert len(call.mock_calls)
-        _, args, _ = call.mock_calls[0]
-        arg = args[0].parent
-        mock_val = 'parent_value'
-        assert arg == mock_val
-        arg = args[0].task
-        mock_val = tasks.Task(name='name_value')
-        assert arg == mock_val
-        arg = args[0].task_id
-        mock_val = 'task_id_value'
-        assert arg == mock_val
-
-@pytest.mark.asyncio
-async def test_create_task_flattened_error_async():
-    client = DataplexServiceAsyncClient(
-        credentials=async_anonymous_credentials(),
-    )
-
-    # Attempting to call a method with both a request object and flattened
-    # fields is an error.
-    with pytest.raises(ValueError):
-        await client.create_task(
-            service.CreateTaskRequest(),
-            parent='parent_value',
-            task=tasks.Task(name='name_value'),
-            task_id='task_id_value',
-        )
-
-
-@pytest.mark.parametrize("request_type", [
-    service.UpdateTaskRequest,
-    dict,
-])
-def test_update_task(request_type, transport: str = 'grpc'):
-    client = DataplexServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport=transport,
-    )
-
-    # Everything is optional in proto3 as far as the runtime is concerned,
-    # and we are mocking out the actual API, so just send an empty request.
-    request = request_type()
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.update_task),
-            '__call__') as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = operations_pb2.Operation(name='operations/spam')
-        response = client.update_task(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert len(call.mock_calls) == 1
-        _, args, _ = call.mock_calls[0]
-        request = service.UpdateTaskRequest()
-        assert args[0] == request
-
-    # Establish that the response is the type that we expect.
-    assert isinstance(response, future.Future)
-
-
-def test_update_task_non_empty_request_with_auto_populated_field():
-    # This test is a coverage failsafe to make sure that UUID4 fields are
-    # automatically populated, according to AIP-4235, with non-empty requests.
-    client = DataplexServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport='grpc',
-    )
-
-    # Populate all string fields in the request which are not UUID4
-    # since we want to check that UUID4 are populated automatically
-    # if they meet the requirements of AIP 4235.
-    request = service.UpdateTaskRequest()
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.update_task),
-            '__call__') as call:
-        call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string.
-        client.update_task(request=request)
-        call.assert_called()
-        _, args, _ = call.mock_calls[0]
-        assert args[0] == service.UpdateTaskRequest()
-
-def test_update_task_use_cached_wrapped_rpc():
-    # Clients should use _prep_wrapped_messages to create cached wrapped rpcs,
-    # instead of constructing them on each call
-    with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn:
-        client = DataplexServiceClient(
-            credentials=ga_credentials.AnonymousCredentials(),
-            transport="grpc",
-        )
-
-        # Should wrap all calls on client creation
-        assert wrapper_fn.call_count > 0
-        wrapper_fn.reset_mock()
-
-        # Ensure method has been cached
-        assert client._transport.update_task in client._transport._wrapped_methods
-
-        # Replace cached wrapped function with mock
-        mock_rpc = mock.Mock()
-        mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string.
-        client._transport._wrapped_methods[client._transport.update_task] = mock_rpc
-        request = {}
-        client.update_task(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert mock_rpc.call_count == 1
-
-        # Operation methods call wrapper_fn to build a cached
-        # client._transport.operations_client instance on first rpc call.
-        # Subsequent calls should use the cached wrapper
-        wrapper_fn.reset_mock()
-
-        client.update_task(request)
-
-        # Establish that a new wrapper was not created for this call
-        assert wrapper_fn.call_count == 0
-        assert mock_rpc.call_count == 2
-
-@pytest.mark.asyncio
-async def test_update_task_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"):
-    # Clients should use _prep_wrapped_messages to create cached wrapped rpcs,
-    # instead of constructing them on each call
-    with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn:
-        client = DataplexServiceAsyncClient(
-            credentials=async_anonymous_credentials(),
-            transport=transport,
-        )
-
-        # Should wrap all calls on client creation
-        assert wrapper_fn.call_count > 0
-        wrapper_fn.reset_mock()
-
-        # Ensure method has been cached
-        assert client._client._transport.update_task in client._client._transport._wrapped_methods
-
-        # Replace cached wrapped function with mock
-        mock_rpc = mock.AsyncMock()
-        mock_rpc.return_value = mock.Mock()
-        client._client._transport._wrapped_methods[client._client._transport.update_task] = mock_rpc
-
-        request = {}
-        await client.update_task(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert mock_rpc.call_count == 1
-
-        # Operation methods call wrapper_fn to build a cached
-        # client._transport.operations_client instance on first rpc call.
- # Subsequent calls should use the cached wrapper - wrapper_fn.reset_mock() - - await client.update_task(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_update_task_async(transport: str = 'grpc_asyncio', request_type=service.UpdateTaskRequest): - client = DataplexServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_task), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name='operations/spam') - ) - response = await client.update_task(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - request = service.UpdateTaskRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, future.Future) - - -@pytest.mark.asyncio -async def test_update_task_async_from_dict(): - await test_update_task_async(request_type=dict) - -def test_update_task_field_headers(): - client = DataplexServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = service.UpdateTaskRequest() - - request.task.name = 'name_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_task), - '__call__') as call: - call.return_value = operations_pb2.Operation(name='operations/op') - client.update_task(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'task.name=name_value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_update_task_field_headers_async(): - client = DataplexServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = service.UpdateTaskRequest() - - request.task.name = 'name_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_task), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(operations_pb2.Operation(name='operations/op')) - await client.update_task(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. 
-    _, _, kw = call.mock_calls[0]
-    assert (
-        'x-goog-request-params',
-        'task.name=name_value',
-    ) in kw['metadata']
-
-
-def test_update_task_flattened():
-    client = DataplexServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-    )
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.update_task),
-            '__call__') as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = operations_pb2.Operation(name='operations/op')
-        # Call the method with a truthy value for each flattened field,
-        # using the keyword arguments to the method.
-        client.update_task(
-            task=tasks.Task(name='name_value'),
-            update_mask=field_mask_pb2.FieldMask(paths=['paths_value']),
-        )
-
-        # Establish that the underlying call was made with the expected
-        # request object values.
-        assert len(call.mock_calls) == 1
-        _, args, _ = call.mock_calls[0]
-        arg = args[0].task
-        mock_val = tasks.Task(name='name_value')
-        assert arg == mock_val
-        arg = args[0].update_mask
-        mock_val = field_mask_pb2.FieldMask(paths=['paths_value'])
-        assert arg == mock_val
-
-
-def test_update_task_flattened_error():
-    client = DataplexServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-    )
-
-    # Attempting to call a method with both a request object and flattened
-    # fields is an error.
-    with pytest.raises(ValueError):
-        client.update_task(
-            service.UpdateTaskRequest(),
-            task=tasks.Task(name='name_value'),
-            update_mask=field_mask_pb2.FieldMask(paths=['paths_value']),
-        )
-
-@pytest.mark.asyncio
-async def test_update_task_flattened_async():
-    client = DataplexServiceAsyncClient(
-        credentials=async_anonymous_credentials(),
-    )
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.update_task),
-            '__call__') as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
-            operations_pb2.Operation(name='operations/spam')
-        )
-        # Call the method with a truthy value for each flattened field,
-        # using the keyword arguments to the method.
-        response = await client.update_task(
-            task=tasks.Task(name='name_value'),
-            update_mask=field_mask_pb2.FieldMask(paths=['paths_value']),
-        )
-
-        # Establish that the underlying call was made with the expected
-        # request object values.
-        assert len(call.mock_calls)
-        _, args, _ = call.mock_calls[0]
-        arg = args[0].task
-        mock_val = tasks.Task(name='name_value')
-        assert arg == mock_val
-        arg = args[0].update_mask
-        mock_val = field_mask_pb2.FieldMask(paths=['paths_value'])
-        assert arg == mock_val
-
-@pytest.mark.asyncio
-async def test_update_task_flattened_error_async():
-    client = DataplexServiceAsyncClient(
-        credentials=async_anonymous_credentials(),
-    )
-
-    # Attempting to call a method with both a request object and flattened
-    # fields is an error.
- with pytest.raises(ValueError): - await client.update_task( - service.UpdateTaskRequest(), - task=tasks.Task(name='name_value'), - update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), - ) - - -@pytest.mark.parametrize("request_type", [ - service.DeleteTaskRequest, - dict, -]) -def test_delete_task(request_type, transport: str = 'grpc'): - client = DataplexServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_task), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = operations_pb2.Operation(name='operations/spam') - response = client.delete_task(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - request = service.DeleteTaskRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, future.Future) - - -def test_delete_task_non_empty_request_with_auto_populated_field(): - # This test is a coverage failsafe to make sure that UUID4 fields are - # automatically populated, according to AIP-4235, with non-empty requests. - client = DataplexServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. - request = service.DeleteTaskRequest( - name='name_value', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_task), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client.delete_task(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == service.DeleteTaskRequest( - name='name_value', - ) - -def test_delete_task_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = DataplexServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.delete_task in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.delete_task] = mock_rpc - request = {} - client.delete_task(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - # Operation methods call wrapper_fn to build a cached - # client._transport.operations_client instance on first rpc call. 
- # Subsequent calls should use the cached wrapper - wrapper_fn.reset_mock() - - client.delete_task(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_delete_task_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: - client = DataplexServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._client._transport.delete_task in client._client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.AsyncMock() - mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[client._client._transport.delete_task] = mock_rpc - - request = {} - await client.delete_task(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - # Operation methods call wrapper_fn to build a cached - # client._transport.operations_client instance on first rpc call. - # Subsequent calls should use the cached wrapper - wrapper_fn.reset_mock() - - await client.delete_task(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_delete_task_async(transport: str = 'grpc_asyncio', request_type=service.DeleteTaskRequest): - client = DataplexServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_task), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name='operations/spam') - ) - response = await client.delete_task(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - request = service.DeleteTaskRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, future.Future) - - -@pytest.mark.asyncio -async def test_delete_task_async_from_dict(): - await test_delete_task_async(request_type=dict) - -def test_delete_task_field_headers(): - client = DataplexServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = service.DeleteTaskRequest() - - request.name = 'name_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_task), - '__call__') as call: - call.return_value = operations_pb2.Operation(name='operations/op') - client.delete_task(request) - - # Establish that the underlying gRPC stub method was called. 
-        assert len(call.mock_calls) == 1
-        _, args, _ = call.mock_calls[0]
-        assert args[0] == request
-
-    # Establish that the field header was sent.
-    _, _, kw = call.mock_calls[0]
-    assert (
-        'x-goog-request-params',
-        'name=name_value',
-    ) in kw['metadata']
-
-
-@pytest.mark.asyncio
-async def test_delete_task_field_headers_async():
-    client = DataplexServiceAsyncClient(
-        credentials=async_anonymous_credentials(),
-    )
-
-    # Any value that is part of the HTTP/1.1 URI should be sent as
-    # a field header. Set these to a non-empty value.
-    request = service.DeleteTaskRequest()
-
-    request.name = 'name_value'
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.delete_task),
-            '__call__') as call:
-        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(operations_pb2.Operation(name='operations/op'))
-        await client.delete_task(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert len(call.mock_calls)
-        _, args, _ = call.mock_calls[0]
-        assert args[0] == request
-
-    # Establish that the field header was sent.
-    _, _, kw = call.mock_calls[0]
-    assert (
-        'x-goog-request-params',
-        'name=name_value',
-    ) in kw['metadata']
-
-
-def test_delete_task_flattened():
-    client = DataplexServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-    )
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.delete_task),
-            '__call__') as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = operations_pb2.Operation(name='operations/op')
-        # Call the method with a truthy value for each flattened field,
-        # using the keyword arguments to the method.
-        client.delete_task(
-            name='name_value',
-        )
-
-        # Establish that the underlying call was made with the expected
-        # request object values.
-        assert len(call.mock_calls) == 1
-        _, args, _ = call.mock_calls[0]
-        arg = args[0].name
-        mock_val = 'name_value'
-        assert arg == mock_val
-
-
-def test_delete_task_flattened_error():
-    client = DataplexServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-    )
-
-    # Attempting to call a method with both a request object and flattened
-    # fields is an error.
-    with pytest.raises(ValueError):
-        client.delete_task(
-            service.DeleteTaskRequest(),
-            name='name_value',
-        )
-
-@pytest.mark.asyncio
-async def test_delete_task_flattened_async():
-    client = DataplexServiceAsyncClient(
-        credentials=async_anonymous_credentials(),
-    )
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.delete_task),
-            '__call__') as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
-            operations_pb2.Operation(name='operations/spam')
-        )
-        # Call the method with a truthy value for each flattened field,
-        # using the keyword arguments to the method.
-        response = await client.delete_task(
-            name='name_value',
-        )
-
-        # Establish that the underlying call was made with the expected
-        # request object values.
- assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = 'name_value' - assert arg == mock_val - -@pytest.mark.asyncio -async def test_delete_task_flattened_error_async(): - client = DataplexServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - await client.delete_task( - service.DeleteTaskRequest(), - name='name_value', - ) - - -@pytest.mark.parametrize("request_type", [ - service.ListTasksRequest, - dict, -]) -def test_list_tasks(request_type, transport: str = 'grpc'): - client = DataplexServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_tasks), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = service.ListTasksResponse( - next_page_token='next_page_token_value', - unreachable_locations=['unreachable_locations_value'], - ) - response = client.list_tasks(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - request = service.ListTasksRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, pagers.ListTasksPager) - assert response.next_page_token == 'next_page_token_value' - assert response.unreachable_locations == ['unreachable_locations_value'] - - -def test_list_tasks_non_empty_request_with_auto_populated_field(): - # This test is a coverage failsafe to make sure that UUID4 fields are - # automatically populated, according to AIP-4235, with non-empty requests. - client = DataplexServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. - request = service.ListTasksRequest( - parent='parent_value', - page_token='page_token_value', - filter='filter_value', - order_by='order_by_value', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_tasks), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. 
-        client.list_tasks(request=request)
-        call.assert_called()
-        _, args, _ = call.mock_calls[0]
-        assert args[0] == service.ListTasksRequest(
-            parent='parent_value',
-            page_token='page_token_value',
-            filter='filter_value',
-            order_by='order_by_value',
-        )
-
-def test_list_tasks_use_cached_wrapped_rpc():
-    # Clients should use _prep_wrapped_messages to create cached wrapped rpcs,
-    # instead of constructing them on each call
-    with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn:
-        client = DataplexServiceClient(
-            credentials=ga_credentials.AnonymousCredentials(),
-            transport="grpc",
-        )
-
-        # Should wrap all calls on client creation
-        assert wrapper_fn.call_count > 0
-        wrapper_fn.reset_mock()
-
-        # Ensure method has been cached
-        assert client._transport.list_tasks in client._transport._wrapped_methods
-
-        # Replace cached wrapped function with mock
-        mock_rpc = mock.Mock()
-        mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string.
-        client._transport._wrapped_methods[client._transport.list_tasks] = mock_rpc
-        request = {}
-        client.list_tasks(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert mock_rpc.call_count == 1
-
-        client.list_tasks(request)
-
-        # Establish that a new wrapper was not created for this call
-        assert wrapper_fn.call_count == 0
-        assert mock_rpc.call_count == 2
-
-@pytest.mark.asyncio
-async def test_list_tasks_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"):
-    # Clients should use _prep_wrapped_messages to create cached wrapped rpcs,
-    # instead of constructing them on each call
-    with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn:
-        client = DataplexServiceAsyncClient(
-            credentials=async_anonymous_credentials(),
-            transport=transport,
-        )
-
-        # Should wrap all calls on client creation
-        assert wrapper_fn.call_count > 0
-        wrapper_fn.reset_mock()
-
-        # Ensure method has been cached
-        assert client._client._transport.list_tasks in client._client._transport._wrapped_methods
-
-        # Replace cached wrapped function with mock
-        mock_rpc = mock.AsyncMock()
-        mock_rpc.return_value = mock.Mock()
-        client._client._transport._wrapped_methods[client._client._transport.list_tasks] = mock_rpc
-
-        request = {}
-        await client.list_tasks(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert mock_rpc.call_count == 1
-
-        await client.list_tasks(request)
-
-        # Establish that a new wrapper was not created for this call
-        assert wrapper_fn.call_count == 0
-        assert mock_rpc.call_count == 2
-
-@pytest.mark.asyncio
-async def test_list_tasks_async(transport: str = 'grpc_asyncio', request_type=service.ListTasksRequest):
-    client = DataplexServiceAsyncClient(
-        credentials=async_anonymous_credentials(),
-        transport=transport,
-    )
-
-    # Everything is optional in proto3 as far as the runtime is concerned,
-    # and we are mocking out the actual API, so just send an empty request.
-    request = request_type()
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.list_tasks),
-            '__call__') as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(service.ListTasksResponse(
-            next_page_token='next_page_token_value',
-            unreachable_locations=['unreachable_locations_value'],
-        ))
-        response = await client.list_tasks(request)
-
-        # Establish that the underlying gRPC stub method was called.
- assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - request = service.ListTasksRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, pagers.ListTasksAsyncPager) - assert response.next_page_token == 'next_page_token_value' - assert response.unreachable_locations == ['unreachable_locations_value'] - - -@pytest.mark.asyncio -async def test_list_tasks_async_from_dict(): - await test_list_tasks_async(request_type=dict) - -def test_list_tasks_field_headers(): - client = DataplexServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = service.ListTasksRequest() - - request.parent = 'parent_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_tasks), - '__call__') as call: - call.return_value = service.ListTasksResponse() - client.list_tasks(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'parent=parent_value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_list_tasks_field_headers_async(): - client = DataplexServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = service.ListTasksRequest() - - request.parent = 'parent_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_tasks), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(service.ListTasksResponse()) - await client.list_tasks(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'parent=parent_value', - ) in kw['metadata'] - - -def test_list_tasks_flattened(): - client = DataplexServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_tasks), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = service.ListTasksResponse() - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.list_tasks( - parent='parent_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - arg = args[0].parent - mock_val = 'parent_value' - assert arg == mock_val - - -def test_list_tasks_flattened_error(): - client = DataplexServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. 
-    with pytest.raises(ValueError):
-        client.list_tasks(
-            service.ListTasksRequest(),
-            parent='parent_value',
-        )
-
-@pytest.mark.asyncio
-async def test_list_tasks_flattened_async():
-    client = DataplexServiceAsyncClient(
-        credentials=async_anonymous_credentials(),
-    )
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.list_tasks),
-            '__call__') as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(service.ListTasksResponse())
-        # Call the method with a truthy value for each flattened field,
-        # using the keyword arguments to the method.
-        response = await client.list_tasks(
-            parent='parent_value',
-        )
-
-        # Establish that the underlying call was made with the expected
-        # request object values.
-        assert len(call.mock_calls)
-        _, args, _ = call.mock_calls[0]
-        arg = args[0].parent
-        mock_val = 'parent_value'
-        assert arg == mock_val
-
-@pytest.mark.asyncio
-async def test_list_tasks_flattened_error_async():
-    client = DataplexServiceAsyncClient(
-        credentials=async_anonymous_credentials(),
-    )
-
-    # Attempting to call a method with both a request object and flattened
-    # fields is an error.
-    with pytest.raises(ValueError):
-        await client.list_tasks(
-            service.ListTasksRequest(),
-            parent='parent_value',
-        )
-
-
-def test_list_tasks_pager(transport_name: str = "grpc"):
-    client = DataplexServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport=transport_name,
-    )
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.list_tasks),
-            '__call__') as call:
-        # Set the response to a series of pages.
-        call.side_effect = (
-            service.ListTasksResponse(
-                tasks=[
-                    tasks.Task(),
-                    tasks.Task(),
-                    tasks.Task(),
-                ],
-                next_page_token='abc',
-            ),
-            service.ListTasksResponse(
-                tasks=[],
-                next_page_token='def',
-            ),
-            service.ListTasksResponse(
-                tasks=[
-                    tasks.Task(),
-                ],
-                next_page_token='ghi',
-            ),
-            service.ListTasksResponse(
-                tasks=[
-                    tasks.Task(),
-                    tasks.Task(),
-                ],
-            ),
-            RuntimeError,
-        )
-
-        expected_metadata = ()
-        retry = retries.Retry()
-        timeout = 5
-        expected_metadata = tuple(expected_metadata) + (
-            gapic_v1.routing_header.to_grpc_metadata((
-                ('parent', ''),
-            )),
-        )
-        pager = client.list_tasks(request={}, retry=retry, timeout=timeout)
-
-        assert pager._metadata == expected_metadata
-        assert pager._retry == retry
-        assert pager._timeout == timeout
-
-        results = list(pager)
-        assert len(results) == 6
-        assert all(isinstance(i, tasks.Task)
-                   for i in results)
-def test_list_tasks_pages(transport_name: str = "grpc"):
-    client = DataplexServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport=transport_name,
-    )
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.list_tasks),
-            '__call__') as call:
-        # Set the response to a series of pages.
- call.side_effect = ( - service.ListTasksResponse( - tasks=[ - tasks.Task(), - tasks.Task(), - tasks.Task(), - ], - next_page_token='abc', - ), - service.ListTasksResponse( - tasks=[], - next_page_token='def', - ), - service.ListTasksResponse( - tasks=[ - tasks.Task(), - ], - next_page_token='ghi', - ), - service.ListTasksResponse( - tasks=[ - tasks.Task(), - tasks.Task(), - ], - ), - RuntimeError, - ) - pages = list(client.list_tasks(request={}).pages) - for page_, token in zip(pages, ['abc','def','ghi', '']): - assert page_.raw_page.next_page_token == token - -@pytest.mark.asyncio -async def test_list_tasks_async_pager(): - client = DataplexServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_tasks), - '__call__', new_callable=mock.AsyncMock) as call: - # Set the response to a series of pages. - call.side_effect = ( - service.ListTasksResponse( - tasks=[ - tasks.Task(), - tasks.Task(), - tasks.Task(), - ], - next_page_token='abc', - ), - service.ListTasksResponse( - tasks=[], - next_page_token='def', - ), - service.ListTasksResponse( - tasks=[ - tasks.Task(), - ], - next_page_token='ghi', - ), - service.ListTasksResponse( - tasks=[ - tasks.Task(), - tasks.Task(), - ], - ), - RuntimeError, - ) - async_pager = await client.list_tasks(request={},) - assert async_pager.next_page_token == 'abc' - responses = [] - async for response in async_pager: # pragma: no branch - responses.append(response) - - assert len(responses) == 6 - assert all(isinstance(i, tasks.Task) - for i in responses) - - -@pytest.mark.asyncio -async def test_list_tasks_async_pages(): - client = DataplexServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_tasks), - '__call__', new_callable=mock.AsyncMock) as call: - # Set the response to a series of pages. - call.side_effect = ( - service.ListTasksResponse( - tasks=[ - tasks.Task(), - tasks.Task(), - tasks.Task(), - ], - next_page_token='abc', - ), - service.ListTasksResponse( - tasks=[], - next_page_token='def', - ), - service.ListTasksResponse( - tasks=[ - tasks.Task(), - ], - next_page_token='ghi', - ), - service.ListTasksResponse( - tasks=[ - tasks.Task(), - tasks.Task(), - ], - ), - RuntimeError, - ) - pages = [] - # Workaround issue in python 3.9 related to code coverage by adding `# pragma: no branch` - # See https://github.com/googleapis/gapic-generator-python/pull/1174#issuecomment-1025132372 - async for page_ in ( # pragma: no branch - await client.list_tasks(request={}) - ).pages: - pages.append(page_) - for page_, token in zip(pages, ['abc','def','ghi', '']): - assert page_.raw_page.next_page_token == token - -@pytest.mark.parametrize("request_type", [ - service.GetTaskRequest, - dict, -]) -def test_get_task(request_type, transport: str = 'grpc'): - client = DataplexServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_task), - '__call__') as call: - # Designate an appropriate return value for the call. 
- call.return_value = tasks.Task( - name='name_value', - uid='uid_value', - description='description_value', - display_name='display_name_value', - state=resources.State.ACTIVE, - ) - response = client.get_task(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - request = service.GetTaskRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, tasks.Task) - assert response.name == 'name_value' - assert response.uid == 'uid_value' - assert response.description == 'description_value' - assert response.display_name == 'display_name_value' - assert response.state == resources.State.ACTIVE - - -def test_get_task_non_empty_request_with_auto_populated_field(): - # This test is a coverage failsafe to make sure that UUID4 fields are - # automatically populated, according to AIP-4235, with non-empty requests. - client = DataplexServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. - request = service.GetTaskRequest( - name='name_value', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_task), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client.get_task(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == service.GetTaskRequest( - name='name_value', - ) - -def test_get_task_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = DataplexServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.get_task in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.get_task] = mock_rpc - request = {} - client.get_task(request) - - # Establish that the underlying gRPC stub method was called. 
-        assert mock_rpc.call_count == 1
-
-        client.get_task(request)
-
-        # Establish that a new wrapper was not created for this call
-        assert wrapper_fn.call_count == 0
-        assert mock_rpc.call_count == 2
-
-@pytest.mark.asyncio
-async def test_get_task_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"):
-    # Clients should use _prep_wrapped_messages to create cached wrapped rpcs,
-    # instead of constructing them on each call
-    with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn:
-        client = DataplexServiceAsyncClient(
-            credentials=async_anonymous_credentials(),
-            transport=transport,
-        )
-
-        # Should wrap all calls on client creation
-        assert wrapper_fn.call_count > 0
-        wrapper_fn.reset_mock()
-
-        # Ensure method has been cached
-        assert client._client._transport.get_task in client._client._transport._wrapped_methods
-
-        # Replace cached wrapped function with mock
-        mock_rpc = mock.AsyncMock()
-        mock_rpc.return_value = mock.Mock()
-        client._client._transport._wrapped_methods[client._client._transport.get_task] = mock_rpc
-
-        request = {}
-        await client.get_task(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert mock_rpc.call_count == 1
-
-        await client.get_task(request)
-
-        # Establish that a new wrapper was not created for this call
-        assert wrapper_fn.call_count == 0
-        assert mock_rpc.call_count == 2
-
-@pytest.mark.asyncio
-async def test_get_task_async(transport: str = 'grpc_asyncio', request_type=service.GetTaskRequest):
-    client = DataplexServiceAsyncClient(
-        credentials=async_anonymous_credentials(),
-        transport=transport,
-    )
-
-    # Everything is optional in proto3 as far as the runtime is concerned,
-    # and we are mocking out the actual API, so just send an empty request.
-    request = request_type()
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.get_task),
-            '__call__') as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(tasks.Task(
-            name='name_value',
-            uid='uid_value',
-            description='description_value',
-            display_name='display_name_value',
-            state=resources.State.ACTIVE,
-        ))
-        response = await client.get_task(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert len(call.mock_calls)
-        _, args, _ = call.mock_calls[0]
-        request = service.GetTaskRequest()
-        assert args[0] == request
-
-        # Establish that the response is the type that we expect.
-        assert isinstance(response, tasks.Task)
-        assert response.name == 'name_value'
-        assert response.uid == 'uid_value'
-        assert response.description == 'description_value'
-        assert response.display_name == 'display_name_value'
-        assert response.state == resources.State.ACTIVE
-
-
-@pytest.mark.asyncio
-async def test_get_task_async_from_dict():
-    await test_get_task_async(request_type=dict)
-
-def test_get_task_field_headers():
-    client = DataplexServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-    )
-
-    # Any value that is part of the HTTP/1.1 URI should be sent as
-    # a field header. Set these to a non-empty value.
-    request = service.GetTaskRequest()
-
-    request.name = 'name_value'
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.get_task),
-            '__call__') as call:
-        call.return_value = tasks.Task()
-        client.get_task(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert len(call.mock_calls) == 1
-        _, args, _ = call.mock_calls[0]
-        assert args[0] == request
-
-        # Establish that the field header was sent.
-        _, _, kw = call.mock_calls[0]
-        assert (
-            'x-goog-request-params',
-            'name=name_value',
-        ) in kw['metadata']
-
-
-@pytest.mark.asyncio
-async def test_get_task_field_headers_async():
-    client = DataplexServiceAsyncClient(
-        credentials=async_anonymous_credentials(),
-    )
-
-    # Any value that is part of the HTTP/1.1 URI should be sent as
-    # a field header. Set these to a non-empty value.
-    request = service.GetTaskRequest()
-
-    request.name = 'name_value'
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.get_task),
-            '__call__') as call:
-        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(tasks.Task())
-        await client.get_task(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert len(call.mock_calls)
-        _, args, _ = call.mock_calls[0]
-        assert args[0] == request
-
-        # Establish that the field header was sent.
-        _, _, kw = call.mock_calls[0]
-        assert (
-            'x-goog-request-params',
-            'name=name_value',
-        ) in kw['metadata']
-
-
-def test_get_task_flattened():
-    client = DataplexServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-    )
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.get_task),
-            '__call__') as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = tasks.Task()
-        # Call the method with a truthy value for each flattened field,
-        # using the keyword arguments to the method.
-        client.get_task(
-            name='name_value',
-        )
-
-        # Establish that the underlying call was made with the expected
-        # request object values.
-        assert len(call.mock_calls) == 1
-        _, args, _ = call.mock_calls[0]
-        arg = args[0].name
-        mock_val = 'name_value'
-        assert arg == mock_val
-
-
-def test_get_task_flattened_error():
-    client = DataplexServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-    )
-
-    # Attempting to call a method with both a request object and flattened
-    # fields is an error.
-    with pytest.raises(ValueError):
-        client.get_task(
-            service.GetTaskRequest(),
-            name='name_value',
-        )
-
-@pytest.mark.asyncio
-async def test_get_task_flattened_async():
-    client = DataplexServiceAsyncClient(
-        credentials=async_anonymous_credentials(),
-    )
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.get_task),
-            '__call__') as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(tasks.Task())
-        # Call the method with a truthy value for each flattened field,
-        # using the keyword arguments to the method.
-        response = await client.get_task(
-            name='name_value',
-        )
-
-        # Establish that the underlying call was made with the expected
-        # request object values.
-        assert len(call.mock_calls)
-        _, args, _ = call.mock_calls[0]
-        arg = args[0].name
-        mock_val = 'name_value'
-        assert arg == mock_val
-
-@pytest.mark.asyncio
-async def test_get_task_flattened_error_async():
-    client = DataplexServiceAsyncClient(
-        credentials=async_anonymous_credentials(),
-    )
-
-    # Attempting to call a method with both a request object and flattened
-    # fields is an error.
- with pytest.raises(ValueError): - await client.get_task( - service.GetTaskRequest(), - name='name_value', - ) - - -@pytest.mark.parametrize("request_type", [ - service.ListJobsRequest, - dict, -]) -def test_list_jobs(request_type, transport: str = 'grpc'): - client = DataplexServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_jobs), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = service.ListJobsResponse( - next_page_token='next_page_token_value', - ) - response = client.list_jobs(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - request = service.ListJobsRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, pagers.ListJobsPager) - assert response.next_page_token == 'next_page_token_value' - - -def test_list_jobs_non_empty_request_with_auto_populated_field(): - # This test is a coverage failsafe to make sure that UUID4 fields are - # automatically populated, according to AIP-4235, with non-empty requests. - client = DataplexServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. - request = service.ListJobsRequest( - parent='parent_value', - page_token='page_token_value', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_jobs), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client.list_jobs(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == service.ListJobsRequest( - parent='parent_value', - page_token='page_token_value', - ) - -def test_list_jobs_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = DataplexServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.list_jobs in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.list_jobs] = mock_rpc - request = {} - client.list_jobs(request) - - # Establish that the underlying gRPC stub method was called. 
-        assert mock_rpc.call_count == 1
-
-        client.list_jobs(request)
-
-        # Establish that a new wrapper was not created for this call
-        assert wrapper_fn.call_count == 0
-        assert mock_rpc.call_count == 2
-
-@pytest.mark.asyncio
-async def test_list_jobs_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"):
-    # Clients should use _prep_wrapped_messages to create cached wrapped rpcs,
-    # instead of constructing them on each call
-    with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn:
-        client = DataplexServiceAsyncClient(
-            credentials=async_anonymous_credentials(),
-            transport=transport,
-        )
-
-        # Should wrap all calls on client creation
-        assert wrapper_fn.call_count > 0
-        wrapper_fn.reset_mock()
-
-        # Ensure method has been cached
-        assert client._client._transport.list_jobs in client._client._transport._wrapped_methods
-
-        # Replace cached wrapped function with mock
-        mock_rpc = mock.AsyncMock()
-        mock_rpc.return_value = mock.Mock()
-        client._client._transport._wrapped_methods[client._client._transport.list_jobs] = mock_rpc
-
-        request = {}
-        await client.list_jobs(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert mock_rpc.call_count == 1
-
-        await client.list_jobs(request)
-
-        # Establish that a new wrapper was not created for this call
-        assert wrapper_fn.call_count == 0
-        assert mock_rpc.call_count == 2
-
-@pytest.mark.asyncio
-async def test_list_jobs_async(transport: str = 'grpc_asyncio', request_type=service.ListJobsRequest):
-    client = DataplexServiceAsyncClient(
-        credentials=async_anonymous_credentials(),
-        transport=transport,
-    )
-
-    # Everything is optional in proto3 as far as the runtime is concerned,
-    # and we are mocking out the actual API, so just send an empty request.
-    request = request_type()
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.list_jobs),
-            '__call__') as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(service.ListJobsResponse(
-            next_page_token='next_page_token_value',
-        ))
-        response = await client.list_jobs(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert len(call.mock_calls)
-        _, args, _ = call.mock_calls[0]
-        request = service.ListJobsRequest()
-        assert args[0] == request
-
-        # Establish that the response is the type that we expect.
-        assert isinstance(response, pagers.ListJobsAsyncPager)
-        assert response.next_page_token == 'next_page_token_value'
-
-
-@pytest.mark.asyncio
-async def test_list_jobs_async_from_dict():
-    await test_list_jobs_async(request_type=dict)
-
-def test_list_jobs_field_headers():
-    client = DataplexServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-    )
-
-    # Any value that is part of the HTTP/1.1 URI should be sent as
-    # a field header. Set these to a non-empty value.
-    request = service.ListJobsRequest()
-
-    request.parent = 'parent_value'
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.list_jobs),
-            '__call__') as call:
-        call.return_value = service.ListJobsResponse()
-        client.list_jobs(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert len(call.mock_calls) == 1
-        _, args, _ = call.mock_calls[0]
-        assert args[0] == request
-
-        # Establish that the field header was sent.
-        _, _, kw = call.mock_calls[0]
-        assert (
-            'x-goog-request-params',
-            'parent=parent_value',
-        ) in kw['metadata']
-
-
-@pytest.mark.asyncio
-async def test_list_jobs_field_headers_async():
-    client = DataplexServiceAsyncClient(
-        credentials=async_anonymous_credentials(),
-    )
-
-    # Any value that is part of the HTTP/1.1 URI should be sent as
-    # a field header. Set these to a non-empty value.
-    request = service.ListJobsRequest()
-
-    request.parent = 'parent_value'
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.list_jobs),
-            '__call__') as call:
-        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(service.ListJobsResponse())
-        await client.list_jobs(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert len(call.mock_calls)
-        _, args, _ = call.mock_calls[0]
-        assert args[0] == request
-
-        # Establish that the field header was sent.
-        _, _, kw = call.mock_calls[0]
-        assert (
-            'x-goog-request-params',
-            'parent=parent_value',
-        ) in kw['metadata']
-
-
-def test_list_jobs_flattened():
-    client = DataplexServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-    )
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.list_jobs),
-            '__call__') as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = service.ListJobsResponse()
-        # Call the method with a truthy value for each flattened field,
-        # using the keyword arguments to the method.
-        client.list_jobs(
-            parent='parent_value',
-        )
-
-        # Establish that the underlying call was made with the expected
-        # request object values.
-        assert len(call.mock_calls) == 1
-        _, args, _ = call.mock_calls[0]
-        arg = args[0].parent
-        mock_val = 'parent_value'
-        assert arg == mock_val
-
-
-def test_list_jobs_flattened_error():
-    client = DataplexServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-    )
-
-    # Attempting to call a method with both a request object and flattened
-    # fields is an error.
-    with pytest.raises(ValueError):
-        client.list_jobs(
-            service.ListJobsRequest(),
-            parent='parent_value',
-        )
-
-@pytest.mark.asyncio
-async def test_list_jobs_flattened_async():
-    client = DataplexServiceAsyncClient(
-        credentials=async_anonymous_credentials(),
-    )
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.list_jobs),
-            '__call__') as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(service.ListJobsResponse())
-        # Call the method with a truthy value for each flattened field,
-        # using the keyword arguments to the method.
-        response = await client.list_jobs(
-            parent='parent_value',
-        )
-
-        # Establish that the underlying call was made with the expected
-        # request object values.
-        assert len(call.mock_calls)
-        _, args, _ = call.mock_calls[0]
-        arg = args[0].parent
-        mock_val = 'parent_value'
-        assert arg == mock_val
-
-@pytest.mark.asyncio
-async def test_list_jobs_flattened_error_async():
-    client = DataplexServiceAsyncClient(
-        credentials=async_anonymous_credentials(),
-    )
-
-    # Attempting to call a method with both a request object and flattened
-    # fields is an error.
- with pytest.raises(ValueError): - await client.list_jobs( - service.ListJobsRequest(), - parent='parent_value', - ) - - -def test_list_jobs_pager(transport_name: str = "grpc"): - client = DataplexServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport_name, - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_jobs), - '__call__') as call: - # Set the response to a series of pages. - call.side_effect = ( - service.ListJobsResponse( - jobs=[ - tasks.Job(), - tasks.Job(), - tasks.Job(), - ], - next_page_token='abc', - ), - service.ListJobsResponse( - jobs=[], - next_page_token='def', - ), - service.ListJobsResponse( - jobs=[ - tasks.Job(), - ], - next_page_token='ghi', - ), - service.ListJobsResponse( - jobs=[ - tasks.Job(), - tasks.Job(), - ], - ), - RuntimeError, - ) - - expected_metadata = () - retry = retries.Retry() - timeout = 5 - expected_metadata = tuple(expected_metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ('parent', ''), - )), - ) - pager = client.list_jobs(request={}, retry=retry, timeout=timeout) - - assert pager._metadata == expected_metadata - assert pager._retry == retry - assert pager._timeout == timeout - - results = list(pager) - assert len(results) == 6 - assert all(isinstance(i, tasks.Job) - for i in results) -def test_list_jobs_pages(transport_name: str = "grpc"): - client = DataplexServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport_name, - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_jobs), - '__call__') as call: - # Set the response to a series of pages. - call.side_effect = ( - service.ListJobsResponse( - jobs=[ - tasks.Job(), - tasks.Job(), - tasks.Job(), - ], - next_page_token='abc', - ), - service.ListJobsResponse( - jobs=[], - next_page_token='def', - ), - service.ListJobsResponse( - jobs=[ - tasks.Job(), - ], - next_page_token='ghi', - ), - service.ListJobsResponse( - jobs=[ - tasks.Job(), - tasks.Job(), - ], - ), - RuntimeError, - ) - pages = list(client.list_jobs(request={}).pages) - for page_, token in zip(pages, ['abc','def','ghi', '']): - assert page_.raw_page.next_page_token == token - -@pytest.mark.asyncio -async def test_list_jobs_async_pager(): - client = DataplexServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_jobs), - '__call__', new_callable=mock.AsyncMock) as call: - # Set the response to a series of pages. 
- call.side_effect = ( - service.ListJobsResponse( - jobs=[ - tasks.Job(), - tasks.Job(), - tasks.Job(), - ], - next_page_token='abc', - ), - service.ListJobsResponse( - jobs=[], - next_page_token='def', - ), - service.ListJobsResponse( - jobs=[ - tasks.Job(), - ], - next_page_token='ghi', - ), - service.ListJobsResponse( - jobs=[ - tasks.Job(), - tasks.Job(), - ], - ), - RuntimeError, - ) - async_pager = await client.list_jobs(request={},) - assert async_pager.next_page_token == 'abc' - responses = [] - async for response in async_pager: # pragma: no branch - responses.append(response) - - assert len(responses) == 6 - assert all(isinstance(i, tasks.Job) - for i in responses) - - -@pytest.mark.asyncio -async def test_list_jobs_async_pages(): - client = DataplexServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_jobs), - '__call__', new_callable=mock.AsyncMock) as call: - # Set the response to a series of pages. - call.side_effect = ( - service.ListJobsResponse( - jobs=[ - tasks.Job(), - tasks.Job(), - tasks.Job(), - ], - next_page_token='abc', - ), - service.ListJobsResponse( - jobs=[], - next_page_token='def', - ), - service.ListJobsResponse( - jobs=[ - tasks.Job(), - ], - next_page_token='ghi', - ), - service.ListJobsResponse( - jobs=[ - tasks.Job(), - tasks.Job(), - ], - ), - RuntimeError, - ) - pages = [] - # Workaround issue in python 3.9 related to code coverage by adding `# pragma: no branch` - # See https://github.com/googleapis/gapic-generator-python/pull/1174#issuecomment-1025132372 - async for page_ in ( # pragma: no branch - await client.list_jobs(request={}) - ).pages: - pages.append(page_) - for page_, token in zip(pages, ['abc','def','ghi', '']): - assert page_.raw_page.next_page_token == token - -@pytest.mark.parametrize("request_type", [ - service.RunTaskRequest, - dict, -]) -def test_run_task(request_type, transport: str = 'grpc'): - client = DataplexServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.run_task), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = service.RunTaskResponse( - ) - response = client.run_task(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - request = service.RunTaskRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, service.RunTaskResponse) - - -def test_run_task_non_empty_request_with_auto_populated_field(): - # This test is a coverage failsafe to make sure that UUID4 fields are - # automatically populated, according to AIP-4235, with non-empty requests. - client = DataplexServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. 
- request = service.RunTaskRequest( - name='name_value', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.run_task), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client.run_task(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == service.RunTaskRequest( - name='name_value', - ) - -def test_run_task_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = DataplexServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.run_task in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.run_task] = mock_rpc - request = {} - client.run_task(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - client.run_task(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_run_task_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: - client = DataplexServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._client._transport.run_task in client._client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.AsyncMock() - mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[client._client._transport.run_task] = mock_rpc - - request = {} - await client.run_task(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - await client.run_task(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_run_task_async(transport: str = 'grpc_asyncio', request_type=service.RunTaskRequest): - client = DataplexServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.run_task), - '__call__') as call: - # Designate an appropriate return value for the call. 
-        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(service.RunTaskResponse(
-        ))
-        response = await client.run_task(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert len(call.mock_calls)
-        _, args, _ = call.mock_calls[0]
-        request = service.RunTaskRequest()
-        assert args[0] == request
-
-        # Establish that the response is the type that we expect.
-        assert isinstance(response, service.RunTaskResponse)
-
-
-@pytest.mark.asyncio
-async def test_run_task_async_from_dict():
-    await test_run_task_async(request_type=dict)
-
-def test_run_task_field_headers():
-    client = DataplexServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-    )
-
-    # Any value that is part of the HTTP/1.1 URI should be sent as
-    # a field header. Set these to a non-empty value.
-    request = service.RunTaskRequest()
-
-    request.name = 'name_value'
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.run_task),
-            '__call__') as call:
-        call.return_value = service.RunTaskResponse()
-        client.run_task(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert len(call.mock_calls) == 1
-        _, args, _ = call.mock_calls[0]
-        assert args[0] == request
-
-        # Establish that the field header was sent.
-        _, _, kw = call.mock_calls[0]
-        assert (
-            'x-goog-request-params',
-            'name=name_value',
-        ) in kw['metadata']
-
-
-@pytest.mark.asyncio
-async def test_run_task_field_headers_async():
-    client = DataplexServiceAsyncClient(
-        credentials=async_anonymous_credentials(),
-    )
-
-    # Any value that is part of the HTTP/1.1 URI should be sent as
-    # a field header. Set these to a non-empty value.
-    request = service.RunTaskRequest()
-
-    request.name = 'name_value'
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.run_task),
-            '__call__') as call:
-        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(service.RunTaskResponse())
-        await client.run_task(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert len(call.mock_calls)
-        _, args, _ = call.mock_calls[0]
-        assert args[0] == request
-
-        # Establish that the field header was sent.
-        _, _, kw = call.mock_calls[0]
-        assert (
-            'x-goog-request-params',
-            'name=name_value',
-        ) in kw['metadata']
-
-
-def test_run_task_flattened():
-    client = DataplexServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-    )
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.run_task),
-            '__call__') as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = service.RunTaskResponse()
-        # Call the method with a truthy value for each flattened field,
-        # using the keyword arguments to the method.
-        client.run_task(
-            name='name_value',
-        )
-
-        # Establish that the underlying call was made with the expected
-        # request object values.
-        assert len(call.mock_calls) == 1
-        _, args, _ = call.mock_calls[0]
-        arg = args[0].name
-        mock_val = 'name_value'
-        assert arg == mock_val
-
-
-def test_run_task_flattened_error():
-    client = DataplexServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-    )
-
-    # Attempting to call a method with both a request object and flattened
-    # fields is an error.
-    with pytest.raises(ValueError):
-        client.run_task(
-            service.RunTaskRequest(),
-            name='name_value',
-        )
-
-@pytest.mark.asyncio
-async def test_run_task_flattened_async():
-    client = DataplexServiceAsyncClient(
-        credentials=async_anonymous_credentials(),
-    )
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.run_task),
-            '__call__') as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(service.RunTaskResponse())
-        # Call the method with a truthy value for each flattened field,
-        # using the keyword arguments to the method.
-        response = await client.run_task(
-            name='name_value',
-        )
-
-        # Establish that the underlying call was made with the expected
-        # request object values.
-        assert len(call.mock_calls)
-        _, args, _ = call.mock_calls[0]
-        arg = args[0].name
-        mock_val = 'name_value'
-        assert arg == mock_val
-
-@pytest.mark.asyncio
-async def test_run_task_flattened_error_async():
-    client = DataplexServiceAsyncClient(
-        credentials=async_anonymous_credentials(),
-    )
-
-    # Attempting to call a method with both a request object and flattened
-    # fields is an error.
-    with pytest.raises(ValueError):
-        await client.run_task(
-            service.RunTaskRequest(),
-            name='name_value',
-        )
-
-
-@pytest.mark.parametrize("request_type", [
-  service.GetJobRequest,
-  dict,
-])
-def test_get_job(request_type, transport: str = 'grpc'):
-    client = DataplexServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport=transport,
-    )
-
-    # Everything is optional in proto3 as far as the runtime is concerned,
-    # and we are mocking out the actual API, so just send an empty request.
-    request = request_type()
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.get_job),
-            '__call__') as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = tasks.Job(
-            name='name_value',
-            uid='uid_value',
-            state=tasks.Job.State.RUNNING,
-            retry_count=1214,
-            service=tasks.Job.Service.DATAPROC,
-            service_job='service_job_value',
-            message='message_value',
-            trigger=tasks.Job.Trigger.TASK_CONFIG,
-        )
-        response = client.get_job(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert len(call.mock_calls) == 1
-        _, args, _ = call.mock_calls[0]
-        request = service.GetJobRequest()
-        assert args[0] == request
-
-        # Establish that the response is the type that we expect.
-        assert isinstance(response, tasks.Job)
-        assert response.name == 'name_value'
-        assert response.uid == 'uid_value'
-        assert response.state == tasks.Job.State.RUNNING
-        assert response.retry_count == 1214
-        assert response.service == tasks.Job.Service.DATAPROC
-        assert response.service_job == 'service_job_value'
-        assert response.message == 'message_value'
-        assert response.trigger == tasks.Job.Trigger.TASK_CONFIG
-
-
-def test_get_job_non_empty_request_with_auto_populated_field():
-    # This test is a coverage failsafe to make sure that UUID4 fields are
-    # automatically populated, according to AIP-4235, with non-empty requests.
-    client = DataplexServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport='grpc',
-    )
-
-    # Populate all string fields in the request which are not UUID4
-    # since we want to check that UUID4 are populated automatically
-    # if they meet the requirements of AIP 4235.
- request = service.GetJobRequest( - name='name_value', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_job), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client.get_job(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == service.GetJobRequest( - name='name_value', - ) - -def test_get_job_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = DataplexServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.get_job in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.get_job] = mock_rpc - request = {} - client.get_job(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - client.get_job(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_get_job_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: - client = DataplexServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._client._transport.get_job in client._client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.AsyncMock() - mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[client._client._transport.get_job] = mock_rpc - - request = {} - await client.get_job(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - await client.get_job(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_get_job_async(transport: str = 'grpc_asyncio', request_type=service.GetJobRequest): - client = DataplexServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_job), - '__call__') as call: - # Designate an appropriate return value for the call. 
-        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(tasks.Job(
-            name='name_value',
-            uid='uid_value',
-            state=tasks.Job.State.RUNNING,
-            retry_count=1214,
-            service=tasks.Job.Service.DATAPROC,
-            service_job='service_job_value',
-            message='message_value',
-            trigger=tasks.Job.Trigger.TASK_CONFIG,
-        ))
-        response = await client.get_job(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert len(call.mock_calls)
-        _, args, _ = call.mock_calls[0]
-        request = service.GetJobRequest()
-        assert args[0] == request
-
-        # Establish that the response is the type that we expect.
-        assert isinstance(response, tasks.Job)
-        assert response.name == 'name_value'
-        assert response.uid == 'uid_value'
-        assert response.state == tasks.Job.State.RUNNING
-        assert response.retry_count == 1214
-        assert response.service == tasks.Job.Service.DATAPROC
-        assert response.service_job == 'service_job_value'
-        assert response.message == 'message_value'
-        assert response.trigger == tasks.Job.Trigger.TASK_CONFIG
-
-
-@pytest.mark.asyncio
-async def test_get_job_async_from_dict():
-    await test_get_job_async(request_type=dict)
-
-def test_get_job_field_headers():
-    client = DataplexServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-    )
-
-    # Any value that is part of the HTTP/1.1 URI should be sent as
-    # a field header. Set these to a non-empty value.
-    request = service.GetJobRequest()
-
-    request.name = 'name_value'
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.get_job),
-            '__call__') as call:
-        call.return_value = tasks.Job()
-        client.get_job(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert len(call.mock_calls) == 1
-        _, args, _ = call.mock_calls[0]
-        assert args[0] == request
-
-        # Establish that the field header was sent.
-        _, _, kw = call.mock_calls[0]
-        assert (
-            'x-goog-request-params',
-            'name=name_value',
-        ) in kw['metadata']
-
-
-@pytest.mark.asyncio
-async def test_get_job_field_headers_async():
-    client = DataplexServiceAsyncClient(
-        credentials=async_anonymous_credentials(),
-    )
-
-    # Any value that is part of the HTTP/1.1 URI should be sent as
-    # a field header. Set these to a non-empty value.
-    request = service.GetJobRequest()
-
-    request.name = 'name_value'
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.get_job),
-            '__call__') as call:
-        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(tasks.Job())
-        await client.get_job(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert len(call.mock_calls)
-        _, args, _ = call.mock_calls[0]
-        assert args[0] == request
-
-        # Establish that the field header was sent.
-        _, _, kw = call.mock_calls[0]
-        assert (
-            'x-goog-request-params',
-            'name=name_value',
-        ) in kw['metadata']
-
-
-def test_get_job_flattened():
-    client = DataplexServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-    )
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.get_job),
-            '__call__') as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = tasks.Job()
-        # Call the method with a truthy value for each flattened field,
-        # using the keyword arguments to the method.
-        client.get_job(
-            name='name_value',
-        )
-
-        # Establish that the underlying call was made with the expected
-        # request object values.
-        assert len(call.mock_calls) == 1
-        _, args, _ = call.mock_calls[0]
-        arg = args[0].name
-        mock_val = 'name_value'
-        assert arg == mock_val
-
-
-def test_get_job_flattened_error():
-    client = DataplexServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-    )
-
-    # Attempting to call a method with both a request object and flattened
-    # fields is an error.
-    with pytest.raises(ValueError):
-        client.get_job(
-            service.GetJobRequest(),
-            name='name_value',
-        )
-
-@pytest.mark.asyncio
-async def test_get_job_flattened_async():
-    client = DataplexServiceAsyncClient(
-        credentials=async_anonymous_credentials(),
-    )
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.get_job),
-            '__call__') as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(tasks.Job())
-        # Call the method with a truthy value for each flattened field,
-        # using the keyword arguments to the method.
-        response = await client.get_job(
-            name='name_value',
-        )
-
-        # Establish that the underlying call was made with the expected
-        # request object values.
-        assert len(call.mock_calls)
-        _, args, _ = call.mock_calls[0]
-        arg = args[0].name
-        mock_val = 'name_value'
-        assert arg == mock_val
-
-@pytest.mark.asyncio
-async def test_get_job_flattened_error_async():
-    client = DataplexServiceAsyncClient(
-        credentials=async_anonymous_credentials(),
-    )
-
-    # Attempting to call a method with both a request object and flattened
-    # fields is an error.
-    with pytest.raises(ValueError):
-        await client.get_job(
-            service.GetJobRequest(),
-            name='name_value',
-        )
-
-
-@pytest.mark.parametrize("request_type", [
-  service.CancelJobRequest,
-  dict,
-])
-def test_cancel_job(request_type, transport: str = 'grpc'):
-    client = DataplexServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport=transport,
-    )
-
-    # Everything is optional in proto3 as far as the runtime is concerned,
-    # and we are mocking out the actual API, so just send an empty request.
-    request = request_type()
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.cancel_job),
-            '__call__') as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = None
-        response = client.cancel_job(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert len(call.mock_calls) == 1
-        _, args, _ = call.mock_calls[0]
-        request = service.CancelJobRequest()
-        assert args[0] == request
-
-        # Establish that the response is the type that we expect.
-        assert response is None
-
-
-def test_cancel_job_non_empty_request_with_auto_populated_field():
-    # This test is a coverage failsafe to make sure that UUID4 fields are
-    # automatically populated, according to AIP-4235, with non-empty requests.
-    client = DataplexServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport='grpc',
-    )
-
-    # Populate all string fields in the request which are not UUID4
-    # since we want to check that UUID4 are populated automatically
-    # if they meet the requirements of AIP 4235.
-    request = service.CancelJobRequest(
-        name='name_value',
-    )
-
-    # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object( - type(client.transport.cancel_job), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client.cancel_job(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == service.CancelJobRequest( - name='name_value', - ) - -def test_cancel_job_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = DataplexServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.cancel_job in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.cancel_job] = mock_rpc - request = {} - client.cancel_job(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - client.cancel_job(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_cancel_job_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: - client = DataplexServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._client._transport.cancel_job in client._client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.AsyncMock() - mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[client._client._transport.cancel_job] = mock_rpc - - request = {} - await client.cancel_job(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - await client.cancel_job(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_cancel_job_async(transport: str = 'grpc_asyncio', request_type=service.CancelJobRequest): - client = DataplexServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.cancel_job), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - response = await client.cancel_job(request) - - # Establish that the underlying gRPC stub method was called. 
- assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - request = service.CancelJobRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert response is None - - -@pytest.mark.asyncio -async def test_cancel_job_async_from_dict(): - await test_cancel_job_async(request_type=dict) - -def test_cancel_job_field_headers(): - client = DataplexServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = service.CancelJobRequest() - - request.name = 'name_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.cancel_job), - '__call__') as call: - call.return_value = None - client.cancel_job(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'name=name_value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_cancel_job_field_headers_async(): - client = DataplexServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = service.CancelJobRequest() - - request.name = 'name_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.cancel_job), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - await client.cancel_job(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'name=name_value', - ) in kw['metadata'] - - -def test_cancel_job_flattened(): - client = DataplexServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.cancel_job), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = None - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.cancel_job( - name='name_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = 'name_value' - assert arg == mock_val - - -def test_cancel_job_flattened_error(): - client = DataplexServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.cancel_job( - service.CancelJobRequest(), - name='name_value', - ) - -@pytest.mark.asyncio -async def test_cancel_job_flattened_async(): - client = DataplexServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.cancel_job), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = None - - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.cancel_job( - name='name_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = 'name_value' - assert arg == mock_val - -@pytest.mark.asyncio -async def test_cancel_job_flattened_error_async(): - client = DataplexServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - await client.cancel_job( - service.CancelJobRequest(), - name='name_value', - ) - - -@pytest.mark.parametrize("request_type", [ - service.CreateEnvironmentRequest, - dict, -]) -def test_create_environment(request_type, transport: str = 'grpc'): - client = DataplexServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_environment), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = operations_pb2.Operation(name='operations/spam') - response = client.create_environment(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - request = service.CreateEnvironmentRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, future.Future) - - -def test_create_environment_non_empty_request_with_auto_populated_field(): - # This test is a coverage failsafe to make sure that UUID4 fields are - # automatically populated, according to AIP-4235, with non-empty requests. - client = DataplexServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. - request = service.CreateEnvironmentRequest( - parent='parent_value', - environment_id='environment_id_value', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_environment), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. 
- client.create_environment(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == service.CreateEnvironmentRequest( - parent='parent_value', - environment_id='environment_id_value', - ) - -def test_create_environment_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = DataplexServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.create_environment in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.create_environment] = mock_rpc - request = {} - client.create_environment(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - # Operation methods call wrapper_fn to build a cached - # client._transport.operations_client instance on first rpc call. - # Subsequent calls should use the cached wrapper - wrapper_fn.reset_mock() - - client.create_environment(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_create_environment_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: - client = DataplexServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._client._transport.create_environment in client._client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.AsyncMock() - mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[client._client._transport.create_environment] = mock_rpc - - request = {} - await client.create_environment(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - # Operation methods call wrapper_fn to build a cached - # client._transport.operations_client instance on first rpc call. - # Subsequent calls should use the cached wrapper - wrapper_fn.reset_mock() - - await client.create_environment(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_create_environment_async(transport: str = 'grpc_asyncio', request_type=service.CreateEnvironmentRequest): - client = DataplexServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. 
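The Environment create/update/delete tests mock the stub to return a raw `operations_pb2.Operation` and then assert that the client surfaces it as a `future.Future`. From the caller's side, a sketch of that long-running-operation flow, with a hypothetical project/lake path (blocking on `operation.result()` is the standard `google.api_core` operation-future behavior):

    from google.cloud import dataplex_v1

    client = dataplex_v1.DataplexServiceClient()
    operation = client.create_environment(
        parent='projects/my-project/locations/us-central1/lakes/my-lake',  # hypothetical
        environment_id='my-env',  # hypothetical
        environment=dataplex_v1.Environment(display_name='My Environment'),
    )
    # The call returns immediately with an operation future; result()
    # polls until the server-side operation finishes and returns the
    # resulting Environment (or raises the operation's error).
    environment = operation.result()
    print(environment.name)
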
- request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_environment), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name='operations/spam') - ) - response = await client.create_environment(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - request = service.CreateEnvironmentRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, future.Future) - - -@pytest.mark.asyncio -async def test_create_environment_async_from_dict(): - await test_create_environment_async(request_type=dict) - -def test_create_environment_field_headers(): - client = DataplexServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = service.CreateEnvironmentRequest() - - request.parent = 'parent_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_environment), - '__call__') as call: - call.return_value = operations_pb2.Operation(name='operations/op') - client.create_environment(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'parent=parent_value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_create_environment_field_headers_async(): - client = DataplexServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = service.CreateEnvironmentRequest() - - request.parent = 'parent_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_environment), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(operations_pb2.Operation(name='operations/op')) - await client.create_environment(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'parent=parent_value', - ) in kw['metadata'] - - -def test_create_environment_flattened(): - client = DataplexServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_environment), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = operations_pb2.Operation(name='operations/op') - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. 
- client.create_environment( - parent='parent_value', - environment=analyze.Environment(name='name_value'), - environment_id='environment_id_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - arg = args[0].parent - mock_val = 'parent_value' - assert arg == mock_val - arg = args[0].environment - mock_val = analyze.Environment(name='name_value') - assert arg == mock_val - arg = args[0].environment_id - mock_val = 'environment_id_value' - assert arg == mock_val - - -def test_create_environment_flattened_error(): - client = DataplexServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.create_environment( - service.CreateEnvironmentRequest(), - parent='parent_value', - environment=analyze.Environment(name='name_value'), - environment_id='environment_id_value', - ) - -@pytest.mark.asyncio -async def test_create_environment_flattened_async(): - client = DataplexServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_environment), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = operations_pb2.Operation(name='operations/op') - - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name='operations/spam') - ) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.create_environment( - parent='parent_value', - environment=analyze.Environment(name='name_value'), - environment_id='environment_id_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].parent - mock_val = 'parent_value' - assert arg == mock_val - arg = args[0].environment - mock_val = analyze.Environment(name='name_value') - assert arg == mock_val - arg = args[0].environment_id - mock_val = 'environment_id_value' - assert arg == mock_val - -@pytest.mark.asyncio -async def test_create_environment_flattened_error_async(): - client = DataplexServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - await client.create_environment( - service.CreateEnvironmentRequest(), - parent='parent_value', - environment=analyze.Environment(name='name_value'), - environment_id='environment_id_value', - ) - - -@pytest.mark.parametrize("request_type", [ - service.UpdateEnvironmentRequest, - dict, -]) -def test_update_environment(request_type, transport: str = 'grpc'): - client = DataplexServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_environment), - '__call__') as call: - # Designate an appropriate return value for the call. 
- call.return_value = operations_pb2.Operation(name='operations/spam') - response = client.update_environment(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - request = service.UpdateEnvironmentRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, future.Future) - - -def test_update_environment_non_empty_request_with_auto_populated_field(): - # This test is a coverage failsafe to make sure that UUID4 fields are - # automatically populated, according to AIP-4235, with non-empty requests. - client = DataplexServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. - request = service.UpdateEnvironmentRequest( - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_environment), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client.update_environment(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == service.UpdateEnvironmentRequest( - ) - -def test_update_environment_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = DataplexServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.update_environment in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.update_environment] = mock_rpc - request = {} - client.update_environment(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - # Operation methods call wrapper_fn to build a cached - # client._transport.operations_client instance on first rpc call. 
- # Subsequent calls should use the cached wrapper - wrapper_fn.reset_mock() - - client.update_environment(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_update_environment_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: - client = DataplexServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._client._transport.update_environment in client._client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.AsyncMock() - mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[client._client._transport.update_environment] = mock_rpc - - request = {} - await client.update_environment(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - # Operation methods call wrapper_fn to build a cached - # client._transport.operations_client instance on first rpc call. - # Subsequent calls should use the cached wrapper - wrapper_fn.reset_mock() - - await client.update_environment(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_update_environment_async(transport: str = 'grpc_asyncio', request_type=service.UpdateEnvironmentRequest): - client = DataplexServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_environment), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name='operations/spam') - ) - response = await client.update_environment(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - request = service.UpdateEnvironmentRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, future.Future) - - -@pytest.mark.asyncio -async def test_update_environment_async_from_dict(): - await test_update_environment_async(request_type=dict) - -def test_update_environment_field_headers(): - client = DataplexServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = service.UpdateEnvironmentRequest() - - request.environment.name = 'name_value' - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.update_environment), - '__call__') as call: - call.return_value = operations_pb2.Operation(name='operations/op') - client.update_environment(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'environment.name=name_value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_update_environment_field_headers_async(): - client = DataplexServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = service.UpdateEnvironmentRequest() - - request.environment.name = 'name_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_environment), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(operations_pb2.Operation(name='operations/op')) - await client.update_environment(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'environment.name=name_value', - ) in kw['metadata'] - - -def test_update_environment_flattened(): - client = DataplexServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_environment), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = operations_pb2.Operation(name='operations/op') - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.update_environment( - environment=analyze.Environment(name='name_value'), - update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - arg = args[0].environment - mock_val = analyze.Environment(name='name_value') - assert arg == mock_val - arg = args[0].update_mask - mock_val = field_mask_pb2.FieldMask(paths=['paths_value']) - assert arg == mock_val - - -def test_update_environment_flattened_error(): - client = DataplexServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.update_environment( - service.UpdateEnvironmentRequest(), - environment=analyze.Environment(name='name_value'), - update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), - ) - -@pytest.mark.asyncio -async def test_update_environment_flattened_async(): - client = DataplexServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_environment), - '__call__') as call: - # Designate an appropriate return value for the call. 
- call.return_value = operations_pb2.Operation(name='operations/op') - - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name='operations/spam') - ) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.update_environment( - environment=analyze.Environment(name='name_value'), - update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].environment - mock_val = analyze.Environment(name='name_value') - assert arg == mock_val - arg = args[0].update_mask - mock_val = field_mask_pb2.FieldMask(paths=['paths_value']) - assert arg == mock_val - -@pytest.mark.asyncio -async def test_update_environment_flattened_error_async(): - client = DataplexServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - await client.update_environment( - service.UpdateEnvironmentRequest(), - environment=analyze.Environment(name='name_value'), - update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), - ) - - -@pytest.mark.parametrize("request_type", [ - service.DeleteEnvironmentRequest, - dict, -]) -def test_delete_environment(request_type, transport: str = 'grpc'): - client = DataplexServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_environment), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = operations_pb2.Operation(name='operations/spam') - response = client.delete_environment(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - request = service.DeleteEnvironmentRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, future.Future) - - -def test_delete_environment_non_empty_request_with_auto_populated_field(): - # This test is a coverage failsafe to make sure that UUID4 fields are - # automatically populated, according to AIP-4235, with non-empty requests. - client = DataplexServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. - request = service.DeleteEnvironmentRequest( - name='name_value', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_environment), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. 
- client.delete_environment(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == service.DeleteEnvironmentRequest( - name='name_value', - ) - -def test_delete_environment_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = DataplexServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.delete_environment in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.delete_environment] = mock_rpc - request = {} - client.delete_environment(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - # Operation methods call wrapper_fn to build a cached - # client._transport.operations_client instance on first rpc call. - # Subsequent calls should use the cached wrapper - wrapper_fn.reset_mock() - - client.delete_environment(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_delete_environment_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: - client = DataplexServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._client._transport.delete_environment in client._client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.AsyncMock() - mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[client._client._transport.delete_environment] = mock_rpc - - request = {} - await client.delete_environment(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - # Operation methods call wrapper_fn to build a cached - # client._transport.operations_client instance on first rpc call. - # Subsequent calls should use the cached wrapper - wrapper_fn.reset_mock() - - await client.delete_environment(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_delete_environment_async(transport: str = 'grpc_asyncio', request_type=service.DeleteEnvironmentRequest): - client = DataplexServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.delete_environment), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name='operations/spam') - ) - response = await client.delete_environment(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - request = service.DeleteEnvironmentRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, future.Future) - - -@pytest.mark.asyncio -async def test_delete_environment_async_from_dict(): - await test_delete_environment_async(request_type=dict) - -def test_delete_environment_field_headers(): - client = DataplexServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = service.DeleteEnvironmentRequest() - - request.name = 'name_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_environment), - '__call__') as call: - call.return_value = operations_pb2.Operation(name='operations/op') - client.delete_environment(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'name=name_value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_delete_environment_field_headers_async(): - client = DataplexServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = service.DeleteEnvironmentRequest() - - request.name = 'name_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_environment), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(operations_pb2.Operation(name='operations/op')) - await client.delete_environment(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'name=name_value', - ) in kw['metadata'] - - -def test_delete_environment_flattened(): - client = DataplexServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_environment), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = operations_pb2.Operation(name='operations/op') - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.delete_environment( - name='name_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. 
- assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = 'name_value' - assert arg == mock_val - - -def test_delete_environment_flattened_error(): - client = DataplexServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.delete_environment( - service.DeleteEnvironmentRequest(), - name='name_value', - ) - -@pytest.mark.asyncio -async def test_delete_environment_flattened_async(): - client = DataplexServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_environment), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = operations_pb2.Operation(name='operations/op') - - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name='operations/spam') - ) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.delete_environment( - name='name_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = 'name_value' - assert arg == mock_val - -@pytest.mark.asyncio -async def test_delete_environment_flattened_error_async(): - client = DataplexServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - await client.delete_environment( - service.DeleteEnvironmentRequest(), - name='name_value', - ) - - -@pytest.mark.parametrize("request_type", [ - service.ListEnvironmentsRequest, - dict, -]) -def test_list_environments(request_type, transport: str = 'grpc'): - client = DataplexServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_environments), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = service.ListEnvironmentsResponse( - next_page_token='next_page_token_value', - ) - response = client.list_environments(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - request = service.ListEnvironmentsRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, pagers.ListEnvironmentsPager) - assert response.next_page_token == 'next_page_token_value' - - -def test_list_environments_non_empty_request_with_auto_populated_field(): - # This test is a coverage failsafe to make sure that UUID4 fields are - # automatically populated, according to AIP-4235, with non-empty requests. 
- client = DataplexServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. - request = service.ListEnvironmentsRequest( - parent='parent_value', - page_token='page_token_value', - filter='filter_value', - order_by='order_by_value', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_environments), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client.list_environments(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == service.ListEnvironmentsRequest( - parent='parent_value', - page_token='page_token_value', - filter='filter_value', - order_by='order_by_value', - ) - -def test_list_environments_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = DataplexServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.list_environments in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.list_environments] = mock_rpc - request = {} - client.list_environments(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - client.list_environments(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_list_environments_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: - client = DataplexServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._client._transport.list_environments in client._client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.AsyncMock() - mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[client._client._transport.list_environments] = mock_rpc - - request = {} - await client.list_environments(request) - - # Establish that the underlying gRPC stub method was called. 
- assert mock_rpc.call_count == 1
-
- await client.list_environments(request)
-
- # Establish that a new wrapper was not created for this call
- assert wrapper_fn.call_count == 0
- assert mock_rpc.call_count == 2
-
-@pytest.mark.asyncio
-async def test_list_environments_async(transport: str = 'grpc_asyncio', request_type=service.ListEnvironmentsRequest):
- client = DataplexServiceAsyncClient(
- credentials=async_anonymous_credentials(),
- transport=transport,
- )
-
- # Everything is optional in proto3 as far as the runtime is concerned,
- # and we are mocking out the actual API, so just send an empty request.
- request = request_type()
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client.transport.list_environments),
- '__call__') as call:
- # Designate an appropriate return value for the call.
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(service.ListEnvironmentsResponse(
- next_page_token='next_page_token_value',
- ))
- response = await client.list_environments(request)
-
- # Establish that the underlying gRPC stub method was called.
- assert len(call.mock_calls)
- _, args, _ = call.mock_calls[0]
- request = service.ListEnvironmentsRequest()
- assert args[0] == request
-
- # Establish that the response is the type that we expect.
- assert isinstance(response, pagers.ListEnvironmentsAsyncPager)
- assert response.next_page_token == 'next_page_token_value'
-
-
-@pytest.mark.asyncio
-async def test_list_environments_async_from_dict():
- await test_list_environments_async(request_type=dict)
-
-def test_list_environments_field_headers():
- client = DataplexServiceClient(
- credentials=ga_credentials.AnonymousCredentials(),
- )
-
- # Any value that is part of the HTTP/1.1 URI should be sent as
- # a field header. Set these to a non-empty value.
- request = service.ListEnvironmentsRequest()
-
- request.parent = 'parent_value'
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client.transport.list_environments),
- '__call__') as call:
- call.return_value = service.ListEnvironmentsResponse()
- client.list_environments(request)
-
- # Establish that the underlying gRPC stub method was called.
- assert len(call.mock_calls) == 1
- _, args, _ = call.mock_calls[0]
- assert args[0] == request
-
- # Establish that the field header was sent.
- _, _, kw = call.mock_calls[0]
- assert (
- 'x-goog-request-params',
- 'parent=parent_value',
- ) in kw['metadata']
-
-
-@pytest.mark.asyncio
-async def test_list_environments_field_headers_async():
- client = DataplexServiceAsyncClient(
- credentials=async_anonymous_credentials(),
- )
-
- # Any value that is part of the HTTP/1.1 URI should be sent as
- # a field header. Set these to a non-empty value.
- request = service.ListEnvironmentsRequest()
-
- request.parent = 'parent_value'
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client.transport.list_environments),
- '__call__') as call:
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(service.ListEnvironmentsResponse())
- await client.list_environments(request)
-
- # Establish that the underlying gRPC stub method was called.
- assert len(call.mock_calls)
- _, args, _ = call.mock_calls[0]
- assert args[0] == request
-
- # Establish that the field header was sent.
- _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'parent=parent_value', - ) in kw['metadata'] - - -def test_list_environments_flattened(): - client = DataplexServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_environments), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = service.ListEnvironmentsResponse() - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.list_environments( - parent='parent_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - arg = args[0].parent - mock_val = 'parent_value' - assert arg == mock_val - - -def test_list_environments_flattened_error(): - client = DataplexServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.list_environments( - service.ListEnvironmentsRequest(), - parent='parent_value', - ) - -@pytest.mark.asyncio -async def test_list_environments_flattened_async(): - client = DataplexServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_environments), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = service.ListEnvironmentsResponse() - - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(service.ListEnvironmentsResponse()) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.list_environments( - parent='parent_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].parent - mock_val = 'parent_value' - assert arg == mock_val - -@pytest.mark.asyncio -async def test_list_environments_flattened_error_async(): - client = DataplexServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - await client.list_environments( - service.ListEnvironmentsRequest(), - parent='parent_value', - ) - - -def test_list_environments_pager(transport_name: str = "grpc"): - client = DataplexServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport_name, - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_environments), - '__call__') as call: - # Set the response to a series of pages. 
- call.side_effect = ( - service.ListEnvironmentsResponse( - environments=[ - analyze.Environment(), - analyze.Environment(), - analyze.Environment(), - ], - next_page_token='abc', - ), - service.ListEnvironmentsResponse( - environments=[], - next_page_token='def', - ), - service.ListEnvironmentsResponse( - environments=[ - analyze.Environment(), - ], - next_page_token='ghi', - ), - service.ListEnvironmentsResponse( - environments=[ - analyze.Environment(), - analyze.Environment(), - ], - ), - RuntimeError, - ) - - expected_metadata = () - retry = retries.Retry() - timeout = 5 - expected_metadata = tuple(expected_metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ('parent', ''), - )), - ) - pager = client.list_environments(request={}, retry=retry, timeout=timeout) - - assert pager._metadata == expected_metadata - assert pager._retry == retry - assert pager._timeout == timeout - - results = list(pager) - assert len(results) == 6 - assert all(isinstance(i, analyze.Environment) - for i in results) -def test_list_environments_pages(transport_name: str = "grpc"): - client = DataplexServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport_name, - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_environments), - '__call__') as call: - # Set the response to a series of pages. - call.side_effect = ( - service.ListEnvironmentsResponse( - environments=[ - analyze.Environment(), - analyze.Environment(), - analyze.Environment(), - ], - next_page_token='abc', - ), - service.ListEnvironmentsResponse( - environments=[], - next_page_token='def', - ), - service.ListEnvironmentsResponse( - environments=[ - analyze.Environment(), - ], - next_page_token='ghi', - ), - service.ListEnvironmentsResponse( - environments=[ - analyze.Environment(), - analyze.Environment(), - ], - ), - RuntimeError, - ) - pages = list(client.list_environments(request={}).pages) - for page_, token in zip(pages, ['abc','def','ghi', '']): - assert page_.raw_page.next_page_token == token - -@pytest.mark.asyncio -async def test_list_environments_async_pager(): - client = DataplexServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_environments), - '__call__', new_callable=mock.AsyncMock) as call: - # Set the response to a series of pages. - call.side_effect = ( - service.ListEnvironmentsResponse( - environments=[ - analyze.Environment(), - analyze.Environment(), - analyze.Environment(), - ], - next_page_token='abc', - ), - service.ListEnvironmentsResponse( - environments=[], - next_page_token='def', - ), - service.ListEnvironmentsResponse( - environments=[ - analyze.Environment(), - ], - next_page_token='ghi', - ), - service.ListEnvironmentsResponse( - environments=[ - analyze.Environment(), - analyze.Environment(), - ], - ), - RuntimeError, - ) - async_pager = await client.list_environments(request={},) - assert async_pager.next_page_token == 'abc' - responses = [] - async for response in async_pager: # pragma: no branch - responses.append(response) - - assert len(responses) == 6 - assert all(isinstance(i, analyze.Environment) - for i in responses) - - -@pytest.mark.asyncio -async def test_list_environments_async_pages(): - client = DataplexServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. 
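The pager tests above stitch several `ListEnvironmentsResponse` pages together and check both item-level iteration and `.pages`. From an actual caller's perspective the same machinery looks like this (the resource name is hypothetical; iteration fetches follow-up pages transparently via `next_page_token`):

    from google.cloud import dataplex_v1

    client = dataplex_v1.DataplexServiceClient()
    parent = 'projects/my-project/locations/us-central1/lakes/my-lake'  # hypothetical

    # Item-level iteration: the pager issues additional ListEnvironments
    # requests as needed and yields Environment messages across pages.
    for environment in client.list_environments(parent=parent):
        print(environment.name)

    # Page-level iteration, mirroring the `.pages` loop in the tests.
    for page in client.list_environments(parent=parent).pages:
        print(page.raw_page.next_page_token)
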
- with mock.patch.object( - type(client.transport.list_environments), - '__call__', new_callable=mock.AsyncMock) as call: - # Set the response to a series of pages. - call.side_effect = ( - service.ListEnvironmentsResponse( - environments=[ - analyze.Environment(), - analyze.Environment(), - analyze.Environment(), - ], - next_page_token='abc', - ), - service.ListEnvironmentsResponse( - environments=[], - next_page_token='def', - ), - service.ListEnvironmentsResponse( - environments=[ - analyze.Environment(), - ], - next_page_token='ghi', - ), - service.ListEnvironmentsResponse( - environments=[ - analyze.Environment(), - analyze.Environment(), - ], - ), - RuntimeError, - ) - pages = [] - # Workaround issue in python 3.9 related to code coverage by adding `# pragma: no branch` - # See https://github.com/googleapis/gapic-generator-python/pull/1174#issuecomment-1025132372 - async for page_ in ( # pragma: no branch - await client.list_environments(request={}) - ).pages: - pages.append(page_) - for page_, token in zip(pages, ['abc','def','ghi', '']): - assert page_.raw_page.next_page_token == token - -@pytest.mark.parametrize("request_type", [ - service.GetEnvironmentRequest, - dict, -]) -def test_get_environment(request_type, transport: str = 'grpc'): - client = DataplexServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_environment), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = analyze.Environment( - name='name_value', - display_name='display_name_value', - uid='uid_value', - description='description_value', - state=resources.State.ACTIVE, - ) - response = client.get_environment(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - request = service.GetEnvironmentRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, analyze.Environment) - assert response.name == 'name_value' - assert response.display_name == 'display_name_value' - assert response.uid == 'uid_value' - assert response.description == 'description_value' - assert response.state == resources.State.ACTIVE - - -def test_get_environment_non_empty_request_with_auto_populated_field(): - # This test is a coverage failsafe to make sure that UUID4 fields are - # automatically populated, according to AIP-4235, with non-empty requests. - client = DataplexServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. - request = service.GetEnvironmentRequest( - name='name_value', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_environment), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. 
- client.get_environment(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == service.GetEnvironmentRequest( - name='name_value', - ) - -def test_get_environment_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = DataplexServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.get_environment in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.get_environment] = mock_rpc - request = {} - client.get_environment(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - client.get_environment(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_get_environment_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: - client = DataplexServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._client._transport.get_environment in client._client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.AsyncMock() - mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[client._client._transport.get_environment] = mock_rpc - - request = {} - await client.get_environment(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - await client.get_environment(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_get_environment_async(transport: str = 'grpc_asyncio', request_type=service.GetEnvironmentRequest): - client = DataplexServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_environment), - '__call__') as call: - # Designate an appropriate return value for the call. 
-        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(analyze.Environment(
-            name='name_value',
-            display_name='display_name_value',
-            uid='uid_value',
-            description='description_value',
-            state=resources.State.ACTIVE,
-        ))
-        response = await client.get_environment(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert len(call.mock_calls)
-        _, args, _ = call.mock_calls[0]
-        request = service.GetEnvironmentRequest()
-        assert args[0] == request
-
-    # Establish that the response is the type that we expect.
-    assert isinstance(response, analyze.Environment)
-    assert response.name == 'name_value'
-    assert response.display_name == 'display_name_value'
-    assert response.uid == 'uid_value'
-    assert response.description == 'description_value'
-    assert response.state == resources.State.ACTIVE
-
-
-@pytest.mark.asyncio
-async def test_get_environment_async_from_dict():
-    await test_get_environment_async(request_type=dict)
-
-def test_get_environment_field_headers():
-    client = DataplexServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-    )
-
-    # Any value that is part of the HTTP/1.1 URI should be sent as
-    # a field header. Set these to a non-empty value.
-    request = service.GetEnvironmentRequest()
-
-    request.name = 'name_value'
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.get_environment),
-            '__call__') as call:
-        call.return_value = analyze.Environment()
-        client.get_environment(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert len(call.mock_calls) == 1
-        _, args, _ = call.mock_calls[0]
-        assert args[0] == request
-
-    # Establish that the field header was sent.
-    _, _, kw = call.mock_calls[0]
-    assert (
-        'x-goog-request-params',
-        'name=name_value',
-    ) in kw['metadata']
-
-
-@pytest.mark.asyncio
-async def test_get_environment_field_headers_async():
-    client = DataplexServiceAsyncClient(
-        credentials=async_anonymous_credentials(),
-    )
-
-    # Any value that is part of the HTTP/1.1 URI should be sent as
-    # a field header. Set these to a non-empty value.
-    request = service.GetEnvironmentRequest()
-
-    request.name = 'name_value'
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.get_environment),
-            '__call__') as call:
-        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(analyze.Environment())
-        await client.get_environment(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert len(call.mock_calls)
-        _, args, _ = call.mock_calls[0]
-        assert args[0] == request
-
-    # Establish that the field header was sent.
-    _, _, kw = call.mock_calls[0]
-    assert (
-        'x-goog-request-params',
-        'name=name_value',
-    ) in kw['metadata']
-
-
-def test_get_environment_flattened():
-    client = DataplexServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-    )
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.get_environment),
-            '__call__') as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = analyze.Environment()
-        # Call the method with a truthy value for each flattened field,
-        # using the keyword arguments to the method.
-        client.get_environment(
-            name='name_value',
-        )
-
-        # Establish that the underlying call was made with the expected
-        # request object values.
-        assert len(call.mock_calls) == 1
-        _, args, _ = call.mock_calls[0]
-        arg = args[0].name
-        mock_val = 'name_value'
-        assert arg == mock_val
-
-
-def test_get_environment_flattened_error():
-    client = DataplexServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-    )
-
-    # Attempting to call a method with both a request object and flattened
-    # fields is an error.
-    with pytest.raises(ValueError):
-        client.get_environment(
-            service.GetEnvironmentRequest(),
-            name='name_value',
-        )
-
-@pytest.mark.asyncio
-async def test_get_environment_flattened_async():
-    client = DataplexServiceAsyncClient(
-        credentials=async_anonymous_credentials(),
-    )
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.get_environment),
-            '__call__') as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(analyze.Environment())
-        # Call the method with a truthy value for each flattened field,
-        # using the keyword arguments to the method.
-        response = await client.get_environment(
-            name='name_value',
-        )
-
-        # Establish that the underlying call was made with the expected
-        # request object values.
-        assert len(call.mock_calls)
-        _, args, _ = call.mock_calls[0]
-        arg = args[0].name
-        mock_val = 'name_value'
-        assert arg == mock_val
-
-@pytest.mark.asyncio
-async def test_get_environment_flattened_error_async():
-    client = DataplexServiceAsyncClient(
-        credentials=async_anonymous_credentials(),
-    )
-
-    # Attempting to call a method with both a request object and flattened
-    # fields is an error.
-    with pytest.raises(ValueError):
-        await client.get_environment(
-            service.GetEnvironmentRequest(),
-            name='name_value',
-        )
-
-
-@pytest.mark.parametrize("request_type", [
-    service.ListSessionsRequest,
-    dict,
-])
-def test_list_sessions(request_type, transport: str = 'grpc'):
-    client = DataplexServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport=transport,
-    )
-
-    # Everything is optional in proto3 as far as the runtime is concerned,
-    # and we are mocking out the actual API, so just send an empty request.
-    request = request_type()
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.list_sessions),
-            '__call__') as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = service.ListSessionsResponse(
-            next_page_token='next_page_token_value',
-        )
-        response = client.list_sessions(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert len(call.mock_calls) == 1
-        _, args, _ = call.mock_calls[0]
-        request = service.ListSessionsRequest()
-        assert args[0] == request
-
-    # Establish that the response is the type that we expect.
-    assert isinstance(response, pagers.ListSessionsPager)
-    assert response.next_page_token == 'next_page_token_value'
-
-
-def test_list_sessions_non_empty_request_with_auto_populated_field():
-    # This test is a coverage failsafe to make sure that UUID4 fields are
-    # automatically populated, according to AIP-4235, with non-empty requests.
-    client = DataplexServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport='grpc',
-    )
-
-    # Populate all string fields in the request which are not UUID4
-    # since we want to check that UUID4 are populated automatically
-    # if they meet the requirements of AIP 4235.
- request = service.ListSessionsRequest( - parent='parent_value', - page_token='page_token_value', - filter='filter_value', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_sessions), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client.list_sessions(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == service.ListSessionsRequest( - parent='parent_value', - page_token='page_token_value', - filter='filter_value', - ) - -def test_list_sessions_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = DataplexServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.list_sessions in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.list_sessions] = mock_rpc - request = {} - client.list_sessions(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - client.list_sessions(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_list_sessions_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: - client = DataplexServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._client._transport.list_sessions in client._client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.AsyncMock() - mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[client._client._transport.list_sessions] = mock_rpc - - request = {} - await client.list_sessions(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - await client.list_sessions(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_list_sessions_async(transport: str = 'grpc_asyncio', request_type=service.ListSessionsRequest): - client = DataplexServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. 
-    with mock.patch.object(
-            type(client.transport.list_sessions),
-            '__call__') as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(service.ListSessionsResponse(
-            next_page_token='next_page_token_value',
-        ))
-        response = await client.list_sessions(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert len(call.mock_calls)
-        _, args, _ = call.mock_calls[0]
-        request = service.ListSessionsRequest()
-        assert args[0] == request
-
-    # Establish that the response is the type that we expect.
-    assert isinstance(response, pagers.ListSessionsAsyncPager)
-    assert response.next_page_token == 'next_page_token_value'
-
-
-@pytest.mark.asyncio
-async def test_list_sessions_async_from_dict():
-    await test_list_sessions_async(request_type=dict)
-
-def test_list_sessions_field_headers():
-    client = DataplexServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-    )
-
-    # Any value that is part of the HTTP/1.1 URI should be sent as
-    # a field header. Set these to a non-empty value.
-    request = service.ListSessionsRequest()
-
-    request.parent = 'parent_value'
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.list_sessions),
-            '__call__') as call:
-        call.return_value = service.ListSessionsResponse()
-        client.list_sessions(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert len(call.mock_calls) == 1
-        _, args, _ = call.mock_calls[0]
-        assert args[0] == request
-
-    # Establish that the field header was sent.
-    _, _, kw = call.mock_calls[0]
-    assert (
-        'x-goog-request-params',
-        'parent=parent_value',
-    ) in kw['metadata']
-
-
-@pytest.mark.asyncio
-async def test_list_sessions_field_headers_async():
-    client = DataplexServiceAsyncClient(
-        credentials=async_anonymous_credentials(),
-    )
-
-    # Any value that is part of the HTTP/1.1 URI should be sent as
-    # a field header. Set these to a non-empty value.
-    request = service.ListSessionsRequest()
-
-    request.parent = 'parent_value'
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.list_sessions),
-            '__call__') as call:
-        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(service.ListSessionsResponse())
-        await client.list_sessions(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert len(call.mock_calls)
-        _, args, _ = call.mock_calls[0]
-        assert args[0] == request
-
-    # Establish that the field header was sent.
-    _, _, kw = call.mock_calls[0]
-    assert (
-        'x-goog-request-params',
-        'parent=parent_value',
-    ) in kw['metadata']
-
-
-def test_list_sessions_flattened():
-    client = DataplexServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-    )
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.list_sessions),
-            '__call__') as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = service.ListSessionsResponse()
-        # Call the method with a truthy value for each flattened field,
-        # using the keyword arguments to the method.
-        client.list_sessions(
-            parent='parent_value',
-        )
-
-        # Establish that the underlying call was made with the expected
-        # request object values.
-        assert len(call.mock_calls) == 1
-        _, args, _ = call.mock_calls[0]
-        arg = args[0].parent
-        mock_val = 'parent_value'
-        assert arg == mock_val
-
-
-def test_list_sessions_flattened_error():
-    client = DataplexServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-    )
-
-    # Attempting to call a method with both a request object and flattened
-    # fields is an error.
-    with pytest.raises(ValueError):
-        client.list_sessions(
-            service.ListSessionsRequest(),
-            parent='parent_value',
-        )
-
-@pytest.mark.asyncio
-async def test_list_sessions_flattened_async():
-    client = DataplexServiceAsyncClient(
-        credentials=async_anonymous_credentials(),
-    )
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.list_sessions),
-            '__call__') as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(service.ListSessionsResponse())
-        # Call the method with a truthy value for each flattened field,
-        # using the keyword arguments to the method.
-        response = await client.list_sessions(
-            parent='parent_value',
-        )
-
-        # Establish that the underlying call was made with the expected
-        # request object values.
-        assert len(call.mock_calls)
-        _, args, _ = call.mock_calls[0]
-        arg = args[0].parent
-        mock_val = 'parent_value'
-        assert arg == mock_val
-
-@pytest.mark.asyncio
-async def test_list_sessions_flattened_error_async():
-    client = DataplexServiceAsyncClient(
-        credentials=async_anonymous_credentials(),
-    )
-
-    # Attempting to call a method with both a request object and flattened
-    # fields is an error.
-    with pytest.raises(ValueError):
-        await client.list_sessions(
-            service.ListSessionsRequest(),
-            parent='parent_value',
-        )
-
-
-def test_list_sessions_pager(transport_name: str = "grpc"):
-    client = DataplexServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport=transport_name,
-    )
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.list_sessions),
-            '__call__') as call:
-        # Set the response to a series of pages.
-        call.side_effect = (
-            service.ListSessionsResponse(
-                sessions=[
-                    analyze.Session(),
-                    analyze.Session(),
-                    analyze.Session(),
-                ],
-                next_page_token='abc',
-            ),
-            service.ListSessionsResponse(
-                sessions=[],
-                next_page_token='def',
-            ),
-            service.ListSessionsResponse(
-                sessions=[
-                    analyze.Session(),
-                ],
-                next_page_token='ghi',
-            ),
-            service.ListSessionsResponse(
-                sessions=[
-                    analyze.Session(),
-                    analyze.Session(),
-                ],
-            ),
-            RuntimeError,
-        )
-
-        expected_metadata = ()
-        retry = retries.Retry()
-        timeout = 5
-        expected_metadata = tuple(expected_metadata) + (
-            gapic_v1.routing_header.to_grpc_metadata((
-                ('parent', ''),
-            )),
-        )
-        pager = client.list_sessions(request={}, retry=retry, timeout=timeout)
-
-        assert pager._metadata == expected_metadata
-        assert pager._retry == retry
-        assert pager._timeout == timeout
-
-        results = list(pager)
-        assert len(results) == 6
-        assert all(isinstance(i, analyze.Session)
-                   for i in results)
-
-
-def test_list_sessions_pages(transport_name: str = "grpc"):
-    client = DataplexServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport=transport_name,
-    )
-
-    # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object( - type(client.transport.list_sessions), - '__call__') as call: - # Set the response to a series of pages. - call.side_effect = ( - service.ListSessionsResponse( - sessions=[ - analyze.Session(), - analyze.Session(), - analyze.Session(), - ], - next_page_token='abc', - ), - service.ListSessionsResponse( - sessions=[], - next_page_token='def', - ), - service.ListSessionsResponse( - sessions=[ - analyze.Session(), - ], - next_page_token='ghi', - ), - service.ListSessionsResponse( - sessions=[ - analyze.Session(), - analyze.Session(), - ], - ), - RuntimeError, - ) - pages = list(client.list_sessions(request={}).pages) - for page_, token in zip(pages, ['abc','def','ghi', '']): - assert page_.raw_page.next_page_token == token - -@pytest.mark.asyncio -async def test_list_sessions_async_pager(): - client = DataplexServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_sessions), - '__call__', new_callable=mock.AsyncMock) as call: - # Set the response to a series of pages. - call.side_effect = ( - service.ListSessionsResponse( - sessions=[ - analyze.Session(), - analyze.Session(), - analyze.Session(), - ], - next_page_token='abc', - ), - service.ListSessionsResponse( - sessions=[], - next_page_token='def', - ), - service.ListSessionsResponse( - sessions=[ - analyze.Session(), - ], - next_page_token='ghi', - ), - service.ListSessionsResponse( - sessions=[ - analyze.Session(), - analyze.Session(), - ], - ), - RuntimeError, - ) - async_pager = await client.list_sessions(request={},) - assert async_pager.next_page_token == 'abc' - responses = [] - async for response in async_pager: # pragma: no branch - responses.append(response) - - assert len(responses) == 6 - assert all(isinstance(i, analyze.Session) - for i in responses) - - -@pytest.mark.asyncio -async def test_list_sessions_async_pages(): - client = DataplexServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_sessions), - '__call__', new_callable=mock.AsyncMock) as call: - # Set the response to a series of pages. - call.side_effect = ( - service.ListSessionsResponse( - sessions=[ - analyze.Session(), - analyze.Session(), - analyze.Session(), - ], - next_page_token='abc', - ), - service.ListSessionsResponse( - sessions=[], - next_page_token='def', - ), - service.ListSessionsResponse( - sessions=[ - analyze.Session(), - ], - next_page_token='ghi', - ), - service.ListSessionsResponse( - sessions=[ - analyze.Session(), - analyze.Session(), - ], - ), - RuntimeError, - ) - pages = [] - # Workaround issue in python 3.9 related to code coverage by adding `# pragma: no branch` - # See https://github.com/googleapis/gapic-generator-python/pull/1174#issuecomment-1025132372 - async for page_ in ( # pragma: no branch - await client.list_sessions(request={}) - ).pages: - pages.append(page_) - for page_, token in zip(pages, ['abc','def','ghi', '']): - assert page_.raw_page.next_page_token == token - - -def test_credentials_transport_error(): - # It is an error to provide credentials and a transport instance. 
- transport = transports.DataplexServiceGrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - with pytest.raises(ValueError): - client = DataplexServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # It is an error to provide a credentials file and a transport instance. - transport = transports.DataplexServiceGrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - with pytest.raises(ValueError): - client = DataplexServiceClient( - client_options={"credentials_file": "credentials.json"}, - transport=transport, - ) - - # It is an error to provide an api_key and a transport instance. - transport = transports.DataplexServiceGrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - options = client_options.ClientOptions() - options.api_key = "api_key" - with pytest.raises(ValueError): - client = DataplexServiceClient( - client_options=options, - transport=transport, - ) - - # It is an error to provide an api_key and a credential. - options = client_options.ClientOptions() - options.api_key = "api_key" - with pytest.raises(ValueError): - client = DataplexServiceClient( - client_options=options, - credentials=ga_credentials.AnonymousCredentials() - ) - - # It is an error to provide scopes and a transport instance. - transport = transports.DataplexServiceGrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - with pytest.raises(ValueError): - client = DataplexServiceClient( - client_options={"scopes": ["1", "2"]}, - transport=transport, - ) - - -def test_transport_instance(): - # A client may be instantiated with a custom transport instance. - transport = transports.DataplexServiceGrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - client = DataplexServiceClient(transport=transport) - assert client.transport is transport - -def test_transport_get_channel(): - # A client may be instantiated with a custom transport instance. - transport = transports.DataplexServiceGrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - channel = transport.grpc_channel - assert channel - - transport = transports.DataplexServiceGrpcAsyncIOTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - channel = transport.grpc_channel - assert channel - -@pytest.mark.parametrize("transport_class", [ - transports.DataplexServiceGrpcTransport, - transports.DataplexServiceGrpcAsyncIOTransport, -]) -def test_transport_adc(transport_class): - # Test default credentials are used if not provided. - with mock.patch.object(google.auth, 'default') as adc: - adc.return_value = (ga_credentials.AnonymousCredentials(), None) - transport_class() - adc.assert_called_once() - -def test_transport_kind_grpc(): - transport = DataplexServiceClient.get_transport_class("grpc")( - credentials=ga_credentials.AnonymousCredentials() - ) - assert transport.kind == "grpc" - - -def test_initialize_client_w_grpc(): - client = DataplexServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc" - ) - assert client is not None - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_create_lake_empty_call_grpc(): - client = DataplexServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call, and fake the request. 
- with mock.patch.object( - type(client.transport.create_lake), - '__call__') as call: - call.return_value = operations_pb2.Operation(name='operations/op') - client.create_lake(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = service.CreateLakeRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_update_lake_empty_call_grpc(): - client = DataplexServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.update_lake), - '__call__') as call: - call.return_value = operations_pb2.Operation(name='operations/op') - client.update_lake(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = service.UpdateLakeRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_delete_lake_empty_call_grpc(): - client = DataplexServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.delete_lake), - '__call__') as call: - call.return_value = operations_pb2.Operation(name='operations/op') - client.delete_lake(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = service.DeleteLakeRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_list_lakes_empty_call_grpc(): - client = DataplexServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.list_lakes), - '__call__') as call: - call.return_value = service.ListLakesResponse() - client.list_lakes(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = service.ListLakesRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_get_lake_empty_call_grpc(): - client = DataplexServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.get_lake), - '__call__') as call: - call.return_value = resources.Lake() - client.get_lake(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = service.GetLakeRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. 
-def test_list_lake_actions_empty_call_grpc(): - client = DataplexServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.list_lake_actions), - '__call__') as call: - call.return_value = service.ListActionsResponse() - client.list_lake_actions(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = service.ListLakeActionsRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_create_zone_empty_call_grpc(): - client = DataplexServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.create_zone), - '__call__') as call: - call.return_value = operations_pb2.Operation(name='operations/op') - client.create_zone(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = service.CreateZoneRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_update_zone_empty_call_grpc(): - client = DataplexServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.update_zone), - '__call__') as call: - call.return_value = operations_pb2.Operation(name='operations/op') - client.update_zone(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = service.UpdateZoneRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_delete_zone_empty_call_grpc(): - client = DataplexServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.delete_zone), - '__call__') as call: - call.return_value = operations_pb2.Operation(name='operations/op') - client.delete_zone(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = service.DeleteZoneRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_list_zones_empty_call_grpc(): - client = DataplexServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.list_zones), - '__call__') as call: - call.return_value = service.ListZonesResponse() - client.list_zones(request=None) - - # Establish that the underlying stub method was called. 
- call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = service.ListZonesRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_get_zone_empty_call_grpc(): - client = DataplexServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.get_zone), - '__call__') as call: - call.return_value = resources.Zone() - client.get_zone(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = service.GetZoneRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_list_zone_actions_empty_call_grpc(): - client = DataplexServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.list_zone_actions), - '__call__') as call: - call.return_value = service.ListActionsResponse() - client.list_zone_actions(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = service.ListZoneActionsRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_create_asset_empty_call_grpc(): - client = DataplexServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.create_asset), - '__call__') as call: - call.return_value = operations_pb2.Operation(name='operations/op') - client.create_asset(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = service.CreateAssetRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_update_asset_empty_call_grpc(): - client = DataplexServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.update_asset), - '__call__') as call: - call.return_value = operations_pb2.Operation(name='operations/op') - client.update_asset(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = service.UpdateAssetRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_delete_asset_empty_call_grpc(): - client = DataplexServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call, and fake the request. 
- with mock.patch.object( - type(client.transport.delete_asset), - '__call__') as call: - call.return_value = operations_pb2.Operation(name='operations/op') - client.delete_asset(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = service.DeleteAssetRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_list_assets_empty_call_grpc(): - client = DataplexServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.list_assets), - '__call__') as call: - call.return_value = service.ListAssetsResponse() - client.list_assets(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = service.ListAssetsRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_get_asset_empty_call_grpc(): - client = DataplexServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.get_asset), - '__call__') as call: - call.return_value = resources.Asset() - client.get_asset(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = service.GetAssetRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_list_asset_actions_empty_call_grpc(): - client = DataplexServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.list_asset_actions), - '__call__') as call: - call.return_value = service.ListActionsResponse() - client.list_asset_actions(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = service.ListAssetActionsRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_create_task_empty_call_grpc(): - client = DataplexServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.create_task), - '__call__') as call: - call.return_value = operations_pb2.Operation(name='operations/op') - client.create_task(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = service.CreateTaskRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. 
-def test_update_task_empty_call_grpc(): - client = DataplexServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.update_task), - '__call__') as call: - call.return_value = operations_pb2.Operation(name='operations/op') - client.update_task(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = service.UpdateTaskRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_delete_task_empty_call_grpc(): - client = DataplexServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.delete_task), - '__call__') as call: - call.return_value = operations_pb2.Operation(name='operations/op') - client.delete_task(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = service.DeleteTaskRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_list_tasks_empty_call_grpc(): - client = DataplexServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.list_tasks), - '__call__') as call: - call.return_value = service.ListTasksResponse() - client.list_tasks(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = service.ListTasksRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_get_task_empty_call_grpc(): - client = DataplexServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.get_task), - '__call__') as call: - call.return_value = tasks.Task() - client.get_task(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = service.GetTaskRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_list_jobs_empty_call_grpc(): - client = DataplexServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.list_jobs), - '__call__') as call: - call.return_value = service.ListJobsResponse() - client.list_jobs(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = service.ListJobsRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. 
-def test_run_task_empty_call_grpc(): - client = DataplexServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.run_task), - '__call__') as call: - call.return_value = service.RunTaskResponse() - client.run_task(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = service.RunTaskRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_get_job_empty_call_grpc(): - client = DataplexServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.get_job), - '__call__') as call: - call.return_value = tasks.Job() - client.get_job(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = service.GetJobRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_cancel_job_empty_call_grpc(): - client = DataplexServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.cancel_job), - '__call__') as call: - call.return_value = None - client.cancel_job(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = service.CancelJobRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_create_environment_empty_call_grpc(): - client = DataplexServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.create_environment), - '__call__') as call: - call.return_value = operations_pb2.Operation(name='operations/op') - client.create_environment(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = service.CreateEnvironmentRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_update_environment_empty_call_grpc(): - client = DataplexServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.update_environment), - '__call__') as call: - call.return_value = operations_pb2.Operation(name='operations/op') - client.update_environment(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = service.UpdateEnvironmentRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. 
request == None and no flattened fields passed, work. -def test_delete_environment_empty_call_grpc(): - client = DataplexServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.delete_environment), - '__call__') as call: - call.return_value = operations_pb2.Operation(name='operations/op') - client.delete_environment(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = service.DeleteEnvironmentRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_list_environments_empty_call_grpc(): - client = DataplexServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.list_environments), - '__call__') as call: - call.return_value = service.ListEnvironmentsResponse() - client.list_environments(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = service.ListEnvironmentsRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_get_environment_empty_call_grpc(): - client = DataplexServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.get_environment), - '__call__') as call: - call.return_value = analyze.Environment() - client.get_environment(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = service.GetEnvironmentRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_list_sessions_empty_call_grpc(): - client = DataplexServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.list_sessions), - '__call__') as call: - call.return_value = service.ListSessionsResponse() - client.list_sessions(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = service.ListSessionsRequest() - - assert args[0] == request_msg - - -def test_transport_kind_grpc_asyncio(): - transport = DataplexServiceAsyncClient.get_transport_class("grpc_asyncio")( - credentials=async_anonymous_credentials() - ) - assert transport.kind == "grpc_asyncio" - - -def test_initialize_client_w_grpc_asyncio(): - client = DataplexServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio" - ) - assert client is not None - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. 
-@pytest.mark.asyncio -async def test_create_lake_empty_call_grpc_asyncio(): - client = DataplexServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.create_lake), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name='operations/spam') - ) - await client.create_lake(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = service.CreateLakeRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -@pytest.mark.asyncio -async def test_update_lake_empty_call_grpc_asyncio(): - client = DataplexServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.update_lake), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name='operations/spam') - ) - await client.update_lake(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = service.UpdateLakeRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -@pytest.mark.asyncio -async def test_delete_lake_empty_call_grpc_asyncio(): - client = DataplexServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.delete_lake), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name='operations/spam') - ) - await client.delete_lake(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = service.DeleteLakeRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -@pytest.mark.asyncio -async def test_list_lakes_empty_call_grpc_asyncio(): - client = DataplexServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.list_lakes), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(service.ListLakesResponse( - next_page_token='next_page_token_value', - unreachable_locations=['unreachable_locations_value'], - )) - await client.list_lakes(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = service.ListLakesRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. 
request == None and no flattened fields passed, work. -@pytest.mark.asyncio -async def test_get_lake_empty_call_grpc_asyncio(): - client = DataplexServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.get_lake), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(resources.Lake( - name='name_value', - display_name='display_name_value', - uid='uid_value', - description='description_value', - state=resources.State.ACTIVE, - service_account='service_account_value', - )) - await client.get_lake(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = service.GetLakeRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -@pytest.mark.asyncio -async def test_list_lake_actions_empty_call_grpc_asyncio(): - client = DataplexServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.list_lake_actions), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(service.ListActionsResponse( - next_page_token='next_page_token_value', - )) - await client.list_lake_actions(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = service.ListLakeActionsRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -@pytest.mark.asyncio -async def test_create_zone_empty_call_grpc_asyncio(): - client = DataplexServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.create_zone), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name='operations/spam') - ) - await client.create_zone(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = service.CreateZoneRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -@pytest.mark.asyncio -async def test_update_zone_empty_call_grpc_asyncio(): - client = DataplexServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.update_zone), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name='operations/spam') - ) - await client.update_zone(request=None) - - # Establish that the underlying stub method was called. 
- call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = service.UpdateZoneRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -@pytest.mark.asyncio -async def test_delete_zone_empty_call_grpc_asyncio(): - client = DataplexServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.delete_zone), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name='operations/spam') - ) - await client.delete_zone(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = service.DeleteZoneRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -@pytest.mark.asyncio -async def test_list_zones_empty_call_grpc_asyncio(): - client = DataplexServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.list_zones), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(service.ListZonesResponse( - next_page_token='next_page_token_value', - )) - await client.list_zones(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = service.ListZonesRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -@pytest.mark.asyncio -async def test_get_zone_empty_call_grpc_asyncio(): - client = DataplexServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.get_zone), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(resources.Zone( - name='name_value', - display_name='display_name_value', - uid='uid_value', - description='description_value', - state=resources.State.ACTIVE, - type_=resources.Zone.Type.RAW, - )) - await client.get_zone(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = service.GetZoneRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -@pytest.mark.asyncio -async def test_list_zone_actions_empty_call_grpc_asyncio(): - client = DataplexServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.list_zone_actions), - '__call__') as call: - # Designate an appropriate return value for the call. 
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(service.ListActionsResponse( - next_page_token='next_page_token_value', - )) - await client.list_zone_actions(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = service.ListZoneActionsRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -@pytest.mark.asyncio -async def test_create_asset_empty_call_grpc_asyncio(): - client = DataplexServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.create_asset), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name='operations/spam') - ) - await client.create_asset(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = service.CreateAssetRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -@pytest.mark.asyncio -async def test_update_asset_empty_call_grpc_asyncio(): - client = DataplexServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.update_asset), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name='operations/spam') - ) - await client.update_asset(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = service.UpdateAssetRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -@pytest.mark.asyncio -async def test_delete_asset_empty_call_grpc_asyncio(): - client = DataplexServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.delete_asset), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name='operations/spam') - ) - await client.delete_asset(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = service.DeleteAssetRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -@pytest.mark.asyncio -async def test_list_assets_empty_call_grpc_asyncio(): - client = DataplexServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call, and fake the request. 
- with mock.patch.object( - type(client.transport.list_assets), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(service.ListAssetsResponse( - next_page_token='next_page_token_value', - )) - await client.list_assets(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = service.ListAssetsRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -@pytest.mark.asyncio -async def test_get_asset_empty_call_grpc_asyncio(): - client = DataplexServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.get_asset), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(resources.Asset( - name='name_value', - display_name='display_name_value', - uid='uid_value', - description='description_value', - state=resources.State.ACTIVE, - )) - await client.get_asset(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = service.GetAssetRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -@pytest.mark.asyncio -async def test_list_asset_actions_empty_call_grpc_asyncio(): - client = DataplexServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.list_asset_actions), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(service.ListActionsResponse( - next_page_token='next_page_token_value', - )) - await client.list_asset_actions(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = service.ListAssetActionsRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -@pytest.mark.asyncio -async def test_create_task_empty_call_grpc_asyncio(): - client = DataplexServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.create_task), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name='operations/spam') - ) - await client.create_task(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = service.CreateTaskRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. 
-@pytest.mark.asyncio -async def test_update_task_empty_call_grpc_asyncio(): - client = DataplexServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.update_task), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name='operations/spam') - ) - await client.update_task(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = service.UpdateTaskRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -@pytest.mark.asyncio -async def test_delete_task_empty_call_grpc_asyncio(): - client = DataplexServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.delete_task), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name='operations/spam') - ) - await client.delete_task(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = service.DeleteTaskRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -@pytest.mark.asyncio -async def test_list_tasks_empty_call_grpc_asyncio(): - client = DataplexServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.list_tasks), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(service.ListTasksResponse( - next_page_token='next_page_token_value', - unreachable_locations=['unreachable_locations_value'], - )) - await client.list_tasks(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = service.ListTasksRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -@pytest.mark.asyncio -async def test_get_task_empty_call_grpc_asyncio(): - client = DataplexServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.get_task), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(tasks.Task( - name='name_value', - uid='uid_value', - description='description_value', - display_name='display_name_value', - state=resources.State.ACTIVE, - )) - await client.get_task(request=None) - - # Establish that the underlying stub method was called. 
- call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = service.GetTaskRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -@pytest.mark.asyncio -async def test_list_jobs_empty_call_grpc_asyncio(): - client = DataplexServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.list_jobs), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(service.ListJobsResponse( - next_page_token='next_page_token_value', - )) - await client.list_jobs(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = service.ListJobsRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -@pytest.mark.asyncio -async def test_run_task_empty_call_grpc_asyncio(): - client = DataplexServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.run_task), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(service.RunTaskResponse( - )) - await client.run_task(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = service.RunTaskRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -@pytest.mark.asyncio -async def test_get_job_empty_call_grpc_asyncio(): - client = DataplexServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.get_job), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(tasks.Job( - name='name_value', - uid='uid_value', - state=tasks.Job.State.RUNNING, - retry_count=1214, - service=tasks.Job.Service.DATAPROC, - service_job='service_job_value', - message='message_value', - trigger=tasks.Job.Trigger.TASK_CONFIG, - )) - await client.get_job(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = service.GetJobRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -@pytest.mark.asyncio -async def test_cancel_job_empty_call_grpc_asyncio(): - client = DataplexServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.cancel_job), - '__call__') as call: - # Designate an appropriate return value for the call. 
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - await client.cancel_job(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = service.CancelJobRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -@pytest.mark.asyncio -async def test_create_environment_empty_call_grpc_asyncio(): - client = DataplexServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.create_environment), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name='operations/spam') - ) - await client.create_environment(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = service.CreateEnvironmentRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -@pytest.mark.asyncio -async def test_update_environment_empty_call_grpc_asyncio(): - client = DataplexServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.update_environment), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name='operations/spam') - ) - await client.update_environment(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = service.UpdateEnvironmentRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -@pytest.mark.asyncio -async def test_delete_environment_empty_call_grpc_asyncio(): - client = DataplexServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.delete_environment), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name='operations/spam') - ) - await client.delete_environment(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = service.DeleteEnvironmentRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -@pytest.mark.asyncio -async def test_list_environments_empty_call_grpc_asyncio(): - client = DataplexServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call, and fake the request. 
- with mock.patch.object( - type(client.transport.list_environments), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(service.ListEnvironmentsResponse( - next_page_token='next_page_token_value', - )) - await client.list_environments(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = service.ListEnvironmentsRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -@pytest.mark.asyncio -async def test_get_environment_empty_call_grpc_asyncio(): - client = DataplexServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.get_environment), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(analyze.Environment( - name='name_value', - display_name='display_name_value', - uid='uid_value', - description='description_value', - state=resources.State.ACTIVE, - )) - await client.get_environment(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = service.GetEnvironmentRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -@pytest.mark.asyncio -async def test_list_sessions_empty_call_grpc_asyncio(): - client = DataplexServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.list_sessions), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(service.ListSessionsResponse( - next_page_token='next_page_token_value', - )) - await client.list_sessions(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = service.ListSessionsRequest() - - assert args[0] == request_msg - - -def test_transport_grpc_default(): - # A client should use the gRPC transport by default. - client = DataplexServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - assert isinstance( - client.transport, - transports.DataplexServiceGrpcTransport, - ) - -def test_dataplex_service_base_transport_error(): - # Passing both a credentials object and credentials_file should raise an error - with pytest.raises(core_exceptions.DuplicateCredentialArgs): - transport = transports.DataplexServiceTransport( - credentials=ga_credentials.AnonymousCredentials(), - credentials_file="credentials.json" - ) - - -def test_dataplex_service_base_transport(): - # Instantiate the base transport. - with mock.patch('google.cloud.dataplex_v1.services.dataplex_service.transports.DataplexServiceTransport.__init__') as Transport: - Transport.return_value = None - transport = transports.DataplexServiceTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Every method on the transport should just blindly - # raise NotImplementedError. 
- methods = ( - 'create_lake', - 'update_lake', - 'delete_lake', - 'list_lakes', - 'get_lake', - 'list_lake_actions', - 'create_zone', - 'update_zone', - 'delete_zone', - 'list_zones', - 'get_zone', - 'list_zone_actions', - 'create_asset', - 'update_asset', - 'delete_asset', - 'list_assets', - 'get_asset', - 'list_asset_actions', - 'create_task', - 'update_task', - 'delete_task', - 'list_tasks', - 'get_task', - 'list_jobs', - 'run_task', - 'get_job', - 'cancel_job', - 'create_environment', - 'update_environment', - 'delete_environment', - 'list_environments', - 'get_environment', - 'list_sessions', - 'get_location', - 'list_locations', - 'get_operation', - 'cancel_operation', - 'delete_operation', - 'list_operations', - ) - for method in methods: - with pytest.raises(NotImplementedError): - getattr(transport, method)(request=object()) - - with pytest.raises(NotImplementedError): - transport.close() - - # Additionally, the LRO client (a property) should - # also raise NotImplementedError - with pytest.raises(NotImplementedError): - transport.operations_client - - # Catch all for all remaining methods and properties - remainder = [ - 'kind', - ] - for r in remainder: - with pytest.raises(NotImplementedError): - getattr(transport, r)() - - -def test_dataplex_service_base_transport_with_credentials_file(): - # Instantiate the base transport with a credentials file - with mock.patch.object(google.auth, 'load_credentials_from_file', autospec=True) as load_creds, mock.patch('google.cloud.dataplex_v1.services.dataplex_service.transports.DataplexServiceTransport._prep_wrapped_messages') as Transport: - Transport.return_value = None - load_creds.return_value = (ga_credentials.AnonymousCredentials(), None) - transport = transports.DataplexServiceTransport( - credentials_file="credentials.json", - quota_project_id="octopus", - ) - load_creds.assert_called_once_with("credentials.json", - scopes=None, - default_scopes=( - 'https://www.googleapis.com/auth/cloud-platform', -), - quota_project_id="octopus", - ) - - -def test_dataplex_service_base_transport_with_adc(): - # Test the default credentials are used if credentials and credentials_file are None. - with mock.patch.object(google.auth, 'default', autospec=True) as adc, mock.patch('google.cloud.dataplex_v1.services.dataplex_service.transports.DataplexServiceTransport._prep_wrapped_messages') as Transport: - Transport.return_value = None - adc.return_value = (ga_credentials.AnonymousCredentials(), None) - transport = transports.DataplexServiceTransport() - adc.assert_called_once() - - -def test_dataplex_service_auth_adc(): - # If no credentials are provided, we should use ADC credentials. - with mock.patch.object(google.auth, 'default', autospec=True) as adc: - adc.return_value = (ga_credentials.AnonymousCredentials(), None) - DataplexServiceClient() - adc.assert_called_once_with( - scopes=None, - default_scopes=( - 'https://www.googleapis.com/auth/cloud-platform', -), - quota_project_id=None, - ) - - -@pytest.mark.parametrize( - "transport_class", - [ - transports.DataplexServiceGrpcTransport, - transports.DataplexServiceGrpcAsyncIOTransport, - ], -) -def test_dataplex_service_transport_auth_adc(transport_class): - # If credentials and host are not provided, the transport class should use - # ADC credentials. 
- with mock.patch.object(google.auth, 'default', autospec=True) as adc: - adc.return_value = (ga_credentials.AnonymousCredentials(), None) - transport_class(quota_project_id="octopus", scopes=["1", "2"]) - adc.assert_called_once_with( - scopes=["1", "2"], - default_scopes=( 'https://www.googleapis.com/auth/cloud-platform',), - quota_project_id="octopus", - ) - - -@pytest.mark.parametrize( - "transport_class", - [ - transports.DataplexServiceGrpcTransport, - transports.DataplexServiceGrpcAsyncIOTransport, - ], -) -def test_dataplex_service_transport_auth_gdch_credentials(transport_class): - host = 'https://language.com' - api_audience_tests = [None, 'https://language2.com'] - api_audience_expect = [host, 'https://language2.com'] - for t, e in zip(api_audience_tests, api_audience_expect): - with mock.patch.object(google.auth, 'default', autospec=True) as adc: - gdch_mock = mock.MagicMock() - type(gdch_mock).with_gdch_audience = mock.PropertyMock(return_value=gdch_mock) - adc.return_value = (gdch_mock, None) - transport_class(host=host, api_audience=t) - gdch_mock.with_gdch_audience.assert_called_once_with( - e - ) - - -@pytest.mark.parametrize( - "transport_class,grpc_helpers", - [ - (transports.DataplexServiceGrpcTransport, grpc_helpers), - (transports.DataplexServiceGrpcAsyncIOTransport, grpc_helpers_async) - ], -) -def test_dataplex_service_transport_create_channel(transport_class, grpc_helpers): - # If credentials and host are not provided, the transport class should use - # ADC credentials. - with mock.patch.object(google.auth, "default", autospec=True) as adc, mock.patch.object( - grpc_helpers, "create_channel", autospec=True - ) as create_channel: - creds = ga_credentials.AnonymousCredentials() - adc.return_value = (creds, None) - transport_class( - quota_project_id="octopus", - scopes=["1", "2"] - ) - - create_channel.assert_called_with( - "dataplex.googleapis.com:443", - credentials=creds, - credentials_file=None, - quota_project_id="octopus", - default_scopes=( - 'https://www.googleapis.com/auth/cloud-platform', -), - scopes=["1", "2"], - default_host="dataplex.googleapis.com", - ssl_credentials=None, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ], - ) - - -@pytest.mark.parametrize("transport_class", [transports.DataplexServiceGrpcTransport, transports.DataplexServiceGrpcAsyncIOTransport]) -def test_dataplex_service_grpc_transport_client_cert_source_for_mtls( - transport_class -): - cred = ga_credentials.AnonymousCredentials() - - # Check ssl_channel_credentials is used if provided. - with mock.patch.object(transport_class, "create_channel") as mock_create_channel: - mock_ssl_channel_creds = mock.Mock() - transport_class( - host="squid.clam.whelk", - credentials=cred, - ssl_channel_credentials=mock_ssl_channel_creds - ) - mock_create_channel.assert_called_once_with( - "squid.clam.whelk:443", - credentials=cred, - credentials_file=None, - scopes=None, - ssl_credentials=mock_ssl_channel_creds, - quota_project_id=None, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ], - ) - - # Check if ssl_channel_credentials is not provided, then client_cert_source_for_mtls - # is used. 
-    with mock.patch.object(transport_class, "create_channel", return_value=mock.Mock()):
-        with mock.patch("grpc.ssl_channel_credentials") as mock_ssl_cred:
-            transport_class(
-                credentials=cred,
-                client_cert_source_for_mtls=client_cert_source_callback
-            )
-            expected_cert, expected_key = client_cert_source_callback()
-            mock_ssl_cred.assert_called_once_with(
-                certificate_chain=expected_cert,
-                private_key=expected_key
-            )
-
-
-@pytest.mark.parametrize("transport_name", [
-    "grpc",
-    "grpc_asyncio",
-])
-def test_dataplex_service_host_no_port(transport_name):
-    client = DataplexServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        client_options=client_options.ClientOptions(api_endpoint='dataplex.googleapis.com'),
-        transport=transport_name,
-    )
-    assert client.transport._host == (
-        'dataplex.googleapis.com:443'
-    )
-
-@pytest.mark.parametrize("transport_name", [
-    "grpc",
-    "grpc_asyncio",
-])
-def test_dataplex_service_host_with_port(transport_name):
-    client = DataplexServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        client_options=client_options.ClientOptions(api_endpoint='dataplex.googleapis.com:8000'),
-        transport=transport_name,
-    )
-    assert client.transport._host == (
-        'dataplex.googleapis.com:8000'
-    )
-
-def test_dataplex_service_grpc_transport_channel():
-    channel = grpc.secure_channel('http://localhost/', grpc.local_channel_credentials())
-
-    # Check that channel is used if provided.
-    transport = transports.DataplexServiceGrpcTransport(
-        host="squid.clam.whelk",
-        channel=channel,
-    )
-    assert transport.grpc_channel == channel
-    assert transport._host == "squid.clam.whelk:443"
-    assert transport._ssl_channel_credentials is None
-
-
-def test_dataplex_service_grpc_asyncio_transport_channel():
-    channel = aio.secure_channel('http://localhost/', grpc.local_channel_credentials())
-
-    # Check that channel is used if provided.
-    transport = transports.DataplexServiceGrpcAsyncIOTransport(
-        host="squid.clam.whelk",
-        channel=channel,
-    )
-    assert transport.grpc_channel == channel
-    assert transport._host == "squid.clam.whelk:443"
-    assert transport._ssl_channel_credentials is None
-
-
-# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are
-# removed from grpc/grpc_asyncio transport constructor.
-@pytest.mark.parametrize("transport_class", [transports.DataplexServiceGrpcTransport, transports.DataplexServiceGrpcAsyncIOTransport]) -def test_dataplex_service_transport_channel_mtls_with_client_cert_source( - transport_class -): - with mock.patch("grpc.ssl_channel_credentials", autospec=True) as grpc_ssl_channel_cred: - with mock.patch.object(transport_class, "create_channel") as grpc_create_channel: - mock_ssl_cred = mock.Mock() - grpc_ssl_channel_cred.return_value = mock_ssl_cred - - mock_grpc_channel = mock.Mock() - grpc_create_channel.return_value = mock_grpc_channel - - cred = ga_credentials.AnonymousCredentials() - with pytest.warns(DeprecationWarning): - with mock.patch.object(google.auth, 'default') as adc: - adc.return_value = (cred, None) - transport = transport_class( - host="squid.clam.whelk", - api_mtls_endpoint="mtls.squid.clam.whelk", - client_cert_source=client_cert_source_callback, - ) - adc.assert_called_once() - - grpc_ssl_channel_cred.assert_called_once_with( - certificate_chain=b"cert bytes", private_key=b"key bytes" - ) - grpc_create_channel.assert_called_once_with( - "mtls.squid.clam.whelk:443", - credentials=cred, - credentials_file=None, - scopes=None, - ssl_credentials=mock_ssl_cred, - quota_project_id=None, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ], - ) - assert transport.grpc_channel == mock_grpc_channel - assert transport._ssl_channel_credentials == mock_ssl_cred - - -# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are -# removed from grpc/grpc_asyncio transport constructor. -@pytest.mark.parametrize("transport_class", [transports.DataplexServiceGrpcTransport, transports.DataplexServiceGrpcAsyncIOTransport]) -def test_dataplex_service_transport_channel_mtls_with_adc( - transport_class -): - mock_ssl_cred = mock.Mock() - with mock.patch.multiple( - "google.auth.transport.grpc.SslCredentials", - __init__=mock.Mock(return_value=None), - ssl_credentials=mock.PropertyMock(return_value=mock_ssl_cred), - ): - with mock.patch.object(transport_class, "create_channel") as grpc_create_channel: - mock_grpc_channel = mock.Mock() - grpc_create_channel.return_value = mock_grpc_channel - mock_cred = mock.Mock() - - with pytest.warns(DeprecationWarning): - transport = transport_class( - host="squid.clam.whelk", - credentials=mock_cred, - api_mtls_endpoint="mtls.squid.clam.whelk", - client_cert_source=None, - ) - - grpc_create_channel.assert_called_once_with( - "mtls.squid.clam.whelk:443", - credentials=mock_cred, - credentials_file=None, - scopes=None, - ssl_credentials=mock_ssl_cred, - quota_project_id=None, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ], - ) - assert transport.grpc_channel == mock_grpc_channel - - -def test_dataplex_service_grpc_lro_client(): - client = DataplexServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - transport = client.transport - - # Ensure that we have a api-core operations client. - assert isinstance( - transport.operations_client, - operations_v1.OperationsClient, - ) - - # Ensure that subsequent calls to the property send the exact same object. 
-    assert transport.operations_client is transport.operations_client
-
-
-def test_dataplex_service_grpc_lro_async_client():
-    client = DataplexServiceAsyncClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport='grpc_asyncio',
-    )
-    transport = client.transport
-
-    # Ensure that we have an api-core operations client.
-    assert isinstance(
-        transport.operations_client,
-        operations_v1.OperationsAsyncClient,
-    )
-
-    # Ensure that subsequent calls to the property send the exact same object.
-    assert transport.operations_client is transport.operations_client
-
-
-def test_action_path():
-    project = "squid"
-    location = "clam"
-    lake = "whelk"
-    action = "octopus"
-    expected = "projects/{project}/locations/{location}/lakes/{lake}/actions/{action}".format(project=project, location=location, lake=lake, action=action, )
-    actual = DataplexServiceClient.action_path(project, location, lake, action)
-    assert expected == actual
-
-
-def test_parse_action_path():
-    expected = {
-        "project": "oyster",
-        "location": "nudibranch",
-        "lake": "cuttlefish",
-        "action": "mussel",
-    }
-    path = DataplexServiceClient.action_path(**expected)
-
-    # Check that the path construction is reversible.
-    actual = DataplexServiceClient.parse_action_path(path)
-    assert expected == actual
-
-def test_asset_path():
-    project = "winkle"
-    location = "nautilus"
-    lake = "scallop"
-    zone = "abalone"
-    asset = "squid"
-    expected = "projects/{project}/locations/{location}/lakes/{lake}/zones/{zone}/assets/{asset}".format(project=project, location=location, lake=lake, zone=zone, asset=asset, )
-    actual = DataplexServiceClient.asset_path(project, location, lake, zone, asset)
-    assert expected == actual
-
-
-def test_parse_asset_path():
-    expected = {
-        "project": "clam",
-        "location": "whelk",
-        "lake": "octopus",
-        "zone": "oyster",
-        "asset": "nudibranch",
-    }
-    path = DataplexServiceClient.asset_path(**expected)
-
-    # Check that the path construction is reversible.
-    actual = DataplexServiceClient.parse_asset_path(path)
-    assert expected == actual
-
-def test_environment_path():
-    project = "cuttlefish"
-    location = "mussel"
-    lake = "winkle"
-    environment = "nautilus"
-    expected = "projects/{project}/locations/{location}/lakes/{lake}/environments/{environment}".format(project=project, location=location, lake=lake, environment=environment, )
-    actual = DataplexServiceClient.environment_path(project, location, lake, environment)
-    assert expected == actual
-
-
-def test_parse_environment_path():
-    expected = {
-        "project": "scallop",
-        "location": "abalone",
-        "lake": "squid",
-        "environment": "clam",
-    }
-    path = DataplexServiceClient.environment_path(**expected)
-
-    # Check that the path construction is reversible.
-    actual = DataplexServiceClient.parse_environment_path(path)
-    assert expected == actual
-
-def test_job_path():
-    project = "whelk"
-    location = "octopus"
-    lake = "oyster"
-    task = "nudibranch"
-    job = "cuttlefish"
-    expected = "projects/{project}/locations/{location}/lakes/{lake}/tasks/{task}/jobs/{job}".format(project=project, location=location, lake=lake, task=task, job=job, )
-    actual = DataplexServiceClient.job_path(project, location, lake, task, job)
-    assert expected == actual
-
-
-def test_parse_job_path():
-    expected = {
-        "project": "mussel",
-        "location": "winkle",
-        "lake": "nautilus",
-        "task": "scallop",
-        "job": "abalone",
-    }
-    path = DataplexServiceClient.job_path(**expected)
-
-    # Check that the path construction is reversible.
- actual = DataplexServiceClient.parse_job_path(path) - assert expected == actual - -def test_lake_path(): - project = "squid" - location = "clam" - lake = "whelk" - expected = "projects/{project}/locations/{location}/lakes/{lake}".format(project=project, location=location, lake=lake, ) - actual = DataplexServiceClient.lake_path(project, location, lake) - assert expected == actual - - -def test_parse_lake_path(): - expected = { - "project": "octopus", - "location": "oyster", - "lake": "nudibranch", - } - path = DataplexServiceClient.lake_path(**expected) - - # Check that the path construction is reversible. - actual = DataplexServiceClient.parse_lake_path(path) - assert expected == actual - -def test_session_path(): - project = "cuttlefish" - location = "mussel" - lake = "winkle" - environment = "nautilus" - session = "scallop" - expected = "projects/{project}/locations/{location}/lakes/{lake}/environments/{environment}/sessions/{session}".format(project=project, location=location, lake=lake, environment=environment, session=session, ) - actual = DataplexServiceClient.session_path(project, location, lake, environment, session) - assert expected == actual - - -def test_parse_session_path(): - expected = { - "project": "abalone", - "location": "squid", - "lake": "clam", - "environment": "whelk", - "session": "octopus", - } - path = DataplexServiceClient.session_path(**expected) - - # Check that the path construction is reversible. - actual = DataplexServiceClient.parse_session_path(path) - assert expected == actual - -def test_task_path(): - project = "oyster" - location = "nudibranch" - lake = "cuttlefish" - task = "mussel" - expected = "projects/{project}/locations/{location}/lakes/{lake}/tasks/{task}".format(project=project, location=location, lake=lake, task=task, ) - actual = DataplexServiceClient.task_path(project, location, lake, task) - assert expected == actual - - -def test_parse_task_path(): - expected = { - "project": "winkle", - "location": "nautilus", - "lake": "scallop", - "task": "abalone", - } - path = DataplexServiceClient.task_path(**expected) - - # Check that the path construction is reversible. - actual = DataplexServiceClient.parse_task_path(path) - assert expected == actual - -def test_zone_path(): - project = "squid" - location = "clam" - lake = "whelk" - zone = "octopus" - expected = "projects/{project}/locations/{location}/lakes/{lake}/zones/{zone}".format(project=project, location=location, lake=lake, zone=zone, ) - actual = DataplexServiceClient.zone_path(project, location, lake, zone) - assert expected == actual - - -def test_parse_zone_path(): - expected = { - "project": "oyster", - "location": "nudibranch", - "lake": "cuttlefish", - "zone": "mussel", - } - path = DataplexServiceClient.zone_path(**expected) - - # Check that the path construction is reversible. - actual = DataplexServiceClient.parse_zone_path(path) - assert expected == actual - -def test_common_billing_account_path(): - billing_account = "winkle" - expected = "billingAccounts/{billing_account}".format(billing_account=billing_account, ) - actual = DataplexServiceClient.common_billing_account_path(billing_account) - assert expected == actual - - -def test_parse_common_billing_account_path(): - expected = { - "billing_account": "nautilus", - } - path = DataplexServiceClient.common_billing_account_path(**expected) - - # Check that the path construction is reversible. 
- actual = DataplexServiceClient.parse_common_billing_account_path(path) - assert expected == actual - -def test_common_folder_path(): - folder = "scallop" - expected = "folders/{folder}".format(folder=folder, ) - actual = DataplexServiceClient.common_folder_path(folder) - assert expected == actual - - -def test_parse_common_folder_path(): - expected = { - "folder": "abalone", - } - path = DataplexServiceClient.common_folder_path(**expected) - - # Check that the path construction is reversible. - actual = DataplexServiceClient.parse_common_folder_path(path) - assert expected == actual - -def test_common_organization_path(): - organization = "squid" - expected = "organizations/{organization}".format(organization=organization, ) - actual = DataplexServiceClient.common_organization_path(organization) - assert expected == actual - - -def test_parse_common_organization_path(): - expected = { - "organization": "clam", - } - path = DataplexServiceClient.common_organization_path(**expected) - - # Check that the path construction is reversible. - actual = DataplexServiceClient.parse_common_organization_path(path) - assert expected == actual - -def test_common_project_path(): - project = "whelk" - expected = "projects/{project}".format(project=project, ) - actual = DataplexServiceClient.common_project_path(project) - assert expected == actual - - -def test_parse_common_project_path(): - expected = { - "project": "octopus", - } - path = DataplexServiceClient.common_project_path(**expected) - - # Check that the path construction is reversible. - actual = DataplexServiceClient.parse_common_project_path(path) - assert expected == actual - -def test_common_location_path(): - project = "oyster" - location = "nudibranch" - expected = "projects/{project}/locations/{location}".format(project=project, location=location, ) - actual = DataplexServiceClient.common_location_path(project, location) - assert expected == actual - - -def test_parse_common_location_path(): - expected = { - "project": "cuttlefish", - "location": "mussel", - } - path = DataplexServiceClient.common_location_path(**expected) - - # Check that the path construction is reversible. - actual = DataplexServiceClient.parse_common_location_path(path) - assert expected == actual - - -def test_client_with_default_client_info(): - client_info = gapic_v1.client_info.ClientInfo() - - with mock.patch.object(transports.DataplexServiceTransport, '_prep_wrapped_messages') as prep: - client = DataplexServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - client_info=client_info, - ) - prep.assert_called_once_with(client_info) - - with mock.patch.object(transports.DataplexServiceTransport, '_prep_wrapped_messages') as prep: - transport_class = DataplexServiceClient.get_transport_class() - transport = transport_class( - credentials=ga_credentials.AnonymousCredentials(), - client_info=client_info, - ) - prep.assert_called_once_with(client_info) - - -def test_delete_operation(transport: str = "grpc"): - client = DataplexServiceClient( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = operations_pb2.DeleteOperationRequest() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.delete_operation), "__call__") as call: - # Designate an appropriate return value for the call. 
- call.return_value = None - response = client.delete_operation(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the response is the type that we expect. - assert response is None -@pytest.mark.asyncio -async def test_delete_operation_async(transport: str = "grpc_asyncio"): - client = DataplexServiceAsyncClient( - credentials=async_anonymous_credentials(), transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = operations_pb2.DeleteOperationRequest() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.delete_operation), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - None - ) - response = await client.delete_operation(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the response is the type that we expect. - assert response is None - -def test_delete_operation_field_headers(): - client = DataplexServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = operations_pb2.DeleteOperationRequest() - request.name = "locations" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.delete_operation), "__call__") as call: - call.return_value = None - - client.delete_operation(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "name=locations",) in kw["metadata"] -@pytest.mark.asyncio -async def test_delete_operation_field_headers_async(): - client = DataplexServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = operations_pb2.DeleteOperationRequest() - request.name = "locations" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.delete_operation), "__call__") as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - None - ) - await client.delete_operation(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "name=locations",) in kw["metadata"] - -def test_delete_operation_from_dict(): - client = DataplexServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.delete_operation), "__call__") as call: - # Designate an appropriate return value for the call. 
- call.return_value = None - - response = client.delete_operation( - request={ - "name": "locations", - } - ) - call.assert_called() -@pytest.mark.asyncio -async def test_delete_operation_from_dict_async(): - client = DataplexServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.delete_operation), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - None - ) - response = await client.delete_operation( - request={ - "name": "locations", - } - ) - call.assert_called() - - -def test_cancel_operation(transport: str = "grpc"): - client = DataplexServiceClient( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = operations_pb2.CancelOperationRequest() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = None - response = client.cancel_operation(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the response is the type that we expect. - assert response is None -@pytest.mark.asyncio -async def test_cancel_operation_async(transport: str = "grpc_asyncio"): - client = DataplexServiceAsyncClient( - credentials=async_anonymous_credentials(), transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = operations_pb2.CancelOperationRequest() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - None - ) - response = await client.cancel_operation(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the response is the type that we expect. - assert response is None - -def test_cancel_operation_field_headers(): - client = DataplexServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = operations_pb2.CancelOperationRequest() - request.name = "locations" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: - call.return_value = None - - client.cancel_operation(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. 
- _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "name=locations",) in kw["metadata"] -@pytest.mark.asyncio -async def test_cancel_operation_field_headers_async(): - client = DataplexServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = operations_pb2.CancelOperationRequest() - request.name = "locations" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - None - ) - await client.cancel_operation(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "name=locations",) in kw["metadata"] - -def test_cancel_operation_from_dict(): - client = DataplexServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = None - - response = client.cancel_operation( - request={ - "name": "locations", - } - ) - call.assert_called() -@pytest.mark.asyncio -async def test_cancel_operation_from_dict_async(): - client = DataplexServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - None - ) - response = await client.cancel_operation( - request={ - "name": "locations", - } - ) - call.assert_called() - - -def test_get_operation(transport: str = "grpc"): - client = DataplexServiceClient( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = operations_pb2.GetOperationRequest() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_operation), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = operations_pb2.Operation() - response = client.get_operation(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, operations_pb2.Operation) -@pytest.mark.asyncio -async def test_get_operation_async(transport: str = "grpc_asyncio"): - client = DataplexServiceAsyncClient( - credentials=async_anonymous_credentials(), transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = operations_pb2.GetOperationRequest() - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object(type(client.transport.get_operation), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation() - ) - response = await client.get_operation(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, operations_pb2.Operation) - -def test_get_operation_field_headers(): - client = DataplexServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = operations_pb2.GetOperationRequest() - request.name = "locations" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_operation), "__call__") as call: - call.return_value = operations_pb2.Operation() - - client.get_operation(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "name=locations",) in kw["metadata"] -@pytest.mark.asyncio -async def test_get_operation_field_headers_async(): - client = DataplexServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = operations_pb2.GetOperationRequest() - request.name = "locations" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_operation), "__call__") as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation() - ) - await client.get_operation(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "name=locations",) in kw["metadata"] - -def test_get_operation_from_dict(): - client = DataplexServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_operation), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = operations_pb2.Operation() - - response = client.get_operation( - request={ - "name": "locations", - } - ) - call.assert_called() -@pytest.mark.asyncio -async def test_get_operation_from_dict_async(): - client = DataplexServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_operation), "__call__") as call: - # Designate an appropriate return value for the call. 
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation() - ) - response = await client.get_operation( - request={ - "name": "locations", - } - ) - call.assert_called() - - -def test_list_operations(transport: str = "grpc"): - client = DataplexServiceClient( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = operations_pb2.ListOperationsRequest() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_operations), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = operations_pb2.ListOperationsResponse() - response = client.list_operations(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, operations_pb2.ListOperationsResponse) -@pytest.mark.asyncio -async def test_list_operations_async(transport: str = "grpc_asyncio"): - client = DataplexServiceAsyncClient( - credentials=async_anonymous_credentials(), transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = operations_pb2.ListOperationsRequest() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_operations), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.ListOperationsResponse() - ) - response = await client.list_operations(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, operations_pb2.ListOperationsResponse) - -def test_list_operations_field_headers(): - client = DataplexServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = operations_pb2.ListOperationsRequest() - request.name = "locations" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_operations), "__call__") as call: - call.return_value = operations_pb2.ListOperationsResponse() - - client.list_operations(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "name=locations",) in kw["metadata"] -@pytest.mark.asyncio -async def test_list_operations_field_headers_async(): - client = DataplexServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. 
- request = operations_pb2.ListOperationsRequest() - request.name = "locations" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_operations), "__call__") as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.ListOperationsResponse() - ) - await client.list_operations(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "name=locations",) in kw["metadata"] - -def test_list_operations_from_dict(): - client = DataplexServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_operations), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = operations_pb2.ListOperationsResponse() - - response = client.list_operations( - request={ - "name": "locations", - } - ) - call.assert_called() -@pytest.mark.asyncio -async def test_list_operations_from_dict_async(): - client = DataplexServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_operations), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.ListOperationsResponse() - ) - response = await client.list_operations( - request={ - "name": "locations", - } - ) - call.assert_called() - - -def test_list_locations(transport: str = "grpc"): - client = DataplexServiceClient( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = locations_pb2.ListLocationsRequest() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_locations), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = locations_pb2.ListLocationsResponse() - response = client.list_locations(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, locations_pb2.ListLocationsResponse) -@pytest.mark.asyncio -async def test_list_locations_async(transport: str = "grpc_asyncio"): - client = DataplexServiceAsyncClient( - credentials=async_anonymous_credentials(), transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = locations_pb2.ListLocationsRequest() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_locations), "__call__") as call: - # Designate an appropriate return value for the call. 
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - locations_pb2.ListLocationsResponse() - ) - response = await client.list_locations(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, locations_pb2.ListLocationsResponse) - -def test_list_locations_field_headers(): - client = DataplexServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = locations_pb2.ListLocationsRequest() - request.name = "locations" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_locations), "__call__") as call: - call.return_value = locations_pb2.ListLocationsResponse() - - client.list_locations(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "name=locations",) in kw["metadata"] -@pytest.mark.asyncio -async def test_list_locations_field_headers_async(): - client = DataplexServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = locations_pb2.ListLocationsRequest() - request.name = "locations" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_locations), "__call__") as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - locations_pb2.ListLocationsResponse() - ) - await client.list_locations(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "name=locations",) in kw["metadata"] - -def test_list_locations_from_dict(): - client = DataplexServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_locations), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = locations_pb2.ListLocationsResponse() - - response = client.list_locations( - request={ - "name": "locations", - } - ) - call.assert_called() -@pytest.mark.asyncio -async def test_list_locations_from_dict_async(): - client = DataplexServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_locations), "__call__") as call: - # Designate an appropriate return value for the call. 
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - locations_pb2.ListLocationsResponse() - ) - response = await client.list_locations( - request={ - "name": "locations", - } - ) - call.assert_called() - - -def test_get_location(transport: str = "grpc"): - client = DataplexServiceClient( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = locations_pb2.GetLocationRequest() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_location), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = locations_pb2.Location() - response = client.get_location(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, locations_pb2.Location) -@pytest.mark.asyncio -async def test_get_location_async(transport: str = "grpc_asyncio"): - client = DataplexServiceAsyncClient( - credentials=async_anonymous_credentials(), transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = locations_pb2.GetLocationRequest() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_location), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - locations_pb2.Location() - ) - response = await client.get_location(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, locations_pb2.Location) - -def test_get_location_field_headers(): - client = DataplexServiceClient( - credentials=ga_credentials.AnonymousCredentials()) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = locations_pb2.GetLocationRequest() - request.name = "locations/abc" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_location), "__call__") as call: - call.return_value = locations_pb2.Location() - - client.get_location(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "name=locations/abc",) in kw["metadata"] -@pytest.mark.asyncio -async def test_get_location_field_headers_async(): - client = DataplexServiceAsyncClient( - credentials=async_anonymous_credentials() - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = locations_pb2.GetLocationRequest() - request.name = "locations/abc" - - # Mock the actual call within the gRPC stub, and fake the request. 
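- # The transport attribute (e.g. client.transport.get_location) is a
- # gRPC multicallable -- a callable object, not a plain function -- so
- # these tests patch __call__ on its *type* to intercept the invocation;
- # patching the attribute itself would not affect the bound call. A
- # sketch of the pattern (assuming an existing `client`):
- #
- #     stub_type = type(client.transport.get_location)
- #     with mock.patch.object(stub_type, "__call__") as call:
- #         ...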
- with mock.patch.object(type(client.transport.get_location), "__call__") as call:
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
- locations_pb2.Location()
- )
- await client.get_location(request)
- # Establish that the underlying gRPC stub method was called.
- assert len(call.mock_calls) == 1
- _, args, _ = call.mock_calls[0]
- assert args[0] == request
-
- # Establish that the field header was sent.
- _, _, kw = call.mock_calls[0]
- assert ("x-goog-request-params", "name=locations/abc",) in kw["metadata"]
-
-def test_get_location_from_dict():
- client = DataplexServiceClient(
- credentials=ga_credentials.AnonymousCredentials(),
- )
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(type(client.transport.get_location), "__call__") as call:
- # Designate an appropriate return value for the call.
- call.return_value = locations_pb2.Location()
-
- response = client.get_location(
- request={
- "name": "locations/abc",
- }
- )
- call.assert_called()
-@pytest.mark.asyncio
-async def test_get_location_from_dict_async():
- client = DataplexServiceAsyncClient(
- credentials=async_anonymous_credentials(),
- )
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(type(client.transport.get_location), "__call__") as call:
- # Designate an appropriate return value for the call.
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
- locations_pb2.Location()
- )
- response = await client.get_location(
- request={
- "name": "locations/abc",
- }
- )
- call.assert_called()
-
-
-def test_transport_close_grpc():
- client = DataplexServiceClient(
- credentials=ga_credentials.AnonymousCredentials(),
- transport="grpc"
- )
- with mock.patch.object(type(getattr(client.transport, "_grpc_channel")), "close") as close:
- with client:
- close.assert_not_called()
- close.assert_called_once()
-
-
-@pytest.mark.asyncio
-async def test_transport_close_grpc_asyncio():
- client = DataplexServiceAsyncClient(
- credentials=async_anonymous_credentials(),
- transport="grpc_asyncio"
- )
- with mock.patch.object(type(getattr(client.transport, "_grpc_channel")), "close") as close:
- async with client:
- close.assert_not_called()
- close.assert_called_once()
-
-
-def test_client_ctx():
- transports = [
- 'grpc',
- ]
- for transport in transports:
- client = DataplexServiceClient(
- credentials=ga_credentials.AnonymousCredentials(),
- transport=transport
- )
- # Test client calls underlying transport.
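- # The generated clients implement the context manager protocol by
- # closing their transport on exit, so the usage pattern asserted here
- # is, in sketch form (credentials placeholder only):
- #
- #     with DataplexServiceClient(credentials=creds) as client:
- #         ...  # make calls
- #     # on exit, client.transport.close() has been invoked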
- with mock.patch.object(type(client.transport), "close") as close: - close.assert_not_called() - with client: - pass - close.assert_called() - -@pytest.mark.parametrize("client_class,transport_class", [ - (DataplexServiceClient, transports.DataplexServiceGrpcTransport), - (DataplexServiceAsyncClient, transports.DataplexServiceGrpcAsyncIOTransport), -]) -def test_api_key_credentials(client_class, transport_class): - with mock.patch.object( - google.auth._default, "get_api_key_credentials", create=True - ) as get_api_key_credentials: - mock_cred = mock.Mock() - get_api_key_credentials.return_value = mock_cred - options = client_options.ClientOptions() - options.api_key = "api_key" - with mock.patch.object(transport_class, "__init__") as patched: - patched.return_value = None - client = client_class(client_options=options) - patched.assert_called_once_with( - credentials=mock_cred, - credentials_file=None, - host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE), - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) diff --git a/owl-bot-staging/google-cloud-dataplex/v1/tests/unit/gapic/dataplex_v1/test_metadata_service.py b/owl-bot-staging/google-cloud-dataplex/v1/tests/unit/gapic/dataplex_v1/test_metadata_service.py deleted file mode 100644 index a5293c1b348a..000000000000 --- a/owl-bot-staging/google-cloud-dataplex/v1/tests/unit/gapic/dataplex_v1/test_metadata_service.py +++ /dev/null @@ -1,5952 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# -import os -# try/except added for compatibility with python < 3.8 -try: - from unittest import mock - from unittest.mock import AsyncMock # pragma: NO COVER -except ImportError: # pragma: NO COVER - import mock - -import grpc -from grpc.experimental import aio -import math -import pytest -from google.api_core import api_core_version -from proto.marshal.rules.dates import DurationRule, TimestampRule -from proto.marshal.rules import wrappers - -try: - from google.auth.aio import credentials as ga_credentials_async - HAS_GOOGLE_AUTH_AIO = True -except ImportError: # pragma: NO COVER - HAS_GOOGLE_AUTH_AIO = False - -from google.api_core import client_options -from google.api_core import exceptions as core_exceptions -from google.api_core import gapic_v1 -from google.api_core import grpc_helpers -from google.api_core import grpc_helpers_async -from google.api_core import path_template -from google.api_core import retry as retries -from google.auth import credentials as ga_credentials -from google.auth.exceptions import MutualTLSChannelError -from google.cloud.dataplex_v1.services.metadata_service import MetadataServiceAsyncClient -from google.cloud.dataplex_v1.services.metadata_service import MetadataServiceClient -from google.cloud.dataplex_v1.services.metadata_service import pagers -from google.cloud.dataplex_v1.services.metadata_service import transports -from google.cloud.dataplex_v1.types import metadata_ -from google.cloud.location import locations_pb2 -from google.iam.v1 import iam_policy_pb2 # type: ignore -from google.iam.v1 import options_pb2 # type: ignore -from google.iam.v1 import policy_pb2 # type: ignore -from google.longrunning import operations_pb2 # type: ignore -from google.oauth2 import service_account -from google.protobuf import timestamp_pb2 # type: ignore -import google.auth - - -async def mock_async_gen(data, chunk_size=1): - for i in range(0, len(data)): # pragma: NO COVER - chunk = data[i : i + chunk_size] - yield chunk.encode("utf-8") - -def client_cert_source_callback(): - return b"cert bytes", b"key bytes" - -# TODO: use async auth anon credentials by default once the minimum version of google-auth is upgraded. -# See related issue: https://github.com/googleapis/gapic-generator-python/issues/2107. -def async_anonymous_credentials(): - if HAS_GOOGLE_AUTH_AIO: - return ga_credentials_async.AnonymousCredentials() - return ga_credentials.AnonymousCredentials() - -# If default endpoint is localhost, then default mtls endpoint will be the same. -# This method modifies the default endpoint so the client can produce a different -# mtls endpoint for endpoint testing purposes. -def modify_default_endpoint(client): - return "foo.googleapis.com" if ("localhost" in client.DEFAULT_ENDPOINT) else client.DEFAULT_ENDPOINT - -# If default endpoint template is localhost, then default mtls endpoint will be the same. -# This method modifies the default endpoint template so the client can produce a different -# mtls endpoint for endpoint testing purposes. 
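- # For reference, _DEFAULT_ENDPOINT_TEMPLATE on the generated clients is
- # a format string along the lines of "dataplex.{UNIVERSE_DOMAIN}", so
- # the default endpoint is produced by (sketch):
- #
- #     "dataplex.{UNIVERSE_DOMAIN}".format(UNIVERSE_DOMAIN="googleapis.com")
- #     # -> "dataplex.googleapis.com"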
-def modify_default_endpoint_template(client): - return "test.{UNIVERSE_DOMAIN}" if ("localhost" in client._DEFAULT_ENDPOINT_TEMPLATE) else client._DEFAULT_ENDPOINT_TEMPLATE - - -def test__get_default_mtls_endpoint(): - api_endpoint = "example.googleapis.com" - api_mtls_endpoint = "example.mtls.googleapis.com" - sandbox_endpoint = "example.sandbox.googleapis.com" - sandbox_mtls_endpoint = "example.mtls.sandbox.googleapis.com" - non_googleapi = "api.example.com" - - assert MetadataServiceClient._get_default_mtls_endpoint(None) is None - assert MetadataServiceClient._get_default_mtls_endpoint(api_endpoint) == api_mtls_endpoint - assert MetadataServiceClient._get_default_mtls_endpoint(api_mtls_endpoint) == api_mtls_endpoint - assert MetadataServiceClient._get_default_mtls_endpoint(sandbox_endpoint) == sandbox_mtls_endpoint - assert MetadataServiceClient._get_default_mtls_endpoint(sandbox_mtls_endpoint) == sandbox_mtls_endpoint - assert MetadataServiceClient._get_default_mtls_endpoint(non_googleapi) == non_googleapi - -def test__read_environment_variables(): - assert MetadataServiceClient._read_environment_variables() == (False, "auto", None) - - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): - assert MetadataServiceClient._read_environment_variables() == (True, "auto", None) - - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "false"}): - assert MetadataServiceClient._read_environment_variables() == (False, "auto", None) - - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"}): - with pytest.raises(ValueError) as excinfo: - MetadataServiceClient._read_environment_variables() - assert str(excinfo.value) == "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" - - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): - assert MetadataServiceClient._read_environment_variables() == (False, "never", None) - - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): - assert MetadataServiceClient._read_environment_variables() == (False, "always", None) - - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"}): - assert MetadataServiceClient._read_environment_variables() == (False, "auto", None) - - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): - with pytest.raises(MutualTLSChannelError) as excinfo: - MetadataServiceClient._read_environment_variables() - assert str(excinfo.value) == "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" - - with mock.patch.dict(os.environ, {"GOOGLE_CLOUD_UNIVERSE_DOMAIN": "foo.com"}): - assert MetadataServiceClient._read_environment_variables() == (False, "auto", "foo.com") - -def test__get_client_cert_source(): - mock_provided_cert_source = mock.Mock() - mock_default_cert_source = mock.Mock() - - assert MetadataServiceClient._get_client_cert_source(None, False) is None - assert MetadataServiceClient._get_client_cert_source(mock_provided_cert_source, False) is None - assert MetadataServiceClient._get_client_cert_source(mock_provided_cert_source, True) == mock_provided_cert_source - - with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=True): - with mock.patch('google.auth.transport.mtls.default_client_cert_source', return_value=mock_default_cert_source): - assert MetadataServiceClient._get_client_cert_source(None, True) is mock_default_cert_source - assert 
MetadataServiceClient._get_client_cert_source(mock_provided_cert_source, True) is mock_provided_cert_source
-
-@mock.patch.object(MetadataServiceClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(MetadataServiceClient))
-@mock.patch.object(MetadataServiceAsyncClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(MetadataServiceAsyncClient))
-def test__get_api_endpoint():
- api_override = "foo.com"
- mock_client_cert_source = mock.Mock()
- default_universe = MetadataServiceClient._DEFAULT_UNIVERSE
- default_endpoint = MetadataServiceClient._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=default_universe)
- mock_universe = "bar.com"
- mock_endpoint = MetadataServiceClient._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=mock_universe)
-
- assert MetadataServiceClient._get_api_endpoint(api_override, mock_client_cert_source, default_universe, "always") == api_override
- assert MetadataServiceClient._get_api_endpoint(None, mock_client_cert_source, default_universe, "auto") == MetadataServiceClient.DEFAULT_MTLS_ENDPOINT
- assert MetadataServiceClient._get_api_endpoint(None, None, default_universe, "auto") == default_endpoint
- assert MetadataServiceClient._get_api_endpoint(None, None, default_universe, "always") == MetadataServiceClient.DEFAULT_MTLS_ENDPOINT
- assert MetadataServiceClient._get_api_endpoint(None, mock_client_cert_source, default_universe, "always") == MetadataServiceClient.DEFAULT_MTLS_ENDPOINT
- assert MetadataServiceClient._get_api_endpoint(None, None, mock_universe, "never") == mock_endpoint
- assert MetadataServiceClient._get_api_endpoint(None, None, default_universe, "never") == default_endpoint
-
- with pytest.raises(MutualTLSChannelError) as excinfo:
- MetadataServiceClient._get_api_endpoint(None, mock_client_cert_source, mock_universe, "auto")
- assert str(excinfo.value) == "mTLS is not supported in any universe other than googleapis.com."
-
-
-def test__get_universe_domain():
- client_universe_domain = "foo.com"
- universe_domain_env = "bar.com"
-
- assert MetadataServiceClient._get_universe_domain(client_universe_domain, universe_domain_env) == client_universe_domain
- assert MetadataServiceClient._get_universe_domain(None, universe_domain_env) == universe_domain_env
- assert MetadataServiceClient._get_universe_domain(None, None) == MetadataServiceClient._DEFAULT_UNIVERSE
-
- with pytest.raises(ValueError) as excinfo:
- MetadataServiceClient._get_universe_domain("", None)
- assert str(excinfo.value) == "Universe Domain cannot be an empty string."
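- # Resolution order exercised above: an explicit client-supplied
- # universe domain wins over the GOOGLE_CLOUD_UNIVERSE_DOMAIN environment
- # variable, which in turn wins over the "googleapis.com" default. In
- # client terms (values illustrative):
- #
- #     options = client_options.ClientOptions(universe_domain="foo.com")
- #     client = MetadataServiceClient(client_options=options)
- #     client.universe_domain   # -> "foo.com", even if the env var is set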
- - -@pytest.mark.parametrize("client_class,transport_name", [ - (MetadataServiceClient, "grpc"), - (MetadataServiceAsyncClient, "grpc_asyncio"), -]) -def test_metadata_service_client_from_service_account_info(client_class, transport_name): - creds = ga_credentials.AnonymousCredentials() - with mock.patch.object(service_account.Credentials, 'from_service_account_info') as factory: - factory.return_value = creds - info = {"valid": True} - client = client_class.from_service_account_info(info, transport=transport_name) - assert client.transport._credentials == creds - assert isinstance(client, client_class) - - assert client.transport._host == ( - 'dataplex.googleapis.com:443' - ) - - -@pytest.mark.parametrize("transport_class,transport_name", [ - (transports.MetadataServiceGrpcTransport, "grpc"), - (transports.MetadataServiceGrpcAsyncIOTransport, "grpc_asyncio"), -]) -def test_metadata_service_client_service_account_always_use_jwt(transport_class, transport_name): - with mock.patch.object(service_account.Credentials, 'with_always_use_jwt_access', create=True) as use_jwt: - creds = service_account.Credentials(None, None, None) - transport = transport_class(credentials=creds, always_use_jwt_access=True) - use_jwt.assert_called_once_with(True) - - with mock.patch.object(service_account.Credentials, 'with_always_use_jwt_access', create=True) as use_jwt: - creds = service_account.Credentials(None, None, None) - transport = transport_class(credentials=creds, always_use_jwt_access=False) - use_jwt.assert_not_called() - - -@pytest.mark.parametrize("client_class,transport_name", [ - (MetadataServiceClient, "grpc"), - (MetadataServiceAsyncClient, "grpc_asyncio"), -]) -def test_metadata_service_client_from_service_account_file(client_class, transport_name): - creds = ga_credentials.AnonymousCredentials() - with mock.patch.object(service_account.Credentials, 'from_service_account_file') as factory: - factory.return_value = creds - client = client_class.from_service_account_file("dummy/file/path.json", transport=transport_name) - assert client.transport._credentials == creds - assert isinstance(client, client_class) - - client = client_class.from_service_account_json("dummy/file/path.json", transport=transport_name) - assert client.transport._credentials == creds - assert isinstance(client, client_class) - - assert client.transport._host == ( - 'dataplex.googleapis.com:443' - ) - - -def test_metadata_service_client_get_transport_class(): - transport = MetadataServiceClient.get_transport_class() - available_transports = [ - transports.MetadataServiceGrpcTransport, - ] - assert transport in available_transports - - transport = MetadataServiceClient.get_transport_class("grpc") - assert transport == transports.MetadataServiceGrpcTransport - - -@pytest.mark.parametrize("client_class,transport_class,transport_name", [ - (MetadataServiceClient, transports.MetadataServiceGrpcTransport, "grpc"), - (MetadataServiceAsyncClient, transports.MetadataServiceGrpcAsyncIOTransport, "grpc_asyncio"), -]) -@mock.patch.object(MetadataServiceClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(MetadataServiceClient)) -@mock.patch.object(MetadataServiceAsyncClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(MetadataServiceAsyncClient)) -def test_metadata_service_client_client_options(client_class, transport_class, transport_name): - # Check that if channel is provided we won't create a new one. 
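- # Transport precedence: a ready transport *instance* is used as-is and
- # get_transport_class is never consulted, while a transport *name*
- # makes the client look up and construct the class. Sketch (creds is a
- # placeholder):
- #
- #     transport = transports.MetadataServiceGrpcTransport(credentials=creds)
- #     MetadataServiceClient(transport=transport)   # reuses the instance
- #     MetadataServiceClient(transport="grpc")      # builds a new one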
- with mock.patch.object(MetadataServiceClient, 'get_transport_class') as gtc: - transport = transport_class( - credentials=ga_credentials.AnonymousCredentials() - ) - client = client_class(transport=transport) - gtc.assert_not_called() - - # Check that if channel is provided via str we will create a new one. - with mock.patch.object(MetadataServiceClient, 'get_transport_class') as gtc: - client = client_class(transport=transport_name) - gtc.assert_called() - - # Check the case api_endpoint is provided. - options = client_options.ClientOptions(api_endpoint="squid.clam.whelk") - with mock.patch.object(transport_class, '__init__') as patched: - patched.return_value = None - client = client_class(transport=transport_name, client_options=options) - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host="squid.clam.whelk", - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - - # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is - # "never". - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): - with mock.patch.object(transport_class, '__init__') as patched: - patched.return_value = None - client = client_class(transport=transport_name) - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE), - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - - # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is - # "always". - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): - with mock.patch.object(transport_class, '__init__') as patched: - patched.return_value = None - client = client_class(transport=transport_name) - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host=client.DEFAULT_MTLS_ENDPOINT, - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - - # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has - # unsupported value. - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): - with pytest.raises(MutualTLSChannelError) as excinfo: - client = client_class(transport=transport_name) - assert str(excinfo.value) == "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" - - # Check the case GOOGLE_API_USE_CLIENT_CERTIFICATE has unsupported value. 
- with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"}): - with pytest.raises(ValueError) as excinfo: - client = client_class(transport=transport_name) - assert str(excinfo.value) == "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" - - # Check the case quota_project_id is provided - options = client_options.ClientOptions(quota_project_id="octopus") - with mock.patch.object(transport_class, '__init__') as patched: - patched.return_value = None - client = client_class(client_options=options, transport=transport_name) - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE), - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id="octopus", - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - # Check the case api_endpoint is provided - options = client_options.ClientOptions(api_audience="https://language.googleapis.com") - with mock.patch.object(transport_class, '__init__') as patched: - patched.return_value = None - client = client_class(client_options=options, transport=transport_name) - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE), - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience="https://language.googleapis.com" - ) - -@pytest.mark.parametrize("client_class,transport_class,transport_name,use_client_cert_env", [ - (MetadataServiceClient, transports.MetadataServiceGrpcTransport, "grpc", "true"), - (MetadataServiceAsyncClient, transports.MetadataServiceGrpcAsyncIOTransport, "grpc_asyncio", "true"), - (MetadataServiceClient, transports.MetadataServiceGrpcTransport, "grpc", "false"), - (MetadataServiceAsyncClient, transports.MetadataServiceGrpcAsyncIOTransport, "grpc_asyncio", "false"), -]) -@mock.patch.object(MetadataServiceClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(MetadataServiceClient)) -@mock.patch.object(MetadataServiceAsyncClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(MetadataServiceAsyncClient)) -@mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"}) -def test_metadata_service_client_mtls_env_auto(client_class, transport_class, transport_name, use_client_cert_env): - # This tests the endpoint autoswitch behavior. Endpoint is autoswitched to the default - # mtls endpoint, if GOOGLE_API_USE_CLIENT_CERTIFICATE is "true" and client cert exists. - - # Check the case client_cert_source is provided. Whether client cert is used depends on - # GOOGLE_API_USE_CLIENT_CERTIFICATE value. 
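- # In "auto" mode the switch asserted below reduces to: use the mTLS
- # endpoint and the client certificate only when
- # GOOGLE_API_USE_CLIENT_CERTIFICATE is "true" AND a cert source is
- # available (explicitly provided or discovered via ADC); otherwise use
- # the regular endpoint with no certificate. As pseudocode:
- #
- #     use_mtls = use_client_cert_env == "true" and cert_source is not None
- #     host = client.DEFAULT_MTLS_ENDPOINT if use_mtls else default_endpoint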
- with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}): - options = client_options.ClientOptions(client_cert_source=client_cert_source_callback) - with mock.patch.object(transport_class, '__init__') as patched: - patched.return_value = None - client = client_class(client_options=options, transport=transport_name) - - if use_client_cert_env == "false": - expected_client_cert_source = None - expected_host = client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE) - else: - expected_client_cert_source = client_cert_source_callback - expected_host = client.DEFAULT_MTLS_ENDPOINT - - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host=expected_host, - scopes=None, - client_cert_source_for_mtls=expected_client_cert_source, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - - # Check the case ADC client cert is provided. Whether client cert is used depends on - # GOOGLE_API_USE_CLIENT_CERTIFICATE value. - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}): - with mock.patch.object(transport_class, '__init__') as patched: - with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=True): - with mock.patch('google.auth.transport.mtls.default_client_cert_source', return_value=client_cert_source_callback): - if use_client_cert_env == "false": - expected_host = client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE) - expected_client_cert_source = None - else: - expected_host = client.DEFAULT_MTLS_ENDPOINT - expected_client_cert_source = client_cert_source_callback - - patched.return_value = None - client = client_class(transport=transport_name) - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host=expected_host, - scopes=None, - client_cert_source_for_mtls=expected_client_cert_source, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - - # Check the case client_cert_source and ADC client cert are not provided. - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}): - with mock.patch.object(transport_class, '__init__') as patched: - with mock.patch("google.auth.transport.mtls.has_default_client_cert_source", return_value=False): - patched.return_value = None - client = client_class(transport=transport_name) - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE), - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - - -@pytest.mark.parametrize("client_class", [ - MetadataServiceClient, MetadataServiceAsyncClient -]) -@mock.patch.object(MetadataServiceClient, "DEFAULT_ENDPOINT", modify_default_endpoint(MetadataServiceClient)) -@mock.patch.object(MetadataServiceAsyncClient, "DEFAULT_ENDPOINT", modify_default_endpoint(MetadataServiceAsyncClient)) -def test_metadata_service_client_get_mtls_endpoint_and_cert_source(client_class): - mock_client_cert_source = mock.Mock() - - # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "true". 
- with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): - mock_api_endpoint = "foo" - options = client_options.ClientOptions(client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint) - api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source(options) - assert api_endpoint == mock_api_endpoint - assert cert_source == mock_client_cert_source - - # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "false". - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "false"}): - mock_client_cert_source = mock.Mock() - mock_api_endpoint = "foo" - options = client_options.ClientOptions(client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint) - api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source(options) - assert api_endpoint == mock_api_endpoint - assert cert_source is None - - # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "never". - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): - api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() - assert api_endpoint == client_class.DEFAULT_ENDPOINT - assert cert_source is None - - # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "always". - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): - api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() - assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT - assert cert_source is None - - # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert doesn't exist. - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): - with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=False): - api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() - assert api_endpoint == client_class.DEFAULT_ENDPOINT - assert cert_source is None - - # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert exists. - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): - with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=True): - with mock.patch('google.auth.transport.mtls.default_client_cert_source', return_value=mock_client_cert_source): - api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() - assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT - assert cert_source == mock_client_cert_source - - # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has - # unsupported value. - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): - with pytest.raises(MutualTLSChannelError) as excinfo: - client_class.get_mtls_endpoint_and_cert_source() - - assert str(excinfo.value) == "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" - - # Check the case GOOGLE_API_USE_CLIENT_CERTIFICATE has unsupported value. 
- with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"}): - with pytest.raises(ValueError) as excinfo: - client_class.get_mtls_endpoint_and_cert_source() - - assert str(excinfo.value) == "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" - -@pytest.mark.parametrize("client_class", [ - MetadataServiceClient, MetadataServiceAsyncClient -]) -@mock.patch.object(MetadataServiceClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(MetadataServiceClient)) -@mock.patch.object(MetadataServiceAsyncClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(MetadataServiceAsyncClient)) -def test_metadata_service_client_client_api_endpoint(client_class): - mock_client_cert_source = client_cert_source_callback - api_override = "foo.com" - default_universe = MetadataServiceClient._DEFAULT_UNIVERSE - default_endpoint = MetadataServiceClient._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=default_universe) - mock_universe = "bar.com" - mock_endpoint = MetadataServiceClient._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=mock_universe) - - # If ClientOptions.api_endpoint is set and GOOGLE_API_USE_CLIENT_CERTIFICATE="true", - # use ClientOptions.api_endpoint as the api endpoint regardless. - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): - with mock.patch("google.auth.transport.requests.AuthorizedSession.configure_mtls_channel"): - options = client_options.ClientOptions(client_cert_source=mock_client_cert_source, api_endpoint=api_override) - client = client_class(client_options=options, credentials=ga_credentials.AnonymousCredentials()) - assert client.api_endpoint == api_override - - # If ClientOptions.api_endpoint is not set and GOOGLE_API_USE_MTLS_ENDPOINT="never", - # use the _DEFAULT_ENDPOINT_TEMPLATE populated with GDU as the api endpoint. - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): - client = client_class(credentials=ga_credentials.AnonymousCredentials()) - assert client.api_endpoint == default_endpoint - - # If ClientOptions.api_endpoint is not set and GOOGLE_API_USE_MTLS_ENDPOINT="always", - # use the DEFAULT_MTLS_ENDPOINT as the api endpoint. - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): - client = client_class(credentials=ga_credentials.AnonymousCredentials()) - assert client.api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT - - # If ClientOptions.api_endpoint is not set, GOOGLE_API_USE_MTLS_ENDPOINT="auto" (default), - # GOOGLE_API_USE_CLIENT_CERTIFICATE="false" (default), default cert source doesn't exist, - # and ClientOptions.universe_domain="bar.com", - # use the _DEFAULT_ENDPOINT_TEMPLATE populated with universe domain as the api endpoint. 
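- # A sketch of that case, with "bar.com" standing in for a real universe
- # domain (endpoint value follows from the dataplex endpoint template):
- #
- #     options = client_options.ClientOptions(universe_domain="bar.com")
- #     client = MetadataServiceClient(client_options=options, credentials=creds)
- #     client.api_endpoint   # -> "dataplex.bar.com"
- #
- # The hasattr() guard below keeps the test passing against older
- # google-api-core releases whose ClientOptions lacks universe_domain.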
- options = client_options.ClientOptions() - universe_exists = hasattr(options, "universe_domain") - if universe_exists: - options = client_options.ClientOptions(universe_domain=mock_universe) - client = client_class(client_options=options, credentials=ga_credentials.AnonymousCredentials()) - else: - client = client_class(client_options=options, credentials=ga_credentials.AnonymousCredentials()) - assert client.api_endpoint == (mock_endpoint if universe_exists else default_endpoint) - assert client.universe_domain == (mock_universe if universe_exists else default_universe) - - # If ClientOptions does not have a universe domain attribute and GOOGLE_API_USE_MTLS_ENDPOINT="never", - # use the _DEFAULT_ENDPOINT_TEMPLATE populated with GDU as the api endpoint. - options = client_options.ClientOptions() - if hasattr(options, "universe_domain"): - delattr(options, "universe_domain") - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): - client = client_class(client_options=options, credentials=ga_credentials.AnonymousCredentials()) - assert client.api_endpoint == default_endpoint - - -@pytest.mark.parametrize("client_class,transport_class,transport_name", [ - (MetadataServiceClient, transports.MetadataServiceGrpcTransport, "grpc"), - (MetadataServiceAsyncClient, transports.MetadataServiceGrpcAsyncIOTransport, "grpc_asyncio"), -]) -def test_metadata_service_client_client_options_scopes(client_class, transport_class, transport_name): - # Check the case scopes are provided. - options = client_options.ClientOptions( - scopes=["1", "2"], - ) - with mock.patch.object(transport_class, '__init__') as patched: - patched.return_value = None - client = client_class(client_options=options, transport=transport_name) - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE), - scopes=["1", "2"], - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - -@pytest.mark.parametrize("client_class,transport_class,transport_name,grpc_helpers", [ - (MetadataServiceClient, transports.MetadataServiceGrpcTransport, "grpc", grpc_helpers), - (MetadataServiceAsyncClient, transports.MetadataServiceGrpcAsyncIOTransport, "grpc_asyncio", grpc_helpers_async), -]) -def test_metadata_service_client_client_options_credentials_file(client_class, transport_class, transport_name, grpc_helpers): - # Check the case credentials file is provided. 
- options = client_options.ClientOptions( - credentials_file="credentials.json" - ) - - with mock.patch.object(transport_class, '__init__') as patched: - patched.return_value = None - client = client_class(client_options=options, transport=transport_name) - patched.assert_called_once_with( - credentials=None, - credentials_file="credentials.json", - host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE), - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - -def test_metadata_service_client_client_options_from_dict(): - with mock.patch('google.cloud.dataplex_v1.services.metadata_service.transports.MetadataServiceGrpcTransport.__init__') as grpc_transport: - grpc_transport.return_value = None - client = MetadataServiceClient( - client_options={'api_endpoint': 'squid.clam.whelk'} - ) - grpc_transport.assert_called_once_with( - credentials=None, - credentials_file=None, - host="squid.clam.whelk", - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - - -@pytest.mark.parametrize("client_class,transport_class,transport_name,grpc_helpers", [ - (MetadataServiceClient, transports.MetadataServiceGrpcTransport, "grpc", grpc_helpers), - (MetadataServiceAsyncClient, transports.MetadataServiceGrpcAsyncIOTransport, "grpc_asyncio", grpc_helpers_async), -]) -def test_metadata_service_client_create_channel_credentials_file(client_class, transport_class, transport_name, grpc_helpers): - # Check the case credentials file is provided. - options = client_options.ClientOptions( - credentials_file="credentials.json" - ) - - with mock.patch.object(transport_class, '__init__') as patched: - patched.return_value = None - client = client_class(client_options=options, transport=transport_name) - patched.assert_called_once_with( - credentials=None, - credentials_file="credentials.json", - host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE), - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - - # test that the credentials from file are saved and used as the credentials. 
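- # That is, when ClientOptions(credentials_file=...) is set the
- # transport loads credentials with google.auth.load_credentials_from_file
- # and passes them to grpc_helpers.create_channel, instead of falling
- # back to application default credentials. Rough shape (path
- # illustrative):
- #
- #     creds, _ = google.auth.load_credentials_from_file("credentials.json")
- #     channel = grpc_helpers.create_channel(host, credentials=creds)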
- with mock.patch.object( - google.auth, "load_credentials_from_file", autospec=True - ) as load_creds, mock.patch.object( - google.auth, "default", autospec=True - ) as adc, mock.patch.object( - grpc_helpers, "create_channel" - ) as create_channel: - creds = ga_credentials.AnonymousCredentials() - file_creds = ga_credentials.AnonymousCredentials() - load_creds.return_value = (file_creds, None) - adc.return_value = (creds, None) - client = client_class(client_options=options, transport=transport_name) - create_channel.assert_called_with( - "dataplex.googleapis.com:443", - credentials=file_creds, - credentials_file=None, - quota_project_id=None, - default_scopes=( - 'https://www.googleapis.com/auth/cloud-platform', -), - scopes=None, - default_host="dataplex.googleapis.com", - ssl_credentials=None, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ], - ) - - -@pytest.mark.parametrize("request_type", [ - metadata_.CreateEntityRequest, - dict, -]) -def test_create_entity(request_type, transport: str = 'grpc'): - client = MetadataServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_entity), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = metadata_.Entity( - name='name_value', - display_name='display_name_value', - description='description_value', - id='id_value', - etag='etag_value', - type_=metadata_.Entity.Type.TABLE, - asset='asset_value', - data_path='data_path_value', - data_path_pattern='data_path_pattern_value', - catalog_entry='catalog_entry_value', - system=metadata_.StorageSystem.CLOUD_STORAGE, - uid='uid_value', - ) - response = client.create_entity(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - request = metadata_.CreateEntityRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, metadata_.Entity) - assert response.name == 'name_value' - assert response.display_name == 'display_name_value' - assert response.description == 'description_value' - assert response.id == 'id_value' - assert response.etag == 'etag_value' - assert response.type_ == metadata_.Entity.Type.TABLE - assert response.asset == 'asset_value' - assert response.data_path == 'data_path_value' - assert response.data_path_pattern == 'data_path_pattern_value' - assert response.catalog_entry == 'catalog_entry_value' - assert response.system == metadata_.StorageSystem.CLOUD_STORAGE - assert response.uid == 'uid_value' - - -def test_create_entity_non_empty_request_with_auto_populated_field(): - # This test is a coverage failsafe to make sure that UUID4 fields are - # automatically populated, according to AIP-4235, with non-empty requests. - client = MetadataServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. 
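- # Per AIP-4235, a string field annotated as an auto-populated request
- # ID is filled with a UUID4 by the client when the caller leaves it
- # unset; all other fields must round-trip exactly as given. Sketch of
- # the guarded behaviour (request/method names are purely illustrative):
- #
- #     request = SomeRequest()        # request_id left unset
- #     client.some_method(request)    # client fills request_id with uuid4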
- request = metadata_.CreateEntityRequest( - parent='parent_value', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_entity), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client.create_entity(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == metadata_.CreateEntityRequest( - parent='parent_value', - ) - -def test_create_entity_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = MetadataServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.create_entity in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.create_entity] = mock_rpc - request = {} - client.create_entity(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - client.create_entity(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_create_entity_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: - client = MetadataServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._client._transport.create_entity in client._client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.AsyncMock() - mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[client._client._transport.create_entity] = mock_rpc - - request = {} - await client.create_entity(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - await client.create_entity(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_create_entity_async(transport: str = 'grpc_asyncio', request_type=metadata_.CreateEntityRequest): - client = MetadataServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_entity), - '__call__') as call: - # Designate an appropriate return value for the call. 
- call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(metadata_.Entity( - name='name_value', - display_name='display_name_value', - description='description_value', - id='id_value', - etag='etag_value', - type_=metadata_.Entity.Type.TABLE, - asset='asset_value', - data_path='data_path_value', - data_path_pattern='data_path_pattern_value', - catalog_entry='catalog_entry_value', - system=metadata_.StorageSystem.CLOUD_STORAGE, - uid='uid_value', - )) - response = await client.create_entity(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - request = metadata_.CreateEntityRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, metadata_.Entity) - assert response.name == 'name_value' - assert response.display_name == 'display_name_value' - assert response.description == 'description_value' - assert response.id == 'id_value' - assert response.etag == 'etag_value' - assert response.type_ == metadata_.Entity.Type.TABLE - assert response.asset == 'asset_value' - assert response.data_path == 'data_path_value' - assert response.data_path_pattern == 'data_path_pattern_value' - assert response.catalog_entry == 'catalog_entry_value' - assert response.system == metadata_.StorageSystem.CLOUD_STORAGE - assert response.uid == 'uid_value' - - -@pytest.mark.asyncio -async def test_create_entity_async_from_dict(): - await test_create_entity_async(request_type=dict) - -def test_create_entity_field_headers(): - client = MetadataServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = metadata_.CreateEntityRequest() - - request.parent = 'parent_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_entity), - '__call__') as call: - call.return_value = metadata_.Entity() - client.create_entity(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'parent=parent_value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_create_entity_field_headers_async(): - client = MetadataServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = metadata_.CreateEntityRequest() - - request.parent = 'parent_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_entity), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(metadata_.Entity()) - await client.create_entity(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. 
-    _, _, kw = call.mock_calls[0]
-    assert (
-        'x-goog-request-params',
-        'parent=parent_value',
-    ) in kw['metadata']
-
-
-def test_create_entity_flattened():
-    client = MetadataServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-    )
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.create_entity),
-            '__call__') as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = metadata_.Entity()
-        # Call the method with a truthy value for each flattened field,
-        # using the keyword arguments to the method.
-        client.create_entity(
-            parent='parent_value',
-            entity=metadata_.Entity(name='name_value'),
-        )
-
-        # Establish that the underlying call was made with the expected
-        # request object values.
-        assert len(call.mock_calls) == 1
-        _, args, _ = call.mock_calls[0]
-        arg = args[0].parent
-        mock_val = 'parent_value'
-        assert arg == mock_val
-        arg = args[0].entity
-        mock_val = metadata_.Entity(name='name_value')
-        assert arg == mock_val
-
-
-def test_create_entity_flattened_error():
-    client = MetadataServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-    )
-
-    # Attempting to call a method with both a request object and flattened
-    # fields is an error.
-    with pytest.raises(ValueError):
-        client.create_entity(
-            metadata_.CreateEntityRequest(),
-            parent='parent_value',
-            entity=metadata_.Entity(name='name_value'),
-        )
-
-@pytest.mark.asyncio
-async def test_create_entity_flattened_async():
-    client = MetadataServiceAsyncClient(
-        credentials=async_anonymous_credentials(),
-    )
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.create_entity),
-            '__call__') as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(metadata_.Entity())
-        # Call the method with a truthy value for each flattened field,
-        # using the keyword arguments to the method.
-        response = await client.create_entity(
-            parent='parent_value',
-            entity=metadata_.Entity(name='name_value'),
-        )
-
-        # Establish that the underlying call was made with the expected
-        # request object values.
-        assert len(call.mock_calls)
-        _, args, _ = call.mock_calls[0]
-        arg = args[0].parent
-        mock_val = 'parent_value'
-        assert arg == mock_val
-        arg = args[0].entity
-        mock_val = metadata_.Entity(name='name_value')
-        assert arg == mock_val
-
-@pytest.mark.asyncio
-async def test_create_entity_flattened_error_async():
-    client = MetadataServiceAsyncClient(
-        credentials=async_anonymous_credentials(),
-    )
-
-    # Attempting to call a method with both a request object and flattened
-    # fields is an error.
-    with pytest.raises(ValueError):
-        await client.create_entity(
-            metadata_.CreateEntityRequest(),
-            parent='parent_value',
-            entity=metadata_.Entity(name='name_value'),
-        )
-
-
-@pytest.mark.parametrize("request_type", [
-    metadata_.UpdateEntityRequest,
-    dict,
-])
-def test_update_entity(request_type, transport: str = 'grpc'):
-    client = MetadataServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport=transport,
-    )
-
-    # Everything is optional in proto3 as far as the runtime is concerned,
-    # and we are mocking out the actual API, so just send an empty request.
-    request = request_type()
-
-    # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object( - type(client.transport.update_entity), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = metadata_.Entity( - name='name_value', - display_name='display_name_value', - description='description_value', - id='id_value', - etag='etag_value', - type_=metadata_.Entity.Type.TABLE, - asset='asset_value', - data_path='data_path_value', - data_path_pattern='data_path_pattern_value', - catalog_entry='catalog_entry_value', - system=metadata_.StorageSystem.CLOUD_STORAGE, - uid='uid_value', - ) - response = client.update_entity(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - request = metadata_.UpdateEntityRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, metadata_.Entity) - assert response.name == 'name_value' - assert response.display_name == 'display_name_value' - assert response.description == 'description_value' - assert response.id == 'id_value' - assert response.etag == 'etag_value' - assert response.type_ == metadata_.Entity.Type.TABLE - assert response.asset == 'asset_value' - assert response.data_path == 'data_path_value' - assert response.data_path_pattern == 'data_path_pattern_value' - assert response.catalog_entry == 'catalog_entry_value' - assert response.system == metadata_.StorageSystem.CLOUD_STORAGE - assert response.uid == 'uid_value' - - -def test_update_entity_non_empty_request_with_auto_populated_field(): - # This test is a coverage failsafe to make sure that UUID4 fields are - # automatically populated, according to AIP-4235, with non-empty requests. - client = MetadataServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. - request = metadata_.UpdateEntityRequest( - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_entity), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client.update_entity(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == metadata_.UpdateEntityRequest( - ) - -def test_update_entity_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = MetadataServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.update_entity in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.update_entity] = mock_rpc - request = {} - client.update_entity(request) - - # Establish that the underlying gRPC stub method was called. 
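-        # (the wrapped RPC was built once by _prep_wrapped_messages and looked
-        # up in _wrapped_methods, so only the cached mock observes the call)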
-        assert mock_rpc.call_count == 1
-
-        client.update_entity(request)
-
-        # Establish that a new wrapper was not created for this call
-        assert wrapper_fn.call_count == 0
-        assert mock_rpc.call_count == 2
-
-@pytest.mark.asyncio
-async def test_update_entity_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"):
-    # Clients should use _prep_wrapped_messages to create cached wrapped rpcs,
-    # instead of constructing them on each call
-    with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn:
-        client = MetadataServiceAsyncClient(
-            credentials=async_anonymous_credentials(),
-            transport=transport,
-        )
-
-        # Should wrap all calls on client creation
-        assert wrapper_fn.call_count > 0
-        wrapper_fn.reset_mock()
-
-        # Ensure method has been cached
-        assert client._client._transport.update_entity in client._client._transport._wrapped_methods
-
-        # Replace cached wrapped function with mock
-        mock_rpc = mock.AsyncMock()
-        mock_rpc.return_value = mock.Mock()
-        client._client._transport._wrapped_methods[client._client._transport.update_entity] = mock_rpc
-
-        request = {}
-        await client.update_entity(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert mock_rpc.call_count == 1
-
-        await client.update_entity(request)
-
-        # Establish that a new wrapper was not created for this call
-        assert wrapper_fn.call_count == 0
-        assert mock_rpc.call_count == 2
-
-@pytest.mark.asyncio
-async def test_update_entity_async(transport: str = 'grpc_asyncio', request_type=metadata_.UpdateEntityRequest):
-    client = MetadataServiceAsyncClient(
-        credentials=async_anonymous_credentials(),
-        transport=transport,
-    )
-
-    # Everything is optional in proto3 as far as the runtime is concerned,
-    # and we are mocking out the actual API, so just send an empty request.
-    request = request_type()
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.update_entity),
-            '__call__') as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(metadata_.Entity(
-            name='name_value',
-            display_name='display_name_value',
-            description='description_value',
-            id='id_value',
-            etag='etag_value',
-            type_=metadata_.Entity.Type.TABLE,
-            asset='asset_value',
-            data_path='data_path_value',
-            data_path_pattern='data_path_pattern_value',
-            catalog_entry='catalog_entry_value',
-            system=metadata_.StorageSystem.CLOUD_STORAGE,
-            uid='uid_value',
-        ))
-        response = await client.update_entity(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert len(call.mock_calls)
-        _, args, _ = call.mock_calls[0]
-        request = metadata_.UpdateEntityRequest()
-        assert args[0] == request
-
-    # Establish that the response is the type that we expect.
- assert isinstance(response, metadata_.Entity) - assert response.name == 'name_value' - assert response.display_name == 'display_name_value' - assert response.description == 'description_value' - assert response.id == 'id_value' - assert response.etag == 'etag_value' - assert response.type_ == metadata_.Entity.Type.TABLE - assert response.asset == 'asset_value' - assert response.data_path == 'data_path_value' - assert response.data_path_pattern == 'data_path_pattern_value' - assert response.catalog_entry == 'catalog_entry_value' - assert response.system == metadata_.StorageSystem.CLOUD_STORAGE - assert response.uid == 'uid_value' - - -@pytest.mark.asyncio -async def test_update_entity_async_from_dict(): - await test_update_entity_async(request_type=dict) - -def test_update_entity_field_headers(): - client = MetadataServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = metadata_.UpdateEntityRequest() - - request.entity.name = 'name_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_entity), - '__call__') as call: - call.return_value = metadata_.Entity() - client.update_entity(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'entity.name=name_value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_update_entity_field_headers_async(): - client = MetadataServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = metadata_.UpdateEntityRequest() - - request.entity.name = 'name_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_entity), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(metadata_.Entity()) - await client.update_entity(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'entity.name=name_value', - ) in kw['metadata'] - - -@pytest.mark.parametrize("request_type", [ - metadata_.DeleteEntityRequest, - dict, -]) -def test_delete_entity(request_type, transport: str = 'grpc'): - client = MetadataServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_entity), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = None - response = client.delete_entity(request) - - # Establish that the underlying gRPC stub method was called. 
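-        # (mock_calls records (name, args, kwargs) triples; args[0] is the
-        # request proto that actually reached the transport)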
- assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - request = metadata_.DeleteEntityRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert response is None - - -def test_delete_entity_non_empty_request_with_auto_populated_field(): - # This test is a coverage failsafe to make sure that UUID4 fields are - # automatically populated, according to AIP-4235, with non-empty requests. - client = MetadataServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. - request = metadata_.DeleteEntityRequest( - name='name_value', - etag='etag_value', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_entity), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client.delete_entity(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == metadata_.DeleteEntityRequest( - name='name_value', - etag='etag_value', - ) - -def test_delete_entity_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = MetadataServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.delete_entity in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.delete_entity] = mock_rpc - request = {} - client.delete_entity(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - client.delete_entity(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_delete_entity_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: - client = MetadataServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._client._transport.delete_entity in client._client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.AsyncMock() - mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[client._client._transport.delete_entity] = mock_rpc - - request = {} - await client.delete_entity(request) - - # Establish that the underlying gRPC stub method was called. 
- assert mock_rpc.call_count == 1 - - await client.delete_entity(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_delete_entity_async(transport: str = 'grpc_asyncio', request_type=metadata_.DeleteEntityRequest): - client = MetadataServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_entity), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - response = await client.delete_entity(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - request = metadata_.DeleteEntityRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert response is None - - -@pytest.mark.asyncio -async def test_delete_entity_async_from_dict(): - await test_delete_entity_async(request_type=dict) - -def test_delete_entity_field_headers(): - client = MetadataServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = metadata_.DeleteEntityRequest() - - request.name = 'name_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_entity), - '__call__') as call: - call.return_value = None - client.delete_entity(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'name=name_value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_delete_entity_field_headers_async(): - client = MetadataServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = metadata_.DeleteEntityRequest() - - request.name = 'name_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_entity), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - await client.delete_entity(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'name=name_value', - ) in kw['metadata'] - - -def test_delete_entity_flattened(): - client = MetadataServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_entity), - '__call__') as call: - # Designate an appropriate return value for the call. 
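-        # DeleteEntity returns google.protobuf.Empty, which the generated
-        # client surfaces as None, so None is the faked return value here.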
-        call.return_value = None
-        # Call the method with a truthy value for each flattened field,
-        # using the keyword arguments to the method.
-        client.delete_entity(
-            name='name_value',
-        )
-
-        # Establish that the underlying call was made with the expected
-        # request object values.
-        assert len(call.mock_calls) == 1
-        _, args, _ = call.mock_calls[0]
-        arg = args[0].name
-        mock_val = 'name_value'
-        assert arg == mock_val
-
-
-def test_delete_entity_flattened_error():
-    client = MetadataServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-    )
-
-    # Attempting to call a method with both a request object and flattened
-    # fields is an error.
-    with pytest.raises(ValueError):
-        client.delete_entity(
-            metadata_.DeleteEntityRequest(),
-            name='name_value',
-        )
-
-@pytest.mark.asyncio
-async def test_delete_entity_flattened_async():
-    client = MetadataServiceAsyncClient(
-        credentials=async_anonymous_credentials(),
-    )
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.delete_entity),
-            '__call__') as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None)
-        # Call the method with a truthy value for each flattened field,
-        # using the keyword arguments to the method.
-        response = await client.delete_entity(
-            name='name_value',
-        )
-
-        # Establish that the underlying call was made with the expected
-        # request object values.
-        assert len(call.mock_calls)
-        _, args, _ = call.mock_calls[0]
-        arg = args[0].name
-        mock_val = 'name_value'
-        assert arg == mock_val
-
-@pytest.mark.asyncio
-async def test_delete_entity_flattened_error_async():
-    client = MetadataServiceAsyncClient(
-        credentials=async_anonymous_credentials(),
-    )
-
-    # Attempting to call a method with both a request object and flattened
-    # fields is an error.
-    with pytest.raises(ValueError):
-        await client.delete_entity(
-            metadata_.DeleteEntityRequest(),
-            name='name_value',
-        )
-
-
-@pytest.mark.parametrize("request_type", [
-    metadata_.GetEntityRequest,
-    dict,
-])
-def test_get_entity(request_type, transport: str = 'grpc'):
-    client = MetadataServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport=transport,
-    )
-
-    # Everything is optional in proto3 as far as the runtime is concerned,
-    # and we are mocking out the actual API, so just send an empty request.
-    request = request_type()
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.get_entity),
-            '__call__') as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = metadata_.Entity(
-            name='name_value',
-            display_name='display_name_value',
-            description='description_value',
-            id='id_value',
-            etag='etag_value',
-            type_=metadata_.Entity.Type.TABLE,
-            asset='asset_value',
-            data_path='data_path_value',
-            data_path_pattern='data_path_pattern_value',
-            catalog_entry='catalog_entry_value',
-            system=metadata_.StorageSystem.CLOUD_STORAGE,
-            uid='uid_value',
-        )
-        response = client.get_entity(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert len(call.mock_calls) == 1
-        _, args, _ = call.mock_calls[0]
-        request = metadata_.GetEntityRequest()
-        assert args[0] == request
-
-    # Establish that the response is the type that we expect.
- assert isinstance(response, metadata_.Entity) - assert response.name == 'name_value' - assert response.display_name == 'display_name_value' - assert response.description == 'description_value' - assert response.id == 'id_value' - assert response.etag == 'etag_value' - assert response.type_ == metadata_.Entity.Type.TABLE - assert response.asset == 'asset_value' - assert response.data_path == 'data_path_value' - assert response.data_path_pattern == 'data_path_pattern_value' - assert response.catalog_entry == 'catalog_entry_value' - assert response.system == metadata_.StorageSystem.CLOUD_STORAGE - assert response.uid == 'uid_value' - - -def test_get_entity_non_empty_request_with_auto_populated_field(): - # This test is a coverage failsafe to make sure that UUID4 fields are - # automatically populated, according to AIP-4235, with non-empty requests. - client = MetadataServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. - request = metadata_.GetEntityRequest( - name='name_value', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_entity), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client.get_entity(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == metadata_.GetEntityRequest( - name='name_value', - ) - -def test_get_entity_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = MetadataServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.get_entity in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.get_entity] = mock_rpc - request = {} - client.get_entity(request) - - # Establish that the underlying gRPC stub method was called. 
-        assert mock_rpc.call_count == 1
-
-        client.get_entity(request)
-
-        # Establish that a new wrapper was not created for this call
-        assert wrapper_fn.call_count == 0
-        assert mock_rpc.call_count == 2
-
-@pytest.mark.asyncio
-async def test_get_entity_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"):
-    # Clients should use _prep_wrapped_messages to create cached wrapped rpcs,
-    # instead of constructing them on each call
-    with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn:
-        client = MetadataServiceAsyncClient(
-            credentials=async_anonymous_credentials(),
-            transport=transport,
-        )
-
-        # Should wrap all calls on client creation
-        assert wrapper_fn.call_count > 0
-        wrapper_fn.reset_mock()
-
-        # Ensure method has been cached
-        assert client._client._transport.get_entity in client._client._transport._wrapped_methods
-
-        # Replace cached wrapped function with mock
-        mock_rpc = mock.AsyncMock()
-        mock_rpc.return_value = mock.Mock()
-        client._client._transport._wrapped_methods[client._client._transport.get_entity] = mock_rpc
-
-        request = {}
-        await client.get_entity(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert mock_rpc.call_count == 1
-
-        await client.get_entity(request)
-
-        # Establish that a new wrapper was not created for this call
-        assert wrapper_fn.call_count == 0
-        assert mock_rpc.call_count == 2
-
-@pytest.mark.asyncio
-async def test_get_entity_async(transport: str = 'grpc_asyncio', request_type=metadata_.GetEntityRequest):
-    client = MetadataServiceAsyncClient(
-        credentials=async_anonymous_credentials(),
-        transport=transport,
-    )
-
-    # Everything is optional in proto3 as far as the runtime is concerned,
-    # and we are mocking out the actual API, so just send an empty request.
-    request = request_type()
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.get_entity),
-            '__call__') as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(metadata_.Entity(
-            name='name_value',
-            display_name='display_name_value',
-            description='description_value',
-            id='id_value',
-            etag='etag_value',
-            type_=metadata_.Entity.Type.TABLE,
-            asset='asset_value',
-            data_path='data_path_value',
-            data_path_pattern='data_path_pattern_value',
-            catalog_entry='catalog_entry_value',
-            system=metadata_.StorageSystem.CLOUD_STORAGE,
-            uid='uid_value',
-        ))
-        response = await client.get_entity(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert len(call.mock_calls)
-        _, args, _ = call.mock_calls[0]
-        request = metadata_.GetEntityRequest()
-        assert args[0] == request
-
-    # Establish that the response is the type that we expect.
- assert isinstance(response, metadata_.Entity) - assert response.name == 'name_value' - assert response.display_name == 'display_name_value' - assert response.description == 'description_value' - assert response.id == 'id_value' - assert response.etag == 'etag_value' - assert response.type_ == metadata_.Entity.Type.TABLE - assert response.asset == 'asset_value' - assert response.data_path == 'data_path_value' - assert response.data_path_pattern == 'data_path_pattern_value' - assert response.catalog_entry == 'catalog_entry_value' - assert response.system == metadata_.StorageSystem.CLOUD_STORAGE - assert response.uid == 'uid_value' - - -@pytest.mark.asyncio -async def test_get_entity_async_from_dict(): - await test_get_entity_async(request_type=dict) - -def test_get_entity_field_headers(): - client = MetadataServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = metadata_.GetEntityRequest() - - request.name = 'name_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_entity), - '__call__') as call: - call.return_value = metadata_.Entity() - client.get_entity(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'name=name_value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_get_entity_field_headers_async(): - client = MetadataServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = metadata_.GetEntityRequest() - - request.name = 'name_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_entity), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(metadata_.Entity()) - await client.get_entity(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'name=name_value', - ) in kw['metadata'] - - -def test_get_entity_flattened(): - client = MetadataServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_entity), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = metadata_.Entity() - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.get_entity( - name='name_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. 
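-        # (the flattened `name` keyword is coalesced into a single
-        # GetEntityRequest before the transport is invoked, so the request
-        # recorded as args[0] is inspected field by field)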
-        assert len(call.mock_calls) == 1
-        _, args, _ = call.mock_calls[0]
-        arg = args[0].name
-        mock_val = 'name_value'
-        assert arg == mock_val
-
-
-def test_get_entity_flattened_error():
-    client = MetadataServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-    )
-
-    # Attempting to call a method with both a request object and flattened
-    # fields is an error.
-    with pytest.raises(ValueError):
-        client.get_entity(
-            metadata_.GetEntityRequest(),
-            name='name_value',
-        )
-
-@pytest.mark.asyncio
-async def test_get_entity_flattened_async():
-    client = MetadataServiceAsyncClient(
-        credentials=async_anonymous_credentials(),
-    )
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.get_entity),
-            '__call__') as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(metadata_.Entity())
-        # Call the method with a truthy value for each flattened field,
-        # using the keyword arguments to the method.
-        response = await client.get_entity(
-            name='name_value',
-        )
-
-        # Establish that the underlying call was made with the expected
-        # request object values.
-        assert len(call.mock_calls)
-        _, args, _ = call.mock_calls[0]
-        arg = args[0].name
-        mock_val = 'name_value'
-        assert arg == mock_val
-
-@pytest.mark.asyncio
-async def test_get_entity_flattened_error_async():
-    client = MetadataServiceAsyncClient(
-        credentials=async_anonymous_credentials(),
-    )
-
-    # Attempting to call a method with both a request object and flattened
-    # fields is an error.
-    with pytest.raises(ValueError):
-        await client.get_entity(
-            metadata_.GetEntityRequest(),
-            name='name_value',
-        )
-
-
-@pytest.mark.parametrize("request_type", [
-    metadata_.ListEntitiesRequest,
-    dict,
-])
-def test_list_entities(request_type, transport: str = 'grpc'):
-    client = MetadataServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport=transport,
-    )
-
-    # Everything is optional in proto3 as far as the runtime is concerned,
-    # and we are mocking out the actual API, so just send an empty request.
-    request = request_type()
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.list_entities),
-            '__call__') as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = metadata_.ListEntitiesResponse(
-            next_page_token='next_page_token_value',
-        )
-        response = client.list_entities(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert len(call.mock_calls) == 1
-        _, args, _ = call.mock_calls[0]
-        request = metadata_.ListEntitiesRequest()
-        assert args[0] == request
-
-    # Establish that the response is the type that we expect.
-    assert isinstance(response, pagers.ListEntitiesPager)
-    assert response.next_page_token == 'next_page_token_value'
-
-
-def test_list_entities_non_empty_request_with_auto_populated_field():
-    # This test is a coverage failsafe to make sure that UUID4 fields are
-    # automatically populated, according to AIP-4235, with non-empty requests.
-    client = MetadataServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport='grpc',
-    )
-
-    # Populate all string fields in the request which are not UUID4
-    # since we want to check that UUID4 are populated automatically
-    # if they meet the requirements of AIP 4235.
- request = metadata_.ListEntitiesRequest( - parent='parent_value', - page_token='page_token_value', - filter='filter_value', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_entities), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client.list_entities(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == metadata_.ListEntitiesRequest( - parent='parent_value', - page_token='page_token_value', - filter='filter_value', - ) - -def test_list_entities_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = MetadataServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.list_entities in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.list_entities] = mock_rpc - request = {} - client.list_entities(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - client.list_entities(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_list_entities_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: - client = MetadataServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._client._transport.list_entities in client._client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.AsyncMock() - mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[client._client._transport.list_entities] = mock_rpc - - request = {} - await client.list_entities(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - await client.list_entities(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_list_entities_async(transport: str = 'grpc_asyncio', request_type=metadata_.ListEntitiesRequest): - client = MetadataServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. 
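-    # Patching type(...).__call__ intercepts the stub beneath the client
-    # layer, so request coalescing and routing-header logic still run.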
-    with mock.patch.object(
-            type(client.transport.list_entities),
-            '__call__') as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(metadata_.ListEntitiesResponse(
-            next_page_token='next_page_token_value',
-        ))
-        response = await client.list_entities(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert len(call.mock_calls)
-        _, args, _ = call.mock_calls[0]
-        request = metadata_.ListEntitiesRequest()
-        assert args[0] == request
-
-    # Establish that the response is the type that we expect.
-    assert isinstance(response, pagers.ListEntitiesAsyncPager)
-    assert response.next_page_token == 'next_page_token_value'
-
-
-@pytest.mark.asyncio
-async def test_list_entities_async_from_dict():
-    await test_list_entities_async(request_type=dict)
-
-def test_list_entities_field_headers():
-    client = MetadataServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-    )
-
-    # Any value that is part of the HTTP/1.1 URI should be sent as
-    # a field header. Set these to a non-empty value.
-    request = metadata_.ListEntitiesRequest()
-
-    request.parent = 'parent_value'
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.list_entities),
-            '__call__') as call:
-        call.return_value = metadata_.ListEntitiesResponse()
-        client.list_entities(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert len(call.mock_calls) == 1
-        _, args, _ = call.mock_calls[0]
-        assert args[0] == request
-
-    # Establish that the field header was sent.
-    _, _, kw = call.mock_calls[0]
-    assert (
-        'x-goog-request-params',
-        'parent=parent_value',
-    ) in kw['metadata']
-
-
-@pytest.mark.asyncio
-async def test_list_entities_field_headers_async():
-    client = MetadataServiceAsyncClient(
-        credentials=async_anonymous_credentials(),
-    )
-
-    # Any value that is part of the HTTP/1.1 URI should be sent as
-    # a field header. Set these to a non-empty value.
-    request = metadata_.ListEntitiesRequest()
-
-    request.parent = 'parent_value'
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.list_entities),
-            '__call__') as call:
-        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(metadata_.ListEntitiesResponse())
-        await client.list_entities(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert len(call.mock_calls)
-        _, args, _ = call.mock_calls[0]
-        assert args[0] == request
-
-    # Establish that the field header was sent.
-    _, _, kw = call.mock_calls[0]
-    assert (
-        'x-goog-request-params',
-        'parent=parent_value',
-    ) in kw['metadata']
-
-
-def test_list_entities_flattened():
-    client = MetadataServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-    )
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.list_entities),
-            '__call__') as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = metadata_.ListEntitiesResponse()
-        # Call the method with a truthy value for each flattened field,
-        # using the keyword arguments to the method.
-        client.list_entities(
-            parent='parent_value',
-        )
-
-        # Establish that the underlying call was made with the expected
-        # request object values.
-        assert len(call.mock_calls) == 1
-        _, args, _ = call.mock_calls[0]
-        arg = args[0].parent
-        mock_val = 'parent_value'
-        assert arg == mock_val
-
-
-def test_list_entities_flattened_error():
-    client = MetadataServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-    )
-
-    # Attempting to call a method with both a request object and flattened
-    # fields is an error.
-    with pytest.raises(ValueError):
-        client.list_entities(
-            metadata_.ListEntitiesRequest(),
-            parent='parent_value',
-        )
-
-@pytest.mark.asyncio
-async def test_list_entities_flattened_async():
-    client = MetadataServiceAsyncClient(
-        credentials=async_anonymous_credentials(),
-    )
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.list_entities),
-            '__call__') as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(metadata_.ListEntitiesResponse())
-        # Call the method with a truthy value for each flattened field,
-        # using the keyword arguments to the method.
-        response = await client.list_entities(
-            parent='parent_value',
-        )
-
-        # Establish that the underlying call was made with the expected
-        # request object values.
-        assert len(call.mock_calls)
-        _, args, _ = call.mock_calls[0]
-        arg = args[0].parent
-        mock_val = 'parent_value'
-        assert arg == mock_val
-
-@pytest.mark.asyncio
-async def test_list_entities_flattened_error_async():
-    client = MetadataServiceAsyncClient(
-        credentials=async_anonymous_credentials(),
-    )
-
-    # Attempting to call a method with both a request object and flattened
-    # fields is an error.
-    with pytest.raises(ValueError):
-        await client.list_entities(
-            metadata_.ListEntitiesRequest(),
-            parent='parent_value',
-        )
-
-
-def test_list_entities_pager(transport_name: str = "grpc"):
-    client = MetadataServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport=transport_name,
-    )
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.list_entities),
-            '__call__') as call:
-        # Set the response to a series of pages.
-        call.side_effect = (
-            metadata_.ListEntitiesResponse(
-                entities=[
-                    metadata_.Entity(),
-                    metadata_.Entity(),
-                    metadata_.Entity(),
-                ],
-                next_page_token='abc',
-            ),
-            metadata_.ListEntitiesResponse(
-                entities=[],
-                next_page_token='def',
-            ),
-            metadata_.ListEntitiesResponse(
-                entities=[
-                    metadata_.Entity(),
-                ],
-                next_page_token='ghi',
-            ),
-            metadata_.ListEntitiesResponse(
-                entities=[
-                    metadata_.Entity(),
-                    metadata_.Entity(),
-                ],
-            ),
-            RuntimeError,
-        )
-
-        expected_metadata = ()
-        retry = retries.Retry()
-        timeout = 5
-        expected_metadata = tuple(expected_metadata) + (
-            gapic_v1.routing_header.to_grpc_metadata((
-                ('parent', ''),
-            )),
-        )
-        pager = client.list_entities(request={}, retry=retry, timeout=timeout)
-
-        assert pager._metadata == expected_metadata
-        assert pager._retry == retry
-        assert pager._timeout == timeout
-
-        results = list(pager)
-        assert len(results) == 6
-        assert all(isinstance(i, metadata_.Entity)
-                   for i in results)
-def test_list_entities_pages(transport_name: str = "grpc"):
-    client = MetadataServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport=transport_name,
-    )
-
-    # Mock the actual call within the gRPC stub, and fake the request.
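-    # Each mocked response below is one page; iterating `.pages` should stop
-    # once a response carries an empty next_page_token.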
- with mock.patch.object( - type(client.transport.list_entities), - '__call__') as call: - # Set the response to a series of pages. - call.side_effect = ( - metadata_.ListEntitiesResponse( - entities=[ - metadata_.Entity(), - metadata_.Entity(), - metadata_.Entity(), - ], - next_page_token='abc', - ), - metadata_.ListEntitiesResponse( - entities=[], - next_page_token='def', - ), - metadata_.ListEntitiesResponse( - entities=[ - metadata_.Entity(), - ], - next_page_token='ghi', - ), - metadata_.ListEntitiesResponse( - entities=[ - metadata_.Entity(), - metadata_.Entity(), - ], - ), - RuntimeError, - ) - pages = list(client.list_entities(request={}).pages) - for page_, token in zip(pages, ['abc','def','ghi', '']): - assert page_.raw_page.next_page_token == token - -@pytest.mark.asyncio -async def test_list_entities_async_pager(): - client = MetadataServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_entities), - '__call__', new_callable=mock.AsyncMock) as call: - # Set the response to a series of pages. - call.side_effect = ( - metadata_.ListEntitiesResponse( - entities=[ - metadata_.Entity(), - metadata_.Entity(), - metadata_.Entity(), - ], - next_page_token='abc', - ), - metadata_.ListEntitiesResponse( - entities=[], - next_page_token='def', - ), - metadata_.ListEntitiesResponse( - entities=[ - metadata_.Entity(), - ], - next_page_token='ghi', - ), - metadata_.ListEntitiesResponse( - entities=[ - metadata_.Entity(), - metadata_.Entity(), - ], - ), - RuntimeError, - ) - async_pager = await client.list_entities(request={},) - assert async_pager.next_page_token == 'abc' - responses = [] - async for response in async_pager: # pragma: no branch - responses.append(response) - - assert len(responses) == 6 - assert all(isinstance(i, metadata_.Entity) - for i in responses) - - -@pytest.mark.asyncio -async def test_list_entities_async_pages(): - client = MetadataServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_entities), - '__call__', new_callable=mock.AsyncMock) as call: - # Set the response to a series of pages. 
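-        # side_effect yields one response per stub invocation; the trailing
-        # RuntimeError is a guard that fails the test if the pager fetches
-        # past the final (tokenless) page.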
- call.side_effect = ( - metadata_.ListEntitiesResponse( - entities=[ - metadata_.Entity(), - metadata_.Entity(), - metadata_.Entity(), - ], - next_page_token='abc', - ), - metadata_.ListEntitiesResponse( - entities=[], - next_page_token='def', - ), - metadata_.ListEntitiesResponse( - entities=[ - metadata_.Entity(), - ], - next_page_token='ghi', - ), - metadata_.ListEntitiesResponse( - entities=[ - metadata_.Entity(), - metadata_.Entity(), - ], - ), - RuntimeError, - ) - pages = [] - # Workaround issue in python 3.9 related to code coverage by adding `# pragma: no branch` - # See https://github.com/googleapis/gapic-generator-python/pull/1174#issuecomment-1025132372 - async for page_ in ( # pragma: no branch - await client.list_entities(request={}) - ).pages: - pages.append(page_) - for page_, token in zip(pages, ['abc','def','ghi', '']): - assert page_.raw_page.next_page_token == token - -@pytest.mark.parametrize("request_type", [ - metadata_.CreatePartitionRequest, - dict, -]) -def test_create_partition(request_type, transport: str = 'grpc'): - client = MetadataServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_partition), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = metadata_.Partition( - name='name_value', - values=['values_value'], - location='location_value', - etag='etag_value', - ) - response = client.create_partition(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - request = metadata_.CreatePartitionRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, metadata_.Partition) - assert response.name == 'name_value' - assert response.values == ['values_value'] - assert response.location == 'location_value' - assert response.etag == 'etag_value' - - -def test_create_partition_non_empty_request_with_auto_populated_field(): - # This test is a coverage failsafe to make sure that UUID4 fields are - # automatically populated, according to AIP-4235, with non-empty requests. - client = MetadataServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. - request = metadata_.CreatePartitionRequest( - parent='parent_value', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_partition), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. 
-        client.create_partition(request=request)
-        call.assert_called()
-        _, args, _ = call.mock_calls[0]
-        assert args[0] == metadata_.CreatePartitionRequest(
-            parent='parent_value',
-        )
-
-def test_create_partition_use_cached_wrapped_rpc():
-    # Clients should use _prep_wrapped_messages to create cached wrapped rpcs,
-    # instead of constructing them on each call
-    with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn:
-        client = MetadataServiceClient(
-            credentials=ga_credentials.AnonymousCredentials(),
-            transport="grpc",
-        )
-
-        # Should wrap all calls on client creation
-        assert wrapper_fn.call_count > 0
-        wrapper_fn.reset_mock()
-
-        # Ensure method has been cached
-        assert client._transport.create_partition in client._transport._wrapped_methods
-
-        # Replace cached wrapped function with mock
-        mock_rpc = mock.Mock()
-        mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string.
-        client._transport._wrapped_methods[client._transport.create_partition] = mock_rpc
-        request = {}
-        client.create_partition(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert mock_rpc.call_count == 1
-
-        client.create_partition(request)
-
-        # Establish that a new wrapper was not created for this call
-        assert wrapper_fn.call_count == 0
-        assert mock_rpc.call_count == 2
-
-@pytest.mark.asyncio
-async def test_create_partition_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"):
-    # Clients should use _prep_wrapped_messages to create cached wrapped rpcs,
-    # instead of constructing them on each call
-    with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn:
-        client = MetadataServiceAsyncClient(
-            credentials=async_anonymous_credentials(),
-            transport=transport,
-        )
-
-        # Should wrap all calls on client creation
-        assert wrapper_fn.call_count > 0
-        wrapper_fn.reset_mock()
-
-        # Ensure method has been cached
-        assert client._client._transport.create_partition in client._client._transport._wrapped_methods
-
-        # Replace cached wrapped function with mock
-        mock_rpc = mock.AsyncMock()
-        mock_rpc.return_value = mock.Mock()
-        client._client._transport._wrapped_methods[client._client._transport.create_partition] = mock_rpc
-
-        request = {}
-        await client.create_partition(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert mock_rpc.call_count == 1
-
-        await client.create_partition(request)
-
-        # Establish that a new wrapper was not created for this call
-        assert wrapper_fn.call_count == 0
-        assert mock_rpc.call_count == 2
-
-@pytest.mark.asyncio
-async def test_create_partition_async(transport: str = 'grpc_asyncio', request_type=metadata_.CreatePartitionRequest):
-    client = MetadataServiceAsyncClient(
-        credentials=async_anonymous_credentials(),
-        transport=transport,
-    )
-
-    # Everything is optional in proto3 as far as the runtime is concerned,
-    # and we are mocking out the actual API, so just send an empty request.
-    request = request_type()
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.create_partition),
-            '__call__') as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(metadata_.Partition(
-            name='name_value',
-            values=['values_value'],
-            location='location_value',
-            etag='etag_value',
-        ))
-        response = await client.create_partition(request)
-
-        # Establish that the underlying gRPC stub method was called.
- assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - request = metadata_.CreatePartitionRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, metadata_.Partition) - assert response.name == 'name_value' - assert response.values == ['values_value'] - assert response.location == 'location_value' - assert response.etag == 'etag_value' - - -@pytest.mark.asyncio -async def test_create_partition_async_from_dict(): - await test_create_partition_async(request_type=dict) - -def test_create_partition_field_headers(): - client = MetadataServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = metadata_.CreatePartitionRequest() - - request.parent = 'parent_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_partition), - '__call__') as call: - call.return_value = metadata_.Partition() - client.create_partition(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'parent=parent_value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_create_partition_field_headers_async(): - client = MetadataServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = metadata_.CreatePartitionRequest() - - request.parent = 'parent_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_partition), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(metadata_.Partition()) - await client.create_partition(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'parent=parent_value', - ) in kw['metadata'] - - -def test_create_partition_flattened(): - client = MetadataServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_partition), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = metadata_.Partition() - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.create_partition( - parent='parent_value', - partition=metadata_.Partition(name='name_value'), - ) - - # Establish that the underlying call was made with the expected - # request object values. 
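-        # (both flattened kwargs must land on the single
-        # CreatePartitionRequest recorded as args[0])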
- assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - arg = args[0].parent - mock_val = 'parent_value' - assert arg == mock_val - arg = args[0].partition - mock_val = metadata_.Partition(name='name_value') - assert arg == mock_val - - -def test_create_partition_flattened_error(): - client = MetadataServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.create_partition( - metadata_.CreatePartitionRequest(), - parent='parent_value', - partition=metadata_.Partition(name='name_value'), - ) - -@pytest.mark.asyncio -async def test_create_partition_flattened_async(): - client = MetadataServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_partition), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = metadata_.Partition() - - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(metadata_.Partition()) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.create_partition( - parent='parent_value', - partition=metadata_.Partition(name='name_value'), - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].parent - mock_val = 'parent_value' - assert arg == mock_val - arg = args[0].partition - mock_val = metadata_.Partition(name='name_value') - assert arg == mock_val - -@pytest.mark.asyncio -async def test_create_partition_flattened_error_async(): - client = MetadataServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - await client.create_partition( - metadata_.CreatePartitionRequest(), - parent='parent_value', - partition=metadata_.Partition(name='name_value'), - ) - - -@pytest.mark.parametrize("request_type", [ - metadata_.DeletePartitionRequest, - dict, -]) -def test_delete_partition(request_type, transport: str = 'grpc'): - client = MetadataServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_partition), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = None - response = client.delete_partition(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - request = metadata_.DeletePartitionRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert response is None - - -def test_delete_partition_non_empty_request_with_auto_populated_field(): - # This test is a coverage failsafe to make sure that UUID4 fields are - # automatically populated, according to AIP-4235, with non-empty requests. 
- client = MetadataServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. - request = metadata_.DeletePartitionRequest( - name='name_value', - etag='etag_value', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_partition), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client.delete_partition(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == metadata_.DeletePartitionRequest( - name='name_value', - etag='etag_value', - ) - -def test_delete_partition_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = MetadataServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.delete_partition in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.delete_partition] = mock_rpc - request = {} - client.delete_partition(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - client.delete_partition(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_delete_partition_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: - client = MetadataServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._client._transport.delete_partition in client._client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.AsyncMock() - mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[client._client._transport.delete_partition] = mock_rpc - - request = {} - await client.delete_partition(request) - - # Establish that the underlying gRPC stub method was called. 
- assert mock_rpc.call_count == 1 - - await client.delete_partition(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_delete_partition_async(transport: str = 'grpc_asyncio', request_type=metadata_.DeletePartitionRequest): - client = MetadataServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_partition), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - response = await client.delete_partition(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - request = metadata_.DeletePartitionRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert response is None - - -@pytest.mark.asyncio -async def test_delete_partition_async_from_dict(): - await test_delete_partition_async(request_type=dict) - -def test_delete_partition_field_headers(): - client = MetadataServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = metadata_.DeletePartitionRequest() - - request.name = 'name_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_partition), - '__call__') as call: - call.return_value = None - client.delete_partition(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'name=name_value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_delete_partition_field_headers_async(): - client = MetadataServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = metadata_.DeletePartitionRequest() - - request.name = 'name_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_partition), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - await client.delete_partition(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'name=name_value', - ) in kw['metadata'] - - -def test_delete_partition_flattened(): - client = MetadataServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. 
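The field-header assertions in these tests verify that any request field that appears in the HTTP URI is also carried as an `x-goog-request-params` gRPC metadata entry, so server-side routing works over gRPC as well. The generated clients build that entry with a `google.api_core` helper; a small example, where `('name', 'name_value')` is a placeholder routing parameter:

    from google.api_core import gapic_v1

    pair = gapic_v1.routing_header.to_grpc_metadata((("name", "name_value"),))
    # The entry the field-header tests look for in kw['metadata'].
    assert pair == ("x-goog-request-params", "name=name_value")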
- with mock.patch.object( - type(client.transport.delete_partition), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = None - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.delete_partition( - name='name_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = 'name_value' - assert arg == mock_val - - -def test_delete_partition_flattened_error(): - client = MetadataServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.delete_partition( - metadata_.DeletePartitionRequest(), - name='name_value', - ) - -@pytest.mark.asyncio -async def test_delete_partition_flattened_async(): - client = MetadataServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_partition), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = None - - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.delete_partition( - name='name_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = 'name_value' - assert arg == mock_val - -@pytest.mark.asyncio -async def test_delete_partition_flattened_error_async(): - client = MetadataServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - await client.delete_partition( - metadata_.DeletePartitionRequest(), - name='name_value', - ) - - -@pytest.mark.parametrize("request_type", [ - metadata_.GetPartitionRequest, - dict, -]) -def test_get_partition(request_type, transport: str = 'grpc'): - client = MetadataServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_partition), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = metadata_.Partition( - name='name_value', - values=['values_value'], - location='location_value', - etag='etag_value', - ) - response = client.get_partition(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - request = metadata_.GetPartitionRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. 
- assert isinstance(response, metadata_.Partition) - assert response.name == 'name_value' - assert response.values == ['values_value'] - assert response.location == 'location_value' - assert response.etag == 'etag_value' - - -def test_get_partition_non_empty_request_with_auto_populated_field(): - # This test is a coverage failsafe to make sure that UUID4 fields are - # automatically populated, according to AIP-4235, with non-empty requests. - client = MetadataServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. - request = metadata_.GetPartitionRequest( - name='name_value', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_partition), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client.get_partition(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == metadata_.GetPartitionRequest( - name='name_value', - ) - -def test_get_partition_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = MetadataServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.get_partition in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.get_partition] = mock_rpc - request = {} - client.get_partition(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - client.get_partition(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_get_partition_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: - client = MetadataServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._client._transport.get_partition in client._client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.AsyncMock() - mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[client._client._transport.get_partition] = mock_rpc - - request = {} - await client.get_partition(request) - - # Establish that the underlying gRPC stub method was called. 
- assert mock_rpc.call_count == 1 - - await client.get_partition(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_get_partition_async(transport: str = 'grpc_asyncio', request_type=metadata_.GetPartitionRequest): - client = MetadataServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_partition), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(metadata_.Partition( - name='name_value', - values=['values_value'], - location='location_value', - etag='etag_value', - )) - response = await client.get_partition(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - request = metadata_.GetPartitionRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, metadata_.Partition) - assert response.name == 'name_value' - assert response.values == ['values_value'] - assert response.location == 'location_value' - assert response.etag == 'etag_value' - - -@pytest.mark.asyncio -async def test_get_partition_async_from_dict(): - await test_get_partition_async(request_type=dict) - -def test_get_partition_field_headers(): - client = MetadataServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = metadata_.GetPartitionRequest() - - request.name = 'name_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_partition), - '__call__') as call: - call.return_value = metadata_.Partition() - client.get_partition(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'name=name_value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_get_partition_field_headers_async(): - client = MetadataServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = metadata_.GetPartitionRequest() - - request.name = 'name_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_partition), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(metadata_.Partition()) - await client.get_partition(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. 
- _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'name=name_value', - ) in kw['metadata'] - - -def test_get_partition_flattened(): - client = MetadataServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_partition), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = metadata_.Partition() - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.get_partition( - name='name_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = 'name_value' - assert arg == mock_val - - -def test_get_partition_flattened_error(): - client = MetadataServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.get_partition( - metadata_.GetPartitionRequest(), - name='name_value', - ) - -@pytest.mark.asyncio -async def test_get_partition_flattened_async(): - client = MetadataServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_partition), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = metadata_.Partition() - - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(metadata_.Partition()) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.get_partition( - name='name_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = 'name_value' - assert arg == mock_val - -@pytest.mark.asyncio -async def test_get_partition_flattened_error_async(): - client = MetadataServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - await client.get_partition( - metadata_.GetPartitionRequest(), - name='name_value', - ) - - -@pytest.mark.parametrize("request_type", [ - metadata_.ListPartitionsRequest, - dict, -]) -def test_list_partitions(request_type, transport: str = 'grpc'): - client = MetadataServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_partitions), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = metadata_.ListPartitionsResponse( - next_page_token='next_page_token_value', - ) - response = client.list_partitions(request) - - # Establish that the underlying gRPC stub method was called. 
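The flattened-call tests exercise the convenience signature: each RPC accepts either a request object or the individual fields as keyword arguments, and supplying both raises `ValueError`. A short sketch of the flattened form under the same mocking pattern these tests use, so no credentials or network are needed (`name_value` is a placeholder, not a real resource name):

    from unittest import mock
    from google.auth import credentials as ga_credentials
    from google.cloud import dataplex_v1

    client = dataplex_v1.MetadataServiceClient(
        credentials=ga_credentials.AnonymousCredentials(),
    )
    with mock.patch.object(type(client.transport.get_partition), "__call__") as call:
        call.return_value = dataplex_v1.Partition()
        # Flattened form: `name` is copied into a GetPartitionRequest.
        client.get_partition(name="name_value")
        # Passing a request object *and* flattened fields raises ValueError:
        # client.get_partition(dataplex_v1.GetPartitionRequest(), name="name_value")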
- assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - request = metadata_.ListPartitionsRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, pagers.ListPartitionsPager) - assert response.next_page_token == 'next_page_token_value' - - -def test_list_partitions_non_empty_request_with_auto_populated_field(): - # This test is a coverage failsafe to make sure that UUID4 fields are - # automatically populated, according to AIP-4235, with non-empty requests. - client = MetadataServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. - request = metadata_.ListPartitionsRequest( - parent='parent_value', - page_token='page_token_value', - filter='filter_value', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_partitions), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client.list_partitions(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == metadata_.ListPartitionsRequest( - parent='parent_value', - page_token='page_token_value', - filter='filter_value', - ) - -def test_list_partitions_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = MetadataServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.list_partitions in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.list_partitions] = mock_rpc - request = {} - client.list_partitions(request) - - # Establish that the underlying gRPC stub method was called. 
- assert mock_rpc.call_count == 1 - - client.list_partitions(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_list_partitions_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: - client = MetadataServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._client._transport.list_partitions in client._client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.AsyncMock() - mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[client._client._transport.list_partitions] = mock_rpc - - request = {} - await client.list_partitions(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - await client.list_partitions(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_list_partitions_async(transport: str = 'grpc_asyncio', request_type=metadata_.ListPartitionsRequest): - client = MetadataServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_partitions), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(metadata_.ListPartitionsResponse( - next_page_token='next_page_token_value', - )) - response = await client.list_partitions(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - request = metadata_.ListPartitionsRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, pagers.ListPartitionsAsyncPager) - assert response.next_page_token == 'next_page_token_value' - - -@pytest.mark.asyncio -async def test_list_partitions_async_from_dict(): - await test_list_partitions_async(request_type=dict) - -def test_list_partitions_field_headers(): - client = MetadataServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = metadata_.ListPartitionsRequest() - - request.parent = 'parent_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_partitions), - '__call__') as call: - call.return_value = metadata_.ListPartitionsResponse() - client.list_partitions(request) - - # Establish that the underlying gRPC stub method was called. 
- assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'parent=parent_value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_list_partitions_field_headers_async(): - client = MetadataServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = metadata_.ListPartitionsRequest() - - request.parent = 'parent_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_partitions), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(metadata_.ListPartitionsResponse()) - await client.list_partitions(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'parent=parent_value', - ) in kw['metadata'] - - -def test_list_partitions_flattened(): - client = MetadataServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_partitions), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = metadata_.ListPartitionsResponse() - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.list_partitions( - parent='parent_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - arg = args[0].parent - mock_val = 'parent_value' - assert arg == mock_val - - -def test_list_partitions_flattened_error(): - client = MetadataServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.list_partitions( - metadata_.ListPartitionsRequest(), - parent='parent_value', - ) - -@pytest.mark.asyncio -async def test_list_partitions_flattened_async(): - client = MetadataServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_partitions), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = metadata_.ListPartitionsResponse() - - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(metadata_.ListPartitionsResponse()) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.list_partitions( - parent='parent_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. 
- assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].parent - mock_val = 'parent_value' - assert arg == mock_val - -@pytest.mark.asyncio -async def test_list_partitions_flattened_error_async(): - client = MetadataServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - await client.list_partitions( - metadata_.ListPartitionsRequest(), - parent='parent_value', - ) - - -def test_list_partitions_pager(transport_name: str = "grpc"): - client = MetadataServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport_name, - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_partitions), - '__call__') as call: - # Set the response to a series of pages. - call.side_effect = ( - metadata_.ListPartitionsResponse( - partitions=[ - metadata_.Partition(), - metadata_.Partition(), - metadata_.Partition(), - ], - next_page_token='abc', - ), - metadata_.ListPartitionsResponse( - partitions=[], - next_page_token='def', - ), - metadata_.ListPartitionsResponse( - partitions=[ - metadata_.Partition(), - ], - next_page_token='ghi', - ), - metadata_.ListPartitionsResponse( - partitions=[ - metadata_.Partition(), - metadata_.Partition(), - ], - ), - RuntimeError, - ) - - expected_metadata = () - retry = retries.Retry() - timeout = 5 - expected_metadata = tuple(expected_metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ('parent', ''), - )), - ) - pager = client.list_partitions(request={}, retry=retry, timeout=timeout) - - assert pager._metadata == expected_metadata - assert pager._retry == retry - assert pager._timeout == timeout - - results = list(pager) - assert len(results) == 6 - assert all(isinstance(i, metadata_.Partition) - for i in results) -def test_list_partitions_pages(transport_name: str = "grpc"): - client = MetadataServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport_name, - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_partitions), - '__call__') as call: - # Set the response to a series of pages. - call.side_effect = ( - metadata_.ListPartitionsResponse( - partitions=[ - metadata_.Partition(), - metadata_.Partition(), - metadata_.Partition(), - ], - next_page_token='abc', - ), - metadata_.ListPartitionsResponse( - partitions=[], - next_page_token='def', - ), - metadata_.ListPartitionsResponse( - partitions=[ - metadata_.Partition(), - ], - next_page_token='ghi', - ), - metadata_.ListPartitionsResponse( - partitions=[ - metadata_.Partition(), - metadata_.Partition(), - ], - ), - RuntimeError, - ) - pages = list(client.list_partitions(request={}).pages) - for page_, token in zip(pages, ['abc','def','ghi', '']): - assert page_.raw_page.next_page_token == token - -@pytest.mark.asyncio -async def test_list_partitions_async_pager(): - client = MetadataServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_partitions), - '__call__', new_callable=mock.AsyncMock) as call: - # Set the response to a series of pages. 
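The pager tests pin down how `list_partitions` pagination behaves: iterating the returned pager yields `Partition` items across all pages, following `next_page_token` transparently, while `.pages` exposes the raw responses. A minimal sketch with canned pages, mirroring the mocking pattern used in these tests:

    from unittest import mock
    from google.auth import credentials as ga_credentials
    from google.cloud import dataplex_v1

    client = dataplex_v1.MetadataServiceClient(
        credentials=ga_credentials.AnonymousCredentials(),
    )
    with mock.patch.object(type(client.transport.list_partitions), "__call__") as call:
        call.side_effect = (
            dataplex_v1.ListPartitionsResponse(
                partitions=[dataplex_v1.Partition()], next_page_token="abc"),
            dataplex_v1.ListPartitionsResponse(partitions=[dataplex_v1.Partition()]),
        )
        pager = client.list_partitions(request={})
        assert len(list(pager)) == 2  # one item from each page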
- call.side_effect = ( - metadata_.ListPartitionsResponse( - partitions=[ - metadata_.Partition(), - metadata_.Partition(), - metadata_.Partition(), - ], - next_page_token='abc', - ), - metadata_.ListPartitionsResponse( - partitions=[], - next_page_token='def', - ), - metadata_.ListPartitionsResponse( - partitions=[ - metadata_.Partition(), - ], - next_page_token='ghi', - ), - metadata_.ListPartitionsResponse( - partitions=[ - metadata_.Partition(), - metadata_.Partition(), - ], - ), - RuntimeError, - ) - async_pager = await client.list_partitions(request={},) - assert async_pager.next_page_token == 'abc' - responses = [] - async for response in async_pager: # pragma: no branch - responses.append(response) - - assert len(responses) == 6 - assert all(isinstance(i, metadata_.Partition) - for i in responses) - - -@pytest.mark.asyncio -async def test_list_partitions_async_pages(): - client = MetadataServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_partitions), - '__call__', new_callable=mock.AsyncMock) as call: - # Set the response to a series of pages. - call.side_effect = ( - metadata_.ListPartitionsResponse( - partitions=[ - metadata_.Partition(), - metadata_.Partition(), - metadata_.Partition(), - ], - next_page_token='abc', - ), - metadata_.ListPartitionsResponse( - partitions=[], - next_page_token='def', - ), - metadata_.ListPartitionsResponse( - partitions=[ - metadata_.Partition(), - ], - next_page_token='ghi', - ), - metadata_.ListPartitionsResponse( - partitions=[ - metadata_.Partition(), - metadata_.Partition(), - ], - ), - RuntimeError, - ) - pages = [] - # Workaround issue in python 3.9 related to code coverage by adding `# pragma: no branch` - # See https://github.com/googleapis/gapic-generator-python/pull/1174#issuecomment-1025132372 - async for page_ in ( # pragma: no branch - await client.list_partitions(request={}) - ).pages: - pages.append(page_) - for page_, token in zip(pages, ['abc','def','ghi', '']): - assert page_.raw_page.next_page_token == token - - -def test_credentials_transport_error(): - # It is an error to provide credentials and a transport instance. - transport = transports.MetadataServiceGrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - with pytest.raises(ValueError): - client = MetadataServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # It is an error to provide a credentials file and a transport instance. - transport = transports.MetadataServiceGrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - with pytest.raises(ValueError): - client = MetadataServiceClient( - client_options={"credentials_file": "credentials.json"}, - transport=transport, - ) - - # It is an error to provide an api_key and a transport instance. - transport = transports.MetadataServiceGrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - options = client_options.ClientOptions() - options.api_key = "api_key" - with pytest.raises(ValueError): - client = MetadataServiceClient( - client_options=options, - transport=transport, - ) - - # It is an error to provide an api_key and a credential. 
- options = client_options.ClientOptions() - options.api_key = "api_key" - with pytest.raises(ValueError): - client = MetadataServiceClient( - client_options=options, - credentials=ga_credentials.AnonymousCredentials() - ) - - # It is an error to provide scopes and a transport instance. - transport = transports.MetadataServiceGrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - with pytest.raises(ValueError): - client = MetadataServiceClient( - client_options={"scopes": ["1", "2"]}, - transport=transport, - ) - - -def test_transport_instance(): - # A client may be instantiated with a custom transport instance. - transport = transports.MetadataServiceGrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - client = MetadataServiceClient(transport=transport) - assert client.transport is transport - -def test_transport_get_channel(): - # A client may be instantiated with a custom transport instance. - transport = transports.MetadataServiceGrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - channel = transport.grpc_channel - assert channel - - transport = transports.MetadataServiceGrpcAsyncIOTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - channel = transport.grpc_channel - assert channel - -@pytest.mark.parametrize("transport_class", [ - transports.MetadataServiceGrpcTransport, - transports.MetadataServiceGrpcAsyncIOTransport, -]) -def test_transport_adc(transport_class): - # Test default credentials are used if not provided. - with mock.patch.object(google.auth, 'default') as adc: - adc.return_value = (ga_credentials.AnonymousCredentials(), None) - transport_class() - adc.assert_called_once() - -def test_transport_kind_grpc(): - transport = MetadataServiceClient.get_transport_class("grpc")( - credentials=ga_credentials.AnonymousCredentials() - ) - assert transport.kind == "grpc" - - -def test_initialize_client_w_grpc(): - client = MetadataServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc" - ) - assert client is not None - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_create_entity_empty_call_grpc(): - client = MetadataServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.create_entity), - '__call__') as call: - call.return_value = metadata_.Entity() - client.create_entity(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = metadata_.CreateEntityRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_update_entity_empty_call_grpc(): - client = MetadataServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.update_entity), - '__call__') as call: - call.return_value = metadata_.Entity() - client.update_entity(request=None) - - # Establish that the underlying stub method was called. 
- call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = metadata_.UpdateEntityRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_delete_entity_empty_call_grpc(): - client = MetadataServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.delete_entity), - '__call__') as call: - call.return_value = None - client.delete_entity(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = metadata_.DeleteEntityRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_get_entity_empty_call_grpc(): - client = MetadataServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.get_entity), - '__call__') as call: - call.return_value = metadata_.Entity() - client.get_entity(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = metadata_.GetEntityRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_list_entities_empty_call_grpc(): - client = MetadataServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.list_entities), - '__call__') as call: - call.return_value = metadata_.ListEntitiesResponse() - client.list_entities(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = metadata_.ListEntitiesRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_create_partition_empty_call_grpc(): - client = MetadataServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.create_partition), - '__call__') as call: - call.return_value = metadata_.Partition() - client.create_partition(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = metadata_.CreatePartitionRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_delete_partition_empty_call_grpc(): - client = MetadataServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.delete_partition), - '__call__') as call: - call.return_value = None - client.delete_partition(request=None) - - # Establish that the underlying stub method was called. 
- call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = metadata_.DeletePartitionRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_get_partition_empty_call_grpc(): - client = MetadataServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.get_partition), - '__call__') as call: - call.return_value = metadata_.Partition() - client.get_partition(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = metadata_.GetPartitionRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_list_partitions_empty_call_grpc(): - client = MetadataServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.list_partitions), - '__call__') as call: - call.return_value = metadata_.ListPartitionsResponse() - client.list_partitions(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = metadata_.ListPartitionsRequest() - - assert args[0] == request_msg - - -def test_transport_kind_grpc_asyncio(): - transport = MetadataServiceAsyncClient.get_transport_class("grpc_asyncio")( - credentials=async_anonymous_credentials() - ) - assert transport.kind == "grpc_asyncio" - - -def test_initialize_client_w_grpc_asyncio(): - client = MetadataServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio" - ) - assert client is not None - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -@pytest.mark.asyncio -async def test_create_entity_empty_call_grpc_asyncio(): - client = MetadataServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.create_entity), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(metadata_.Entity( - name='name_value', - display_name='display_name_value', - description='description_value', - id='id_value', - etag='etag_value', - type_=metadata_.Entity.Type.TABLE, - asset='asset_value', - data_path='data_path_value', - data_path_pattern='data_path_pattern_value', - catalog_entry='catalog_entry_value', - system=metadata_.StorageSystem.CLOUD_STORAGE, - uid='uid_value', - )) - await client.create_entity(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = metadata_.CreateEntityRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. 
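The `*_empty_call_grpc` tests above, and their asyncio counterparts below, pin down a small but important contract: calling a method with `request=None` and no flattened fields still sends a well-formed, default-constructed request message rather than `None`. Roughly:

    # request=None is promoted to a default-constructed request message.
    from unittest import mock
    from google.auth import credentials as ga_credentials
    from google.cloud import dataplex_v1

    client = dataplex_v1.MetadataServiceClient(
        credentials=ga_credentials.AnonymousCredentials(),
    )
    with mock.patch.object(type(client.transport.get_partition), "__call__") as call:
        call.return_value = dataplex_v1.Partition()
        client.get_partition(request=None)
        _, args, _ = call.mock_calls[0]
        assert args[0] == dataplex_v1.GetPartitionRequest()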
-@pytest.mark.asyncio -async def test_update_entity_empty_call_grpc_asyncio(): - client = MetadataServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.update_entity), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(metadata_.Entity( - name='name_value', - display_name='display_name_value', - description='description_value', - id='id_value', - etag='etag_value', - type_=metadata_.Entity.Type.TABLE, - asset='asset_value', - data_path='data_path_value', - data_path_pattern='data_path_pattern_value', - catalog_entry='catalog_entry_value', - system=metadata_.StorageSystem.CLOUD_STORAGE, - uid='uid_value', - )) - await client.update_entity(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = metadata_.UpdateEntityRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -@pytest.mark.asyncio -async def test_delete_entity_empty_call_grpc_asyncio(): - client = MetadataServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.delete_entity), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - await client.delete_entity(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = metadata_.DeleteEntityRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -@pytest.mark.asyncio -async def test_get_entity_empty_call_grpc_asyncio(): - client = MetadataServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.get_entity), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(metadata_.Entity( - name='name_value', - display_name='display_name_value', - description='description_value', - id='id_value', - etag='etag_value', - type_=metadata_.Entity.Type.TABLE, - asset='asset_value', - data_path='data_path_value', - data_path_pattern='data_path_pattern_value', - catalog_entry='catalog_entry_value', - system=metadata_.StorageSystem.CLOUD_STORAGE, - uid='uid_value', - )) - await client.get_entity(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = metadata_.GetEntityRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -@pytest.mark.asyncio -async def test_list_entities_empty_call_grpc_asyncio(): - client = MetadataServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call, and fake the request. 
- with mock.patch.object( - type(client.transport.list_entities), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(metadata_.ListEntitiesResponse( - next_page_token='next_page_token_value', - )) - await client.list_entities(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = metadata_.ListEntitiesRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -@pytest.mark.asyncio -async def test_create_partition_empty_call_grpc_asyncio(): - client = MetadataServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.create_partition), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(metadata_.Partition( - name='name_value', - values=['values_value'], - location='location_value', - etag='etag_value', - )) - await client.create_partition(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = metadata_.CreatePartitionRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -@pytest.mark.asyncio -async def test_delete_partition_empty_call_grpc_asyncio(): - client = MetadataServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.delete_partition), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - await client.delete_partition(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = metadata_.DeletePartitionRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -@pytest.mark.asyncio -async def test_get_partition_empty_call_grpc_asyncio(): - client = MetadataServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.get_partition), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(metadata_.Partition( - name='name_value', - values=['values_value'], - location='location_value', - etag='etag_value', - )) - await client.get_partition(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = metadata_.GetPartitionRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. 
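On the asyncio transport the stub returns an awaitable call object rather than a plain message, which is why these async tests wrap canned responses in `grpc_helpers_async.FakeUnaryUnaryCall`. Awaiting the fake, as the generated async client does internally with a live RPC, yields the wrapped message:

    import asyncio
    from google.api_core import grpc_helpers_async
    from google.cloud import dataplex_v1

    async def main():
        fake = grpc_helpers_async.FakeUnaryUnaryCall(
            dataplex_v1.Partition(name="name_value"))
        response = await fake  # behaves like an in-flight unary-unary call
        assert response.name == "name_value"

    asyncio.run(main())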
-@pytest.mark.asyncio -async def test_list_partitions_empty_call_grpc_asyncio(): - client = MetadataServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.list_partitions), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(metadata_.ListPartitionsResponse( - next_page_token='next_page_token_value', - )) - await client.list_partitions(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = metadata_.ListPartitionsRequest() - - assert args[0] == request_msg - - -def test_transport_grpc_default(): - # A client should use the gRPC transport by default. - client = MetadataServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - assert isinstance( - client.transport, - transports.MetadataServiceGrpcTransport, - ) - -def test_metadata_service_base_transport_error(): - # Passing both a credentials object and credentials_file should raise an error - with pytest.raises(core_exceptions.DuplicateCredentialArgs): - transport = transports.MetadataServiceTransport( - credentials=ga_credentials.AnonymousCredentials(), - credentials_file="credentials.json" - ) - - -def test_metadata_service_base_transport(): - # Instantiate the base transport. - with mock.patch('google.cloud.dataplex_v1.services.metadata_service.transports.MetadataServiceTransport.__init__') as Transport: - Transport.return_value = None - transport = transports.MetadataServiceTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Every method on the transport should just blindly - # raise NotImplementedError. - methods = ( - 'create_entity', - 'update_entity', - 'delete_entity', - 'get_entity', - 'list_entities', - 'create_partition', - 'delete_partition', - 'get_partition', - 'list_partitions', - 'get_location', - 'list_locations', - 'get_operation', - 'cancel_operation', - 'delete_operation', - 'list_operations', - ) - for method in methods: - with pytest.raises(NotImplementedError): - getattr(transport, method)(request=object()) - - with pytest.raises(NotImplementedError): - transport.close() - - # Catch all for all remaining methods and properties - remainder = [ - 'kind', - ] - for r in remainder: - with pytest.raises(NotImplementedError): - getattr(transport, r)() - - -def test_metadata_service_base_transport_with_credentials_file(): - # Instantiate the base transport with a credentials file - with mock.patch.object(google.auth, 'load_credentials_from_file', autospec=True) as load_creds, mock.patch('google.cloud.dataplex_v1.services.metadata_service.transports.MetadataServiceTransport._prep_wrapped_messages') as Transport: - Transport.return_value = None - load_creds.return_value = (ga_credentials.AnonymousCredentials(), None) - transport = transports.MetadataServiceTransport( - credentials_file="credentials.json", - quota_project_id="octopus", - ) - load_creds.assert_called_once_with("credentials.json", - scopes=None, - default_scopes=( - 'https://www.googleapis.com/auth/cloud-platform', -), - quota_project_id="octopus", - ) - - -def test_metadata_service_base_transport_with_adc(): - # Test the default credentials are used if credentials and credentials_file are None. 
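The ADC tests here and below assert that, when neither credentials nor a credentials file is supplied, the transport falls back to Application Default Credentials with the cloud-platform scope. The fallback itself reduces to a single call:

    # Application Default Credentials lookup; raises DefaultCredentialsError
    # when no ADC is configured in the environment.
    import google.auth

    credentials, project_id = google.auth.default(
        default_scopes=("https://www.googleapis.com/auth/cloud-platform",),
    )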
-    with mock.patch.object(google.auth, 'default', autospec=True) as adc, mock.patch('google.cloud.dataplex_v1.services.metadata_service.transports.MetadataServiceTransport._prep_wrapped_messages') as Transport:
-        Transport.return_value = None
-        adc.return_value = (ga_credentials.AnonymousCredentials(), None)
-        transport = transports.MetadataServiceTransport()
-        adc.assert_called_once()
-
-
-def test_metadata_service_auth_adc():
-    # If no credentials are provided, we should use ADC credentials.
-    with mock.patch.object(google.auth, 'default', autospec=True) as adc:
-        adc.return_value = (ga_credentials.AnonymousCredentials(), None)
-        MetadataServiceClient()
-        adc.assert_called_once_with(
-            scopes=None,
-            default_scopes=(
-                'https://www.googleapis.com/auth/cloud-platform',
-),
-            quota_project_id=None,
-        )
-
-
-@pytest.mark.parametrize(
-    "transport_class",
-    [
-        transports.MetadataServiceGrpcTransport,
-        transports.MetadataServiceGrpcAsyncIOTransport,
-    ],
-)
-def test_metadata_service_transport_auth_adc(transport_class):
-    # If credentials and host are not provided, the transport class should use
-    # ADC credentials.
-    with mock.patch.object(google.auth, 'default', autospec=True) as adc:
-        adc.return_value = (ga_credentials.AnonymousCredentials(), None)
-        transport_class(quota_project_id="octopus", scopes=["1", "2"])
-        adc.assert_called_once_with(
-            scopes=["1", "2"],
-            default_scopes=( 'https://www.googleapis.com/auth/cloud-platform',),
-            quota_project_id="octopus",
-        )
-
-
-@pytest.mark.parametrize(
-    "transport_class",
-    [
-        transports.MetadataServiceGrpcTransport,
-        transports.MetadataServiceGrpcAsyncIOTransport,
-    ],
-)
-def test_metadata_service_transport_auth_gdch_credentials(transport_class):
-    host = 'https://language.com'
-    api_audience_tests = [None, 'https://language2.com']
-    api_audience_expect = [host, 'https://language2.com']
-    for t, e in zip(api_audience_tests, api_audience_expect):
-        with mock.patch.object(google.auth, 'default', autospec=True) as adc:
-            gdch_mock = mock.MagicMock()
-            type(gdch_mock).with_gdch_audience = mock.PropertyMock(return_value=gdch_mock)
-            adc.return_value = (gdch_mock, None)
-            transport_class(host=host, api_audience=t)
-            gdch_mock.with_gdch_audience.assert_called_once_with(
-                e
-            )
-
-
-@pytest.mark.parametrize(
-    "transport_class,grpc_helpers",
-    [
-        (transports.MetadataServiceGrpcTransport, grpc_helpers),
-        (transports.MetadataServiceGrpcAsyncIOTransport, grpc_helpers_async)
-    ],
-)
-def test_metadata_service_transport_create_channel(transport_class, grpc_helpers):
-    # If credentials and host are not provided, the transport class should use
-    # ADC credentials.
- with mock.patch.object(google.auth, "default", autospec=True) as adc, mock.patch.object( - grpc_helpers, "create_channel", autospec=True - ) as create_channel: - creds = ga_credentials.AnonymousCredentials() - adc.return_value = (creds, None) - transport_class( - quota_project_id="octopus", - scopes=["1", "2"] - ) - - create_channel.assert_called_with( - "dataplex.googleapis.com:443", - credentials=creds, - credentials_file=None, - quota_project_id="octopus", - default_scopes=( - 'https://www.googleapis.com/auth/cloud-platform', -), - scopes=["1", "2"], - default_host="dataplex.googleapis.com", - ssl_credentials=None, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ], - ) - - -@pytest.mark.parametrize("transport_class", [transports.MetadataServiceGrpcTransport, transports.MetadataServiceGrpcAsyncIOTransport]) -def test_metadata_service_grpc_transport_client_cert_source_for_mtls( - transport_class -): - cred = ga_credentials.AnonymousCredentials() - - # Check ssl_channel_credentials is used if provided. - with mock.patch.object(transport_class, "create_channel") as mock_create_channel: - mock_ssl_channel_creds = mock.Mock() - transport_class( - host="squid.clam.whelk", - credentials=cred, - ssl_channel_credentials=mock_ssl_channel_creds - ) - mock_create_channel.assert_called_once_with( - "squid.clam.whelk:443", - credentials=cred, - credentials_file=None, - scopes=None, - ssl_credentials=mock_ssl_channel_creds, - quota_project_id=None, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ], - ) - - # Check if ssl_channel_credentials is not provided, then client_cert_source_for_mtls - # is used. - with mock.patch.object(transport_class, "create_channel", return_value=mock.Mock()): - with mock.patch("grpc.ssl_channel_credentials") as mock_ssl_cred: - transport_class( - credentials=cred, - client_cert_source_for_mtls=client_cert_source_callback - ) - expected_cert, expected_key = client_cert_source_callback() - mock_ssl_cred.assert_called_once_with( - certificate_chain=expected_cert, - private_key=expected_key - ) - - -@pytest.mark.parametrize("transport_name", [ - "grpc", - "grpc_asyncio", -]) -def test_metadata_service_host_no_port(transport_name): - client = MetadataServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - client_options=client_options.ClientOptions(api_endpoint='dataplex.googleapis.com'), - transport=transport_name, - ) - assert client.transport._host == ( - 'dataplex.googleapis.com:443' - ) - -@pytest.mark.parametrize("transport_name", [ - "grpc", - "grpc_asyncio", -]) -def test_metadata_service_host_with_port(transport_name): - client = MetadataServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - client_options=client_options.ClientOptions(api_endpoint='dataplex.googleapis.com:8000'), - transport=transport_name, - ) - assert client.transport._host == ( - 'dataplex.googleapis.com:8000' - ) - -def test_metadata_service_grpc_transport_channel(): - channel = grpc.secure_channel('http://localhost/', grpc.local_channel_credentials()) - - # Check that channel is used if provided. 
-    transport = transports.MetadataServiceGrpcTransport(
-        host="squid.clam.whelk",
-        channel=channel,
-    )
-    assert transport.grpc_channel == channel
-    assert transport._host == "squid.clam.whelk:443"
-    assert transport._ssl_channel_credentials == None
-
-
-def test_metadata_service_grpc_asyncio_transport_channel():
-    channel = aio.secure_channel('http://localhost/', grpc.local_channel_credentials())
-
-    # Check that channel is used if provided.
-    transport = transports.MetadataServiceGrpcAsyncIOTransport(
-        host="squid.clam.whelk",
-        channel=channel,
-    )
-    assert transport.grpc_channel == channel
-    assert transport._host == "squid.clam.whelk:443"
-    assert transport._ssl_channel_credentials == None
-
-
-# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are
-# removed from grpc/grpc_asyncio transport constructor.
-@pytest.mark.parametrize("transport_class", [transports.MetadataServiceGrpcTransport, transports.MetadataServiceGrpcAsyncIOTransport])
-def test_metadata_service_transport_channel_mtls_with_client_cert_source(
-    transport_class
-):
-    with mock.patch("grpc.ssl_channel_credentials", autospec=True) as grpc_ssl_channel_cred:
-        with mock.patch.object(transport_class, "create_channel") as grpc_create_channel:
-            mock_ssl_cred = mock.Mock()
-            grpc_ssl_channel_cred.return_value = mock_ssl_cred
-
-            mock_grpc_channel = mock.Mock()
-            grpc_create_channel.return_value = mock_grpc_channel
-
-            cred = ga_credentials.AnonymousCredentials()
-            with pytest.warns(DeprecationWarning):
-                with mock.patch.object(google.auth, 'default') as adc:
-                    adc.return_value = (cred, None)
-                    transport = transport_class(
-                        host="squid.clam.whelk",
-                        api_mtls_endpoint="mtls.squid.clam.whelk",
-                        client_cert_source=client_cert_source_callback,
-                    )
-                    adc.assert_called_once()
-
-            grpc_ssl_channel_cred.assert_called_once_with(
-                certificate_chain=b"cert bytes", private_key=b"key bytes"
-            )
-            grpc_create_channel.assert_called_once_with(
-                "mtls.squid.clam.whelk:443",
-                credentials=cred,
-                credentials_file=None,
-                scopes=None,
-                ssl_credentials=mock_ssl_cred,
-                quota_project_id=None,
-                options=[
-                    ("grpc.max_send_message_length", -1),
-                    ("grpc.max_receive_message_length", -1),
-                ],
-            )
-            assert transport.grpc_channel == mock_grpc_channel
-            assert transport._ssl_channel_credentials == mock_ssl_cred
-
-
-# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are
-# removed from grpc/grpc_asyncio transport constructor.
-@pytest.mark.parametrize("transport_class", [transports.MetadataServiceGrpcTransport, transports.MetadataServiceGrpcAsyncIOTransport]) -def test_metadata_service_transport_channel_mtls_with_adc( - transport_class -): - mock_ssl_cred = mock.Mock() - with mock.patch.multiple( - "google.auth.transport.grpc.SslCredentials", - __init__=mock.Mock(return_value=None), - ssl_credentials=mock.PropertyMock(return_value=mock_ssl_cred), - ): - with mock.patch.object(transport_class, "create_channel") as grpc_create_channel: - mock_grpc_channel = mock.Mock() - grpc_create_channel.return_value = mock_grpc_channel - mock_cred = mock.Mock() - - with pytest.warns(DeprecationWarning): - transport = transport_class( - host="squid.clam.whelk", - credentials=mock_cred, - api_mtls_endpoint="mtls.squid.clam.whelk", - client_cert_source=None, - ) - - grpc_create_channel.assert_called_once_with( - "mtls.squid.clam.whelk:443", - credentials=mock_cred, - credentials_file=None, - scopes=None, - ssl_credentials=mock_ssl_cred, - quota_project_id=None, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ], - ) - assert transport.grpc_channel == mock_grpc_channel - - -def test_entity_path(): - project = "squid" - location = "clam" - lake = "whelk" - zone = "octopus" - entity = "oyster" - expected = "projects/{project}/locations/{location}/lakes/{lake}/zones/{zone}/entities/{entity}".format(project=project, location=location, lake=lake, zone=zone, entity=entity, ) - actual = MetadataServiceClient.entity_path(project, location, lake, zone, entity) - assert expected == actual - - -def test_parse_entity_path(): - expected = { - "project": "nudibranch", - "location": "cuttlefish", - "lake": "mussel", - "zone": "winkle", - "entity": "nautilus", - } - path = MetadataServiceClient.entity_path(**expected) - - # Check that the path construction is reversible. - actual = MetadataServiceClient.parse_entity_path(path) - assert expected == actual - -def test_partition_path(): - project = "scallop" - location = "abalone" - lake = "squid" - zone = "clam" - entity = "whelk" - partition = "octopus" - expected = "projects/{project}/locations/{location}/lakes/{lake}/zones/{zone}/entities/{entity}/partitions/{partition}".format(project=project, location=location, lake=lake, zone=zone, entity=entity, partition=partition, ) - actual = MetadataServiceClient.partition_path(project, location, lake, zone, entity, partition) - assert expected == actual - - -def test_parse_partition_path(): - expected = { - "project": "oyster", - "location": "nudibranch", - "lake": "cuttlefish", - "zone": "mussel", - "entity": "winkle", - "partition": "nautilus", - } - path = MetadataServiceClient.partition_path(**expected) - - # Check that the path construction is reversible. - actual = MetadataServiceClient.parse_partition_path(path) - assert expected == actual - -def test_zone_path(): - project = "scallop" - location = "abalone" - lake = "squid" - zone = "clam" - expected = "projects/{project}/locations/{location}/lakes/{lake}/zones/{zone}".format(project=project, location=location, lake=lake, zone=zone, ) - actual = MetadataServiceClient.zone_path(project, location, lake, zone) - assert expected == actual - - -def test_parse_zone_path(): - expected = { - "project": "whelk", - "location": "octopus", - "lake": "oyster", - "zone": "nudibranch", - } - path = MetadataServiceClient.zone_path(**expected) - - # Check that the path construction is reversible. 
-    actual = MetadataServiceClient.parse_zone_path(path)
-    assert expected == actual
-
-def test_common_billing_account_path():
-    billing_account = "cuttlefish"
-    expected = "billingAccounts/{billing_account}".format(billing_account=billing_account, )
-    actual = MetadataServiceClient.common_billing_account_path(billing_account)
-    assert expected == actual
-
-
-def test_parse_common_billing_account_path():
-    expected = {
-        "billing_account": "mussel",
-    }
-    path = MetadataServiceClient.common_billing_account_path(**expected)
-
-    # Check that the path construction is reversible.
-    actual = MetadataServiceClient.parse_common_billing_account_path(path)
-    assert expected == actual
-
-def test_common_folder_path():
-    folder = "winkle"
-    expected = "folders/{folder}".format(folder=folder, )
-    actual = MetadataServiceClient.common_folder_path(folder)
-    assert expected == actual
-
-
-def test_parse_common_folder_path():
-    expected = {
-        "folder": "nautilus",
-    }
-    path = MetadataServiceClient.common_folder_path(**expected)
-
-    # Check that the path construction is reversible.
-    actual = MetadataServiceClient.parse_common_folder_path(path)
-    assert expected == actual
-
-def test_common_organization_path():
-    organization = "scallop"
-    expected = "organizations/{organization}".format(organization=organization, )
-    actual = MetadataServiceClient.common_organization_path(organization)
-    assert expected == actual
-
-
-def test_parse_common_organization_path():
-    expected = {
-        "organization": "abalone",
-    }
-    path = MetadataServiceClient.common_organization_path(**expected)
-
-    # Check that the path construction is reversible.
-    actual = MetadataServiceClient.parse_common_organization_path(path)
-    assert expected == actual
-
-def test_common_project_path():
-    project = "squid"
-    expected = "projects/{project}".format(project=project, )
-    actual = MetadataServiceClient.common_project_path(project)
-    assert expected == actual
-
-
-def test_parse_common_project_path():
-    expected = {
-        "project": "clam",
-    }
-    path = MetadataServiceClient.common_project_path(**expected)
-
-    # Check that the path construction is reversible.
-    actual = MetadataServiceClient.parse_common_project_path(path)
-    assert expected == actual
-
-def test_common_location_path():
-    project = "whelk"
-    location = "octopus"
-    expected = "projects/{project}/locations/{location}".format(project=project, location=location, )
-    actual = MetadataServiceClient.common_location_path(project, location)
-    assert expected == actual
-
-
-def test_parse_common_location_path():
-    expected = {
-        "project": "oyster",
-        "location": "nudibranch",
-    }
-    path = MetadataServiceClient.common_location_path(**expected)
-
-    # Check that the path construction is reversible.
-    actual = MetadataServiceClient.parse_common_location_path(path)
-    assert expected == actual
-
-
-def test_client_with_default_client_info():
-    client_info = gapic_v1.client_info.ClientInfo()
-
-    with mock.patch.object(transports.MetadataServiceTransport, '_prep_wrapped_messages') as prep:
-        client = MetadataServiceClient(
-            credentials=ga_credentials.AnonymousCredentials(),
-            client_info=client_info,
-        )
-        prep.assert_called_once_with(client_info)
-
-    with mock.patch.object(transports.MetadataServiceTransport, '_prep_wrapped_messages') as prep:
-        transport_class = MetadataServiceClient.get_transport_class()
-        transport = transport_class(
-            credentials=ga_credentials.AnonymousCredentials(),
-            client_info=client_info,
-        )
-        prep.assert_called_once_with(client_info)
-
-
-def test_delete_operation(transport: str = "grpc"):
-    client = MetadataServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(), transport=transport,
-    )
-
-    # Everything is optional in proto3 as far as the runtime is concerned,
-    # and we are mocking out the actual API, so just send an empty request.
-    request = operations_pb2.DeleteOperationRequest()
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(type(client.transport.delete_operation), "__call__") as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = None
-        response = client.delete_operation(request)
-        # Establish that the underlying gRPC stub method was called.
-        assert len(call.mock_calls) == 1
-        _, args, _ = call.mock_calls[0]
-        assert args[0] == request
-
-    # Establish that the response is the type that we expect.
-    assert response is None
-@pytest.mark.asyncio
-async def test_delete_operation_async(transport: str = "grpc_asyncio"):
-    client = MetadataServiceAsyncClient(
-        credentials=async_anonymous_credentials(), transport=transport,
-    )
-
-    # Everything is optional in proto3 as far as the runtime is concerned,
-    # and we are mocking out the actual API, so just send an empty request.
-    request = operations_pb2.DeleteOperationRequest()
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(type(client.transport.delete_operation), "__call__") as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
-            None
-        )
-        response = await client.delete_operation(request)
-        # Establish that the underlying gRPC stub method was called.
-        assert len(call.mock_calls) == 1
-        _, args, _ = call.mock_calls[0]
-        assert args[0] == request
-
-    # Establish that the response is the type that we expect.
-    assert response is None
-
-def test_delete_operation_field_headers():
-    client = MetadataServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-    )
-
-    # Any value that is part of the HTTP/1.1 URI should be sent as
-    # a field header. Set these to a non-empty value.
-    request = operations_pb2.DeleteOperationRequest()
-    request.name = "locations"
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(type(client.transport.delete_operation), "__call__") as call:
-        call.return_value = None
-
-        client.delete_operation(request)
-        # Establish that the underlying gRPC stub method was called.
-        assert len(call.mock_calls) == 1
-        _, args, _ = call.mock_calls[0]
-        assert args[0] == request
-
-    # Establish that the field header was sent.
-    _, _, kw = call.mock_calls[0]
-    assert ("x-goog-request-params", "name=locations",) in kw["metadata"]
-@pytest.mark.asyncio
-async def test_delete_operation_field_headers_async():
-    client = MetadataServiceAsyncClient(
-        credentials=async_anonymous_credentials(),
-    )
-
-    # Any value that is part of the HTTP/1.1 URI should be sent as
-    # a field header. Set these to a non-empty value.
-    request = operations_pb2.DeleteOperationRequest()
-    request.name = "locations"
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(type(client.transport.delete_operation), "__call__") as call:
-        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
-            None
-        )
-        await client.delete_operation(request)
-        # Establish that the underlying gRPC stub method was called.
-        assert len(call.mock_calls) == 1
-        _, args, _ = call.mock_calls[0]
-        assert args[0] == request
-
-    # Establish that the field header was sent.
-    _, _, kw = call.mock_calls[0]
-    assert ("x-goog-request-params", "name=locations",) in kw["metadata"]
-
-def test_delete_operation_from_dict():
-    client = MetadataServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-    )
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(type(client.transport.delete_operation), "__call__") as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = None
-
-        response = client.delete_operation(
-            request={
-                "name": "locations",
-            }
-        )
-        call.assert_called()
-@pytest.mark.asyncio
-async def test_delete_operation_from_dict_async():
-    client = MetadataServiceAsyncClient(
-        credentials=async_anonymous_credentials(),
-    )
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(type(client.transport.delete_operation), "__call__") as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
-            None
-        )
-        response = await client.delete_operation(
-            request={
-                "name": "locations",
-            }
-        )
-        call.assert_called()
-
-
-def test_cancel_operation(transport: str = "grpc"):
-    client = MetadataServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(), transport=transport,
-    )
-
-    # Everything is optional in proto3 as far as the runtime is concerned,
-    # and we are mocking out the actual API, so just send an empty request.
-    request = operations_pb2.CancelOperationRequest()
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = None
-        response = client.cancel_operation(request)
-        # Establish that the underlying gRPC stub method was called.
-        assert len(call.mock_calls) == 1
-        _, args, _ = call.mock_calls[0]
-        assert args[0] == request
-
-    # Establish that the response is the type that we expect.
-    assert response is None
-@pytest.mark.asyncio
-async def test_cancel_operation_async(transport: str = "grpc_asyncio"):
-    client = MetadataServiceAsyncClient(
-        credentials=async_anonymous_credentials(), transport=transport,
-    )
-
-    # Everything is optional in proto3 as far as the runtime is concerned,
-    # and we are mocking out the actual API, so just send an empty request.
-    request = operations_pb2.CancelOperationRequest()
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
-            None
-        )
-        response = await client.cancel_operation(request)
-        # Establish that the underlying gRPC stub method was called.
-        assert len(call.mock_calls) == 1
-        _, args, _ = call.mock_calls[0]
-        assert args[0] == request
-
-    # Establish that the response is the type that we expect.
-    assert response is None
-
-def test_cancel_operation_field_headers():
-    client = MetadataServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-    )
-
-    # Any value that is part of the HTTP/1.1 URI should be sent as
-    # a field header. Set these to a non-empty value.
-    request = operations_pb2.CancelOperationRequest()
-    request.name = "locations"
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call:
-        call.return_value = None
-
-        client.cancel_operation(request)
-        # Establish that the underlying gRPC stub method was called.
-        assert len(call.mock_calls) == 1
-        _, args, _ = call.mock_calls[0]
-        assert args[0] == request
-
-    # Establish that the field header was sent.
-    _, _, kw = call.mock_calls[0]
-    assert ("x-goog-request-params", "name=locations",) in kw["metadata"]
-@pytest.mark.asyncio
-async def test_cancel_operation_field_headers_async():
-    client = MetadataServiceAsyncClient(
-        credentials=async_anonymous_credentials(),
-    )
-
-    # Any value that is part of the HTTP/1.1 URI should be sent as
-    # a field header. Set these to a non-empty value.
-    request = operations_pb2.CancelOperationRequest()
-    request.name = "locations"
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call:
-        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
-            None
-        )
-        await client.cancel_operation(request)
-        # Establish that the underlying gRPC stub method was called.
-        assert len(call.mock_calls) == 1
-        _, args, _ = call.mock_calls[0]
-        assert args[0] == request
-
-    # Establish that the field header was sent.
-    _, _, kw = call.mock_calls[0]
-    assert ("x-goog-request-params", "name=locations",) in kw["metadata"]
-
-def test_cancel_operation_from_dict():
-    client = MetadataServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-    )
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = None
-
-        response = client.cancel_operation(
-            request={
-                "name": "locations",
-            }
-        )
-        call.assert_called()
-@pytest.mark.asyncio
-async def test_cancel_operation_from_dict_async():
-    client = MetadataServiceAsyncClient(
-        credentials=async_anonymous_credentials(),
-    )
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
-            None
-        )
-        response = await client.cancel_operation(
-            request={
-                "name": "locations",
-            }
-        )
-        call.assert_called()
-
-
-def test_get_operation(transport: str = "grpc"):
-    client = MetadataServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(), transport=transport,
-    )
-
-    # Everything is optional in proto3 as far as the runtime is concerned,
-    # and we are mocking out the actual API, so just send an empty request.
-    request = operations_pb2.GetOperationRequest()
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(type(client.transport.get_operation), "__call__") as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = operations_pb2.Operation()
-        response = client.get_operation(request)
-        # Establish that the underlying gRPC stub method was called.
-        assert len(call.mock_calls) == 1
-        _, args, _ = call.mock_calls[0]
-        assert args[0] == request
-
-    # Establish that the response is the type that we expect.
-    assert isinstance(response, operations_pb2.Operation)
-@pytest.mark.asyncio
-async def test_get_operation_async(transport: str = "grpc_asyncio"):
-    client = MetadataServiceAsyncClient(
-        credentials=async_anonymous_credentials(), transport=transport,
-    )
-
-    # Everything is optional in proto3 as far as the runtime is concerned,
-    # and we are mocking out the actual API, so just send an empty request.
-    request = operations_pb2.GetOperationRequest()
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(type(client.transport.get_operation), "__call__") as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
-            operations_pb2.Operation()
-        )
-        response = await client.get_operation(request)
-        # Establish that the underlying gRPC stub method was called.
-        assert len(call.mock_calls) == 1
-        _, args, _ = call.mock_calls[0]
-        assert args[0] == request
-
-    # Establish that the response is the type that we expect.
-    assert isinstance(response, operations_pb2.Operation)
-
-def test_get_operation_field_headers():
-    client = MetadataServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-    )
-
-    # Any value that is part of the HTTP/1.1 URI should be sent as
-    # a field header. Set these to a non-empty value.
-    request = operations_pb2.GetOperationRequest()
-    request.name = "locations"
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(type(client.transport.get_operation), "__call__") as call:
-        call.return_value = operations_pb2.Operation()
-
-        client.get_operation(request)
-        # Establish that the underlying gRPC stub method was called.
-        assert len(call.mock_calls) == 1
-        _, args, _ = call.mock_calls[0]
-        assert args[0] == request
-
-    # Establish that the field header was sent.
-    _, _, kw = call.mock_calls[0]
-    assert ("x-goog-request-params", "name=locations",) in kw["metadata"]
-@pytest.mark.asyncio
-async def test_get_operation_field_headers_async():
-    client = MetadataServiceAsyncClient(
-        credentials=async_anonymous_credentials(),
-    )
-
-    # Any value that is part of the HTTP/1.1 URI should be sent as
-    # a field header. Set these to a non-empty value.
-    request = operations_pb2.GetOperationRequest()
-    request.name = "locations"
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(type(client.transport.get_operation), "__call__") as call:
-        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
-            operations_pb2.Operation()
-        )
-        await client.get_operation(request)
-        # Establish that the underlying gRPC stub method was called.
-        assert len(call.mock_calls) == 1
-        _, args, _ = call.mock_calls[0]
-        assert args[0] == request
-
-    # Establish that the field header was sent.
-    _, _, kw = call.mock_calls[0]
-    assert ("x-goog-request-params", "name=locations",) in kw["metadata"]
-
-def test_get_operation_from_dict():
-    client = MetadataServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-    )
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(type(client.transport.get_operation), "__call__") as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = operations_pb2.Operation()
-
-        response = client.get_operation(
-            request={
-                "name": "locations",
-            }
-        )
-        call.assert_called()
-@pytest.mark.asyncio
-async def test_get_operation_from_dict_async():
-    client = MetadataServiceAsyncClient(
-        credentials=async_anonymous_credentials(),
-    )
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(type(client.transport.get_operation), "__call__") as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
-            operations_pb2.Operation()
-        )
-        response = await client.get_operation(
-            request={
-                "name": "locations",
-            }
-        )
-        call.assert_called()
-
-
-def test_list_operations(transport: str = "grpc"):
-    client = MetadataServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(), transport=transport,
-    )
-
-    # Everything is optional in proto3 as far as the runtime is concerned,
-    # and we are mocking out the actual API, so just send an empty request.
-    request = operations_pb2.ListOperationsRequest()
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(type(client.transport.list_operations), "__call__") as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = operations_pb2.ListOperationsResponse()
-        response = client.list_operations(request)
-        # Establish that the underlying gRPC stub method was called.
-        assert len(call.mock_calls) == 1
-        _, args, _ = call.mock_calls[0]
-        assert args[0] == request
-
-    # Establish that the response is the type that we expect.
-    assert isinstance(response, operations_pb2.ListOperationsResponse)
-@pytest.mark.asyncio
-async def test_list_operations_async(transport: str = "grpc_asyncio"):
-    client = MetadataServiceAsyncClient(
-        credentials=async_anonymous_credentials(), transport=transport,
-    )
-
-    # Everything is optional in proto3 as far as the runtime is concerned,
-    # and we are mocking out the actual API, so just send an empty request.
-    request = operations_pb2.ListOperationsRequest()
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(type(client.transport.list_operations), "__call__") as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
-            operations_pb2.ListOperationsResponse()
-        )
-        response = await client.list_operations(request)
-        # Establish that the underlying gRPC stub method was called.
-        assert len(call.mock_calls) == 1
-        _, args, _ = call.mock_calls[0]
-        assert args[0] == request
-
-    # Establish that the response is the type that we expect.
-    assert isinstance(response, operations_pb2.ListOperationsResponse)
-
-def test_list_operations_field_headers():
-    client = MetadataServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-    )
-
-    # Any value that is part of the HTTP/1.1 URI should be sent as
-    # a field header. Set these to a non-empty value.
-    request = operations_pb2.ListOperationsRequest()
-    request.name = "locations"
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(type(client.transport.list_operations), "__call__") as call:
-        call.return_value = operations_pb2.ListOperationsResponse()
-
-        client.list_operations(request)
-        # Establish that the underlying gRPC stub method was called.
-        assert len(call.mock_calls) == 1
-        _, args, _ = call.mock_calls[0]
-        assert args[0] == request
-
-    # Establish that the field header was sent.
-    _, _, kw = call.mock_calls[0]
-    assert ("x-goog-request-params", "name=locations",) in kw["metadata"]
-@pytest.mark.asyncio
-async def test_list_operations_field_headers_async():
-    client = MetadataServiceAsyncClient(
-        credentials=async_anonymous_credentials(),
-    )
-
-    # Any value that is part of the HTTP/1.1 URI should be sent as
-    # a field header. Set these to a non-empty value.
-    request = operations_pb2.ListOperationsRequest()
-    request.name = "locations"
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(type(client.transport.list_operations), "__call__") as call:
-        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
-            operations_pb2.ListOperationsResponse()
-        )
-        await client.list_operations(request)
-        # Establish that the underlying gRPC stub method was called.
-        assert len(call.mock_calls) == 1
-        _, args, _ = call.mock_calls[0]
-        assert args[0] == request
-
-    # Establish that the field header was sent.
-    _, _, kw = call.mock_calls[0]
-    assert ("x-goog-request-params", "name=locations",) in kw["metadata"]
-
-def test_list_operations_from_dict():
-    client = MetadataServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-    )
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(type(client.transport.list_operations), "__call__") as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = operations_pb2.ListOperationsResponse()
-
-        response = client.list_operations(
-            request={
-                "name": "locations",
-            }
-        )
-        call.assert_called()
-@pytest.mark.asyncio
-async def test_list_operations_from_dict_async():
-    client = MetadataServiceAsyncClient(
-        credentials=async_anonymous_credentials(),
-    )
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(type(client.transport.list_operations), "__call__") as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
-            operations_pb2.ListOperationsResponse()
-        )
-        response = await client.list_operations(
-            request={
-                "name": "locations",
-            }
-        )
-        call.assert_called()
-
-
-def test_list_locations(transport: str = "grpc"):
-    client = MetadataServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(), transport=transport,
-    )
-
-    # Everything is optional in proto3 as far as the runtime is concerned,
-    # and we are mocking out the actual API, so just send an empty request.
-    request = locations_pb2.ListLocationsRequest()
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(type(client.transport.list_locations), "__call__") as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = locations_pb2.ListLocationsResponse()
-        response = client.list_locations(request)
-        # Establish that the underlying gRPC stub method was called.
-        assert len(call.mock_calls) == 1
-        _, args, _ = call.mock_calls[0]
-        assert args[0] == request
-
-    # Establish that the response is the type that we expect.
-    assert isinstance(response, locations_pb2.ListLocationsResponse)
-@pytest.mark.asyncio
-async def test_list_locations_async(transport: str = "grpc_asyncio"):
-    client = MetadataServiceAsyncClient(
-        credentials=async_anonymous_credentials(), transport=transport,
-    )
-
-    # Everything is optional in proto3 as far as the runtime is concerned,
-    # and we are mocking out the actual API, so just send an empty request.
-    request = locations_pb2.ListLocationsRequest()
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(type(client.transport.list_locations), "__call__") as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
-            locations_pb2.ListLocationsResponse()
-        )
-        response = await client.list_locations(request)
-        # Establish that the underlying gRPC stub method was called.
-        assert len(call.mock_calls) == 1
-        _, args, _ = call.mock_calls[0]
-        assert args[0] == request
-
-    # Establish that the response is the type that we expect.
-    assert isinstance(response, locations_pb2.ListLocationsResponse)
-
-def test_list_locations_field_headers():
-    client = MetadataServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-    )
-
-    # Any value that is part of the HTTP/1.1 URI should be sent as
-    # a field header. Set these to a non-empty value.
-    request = locations_pb2.ListLocationsRequest()
-    request.name = "locations"
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(type(client.transport.list_locations), "__call__") as call:
-        call.return_value = locations_pb2.ListLocationsResponse()
-
-        client.list_locations(request)
-        # Establish that the underlying gRPC stub method was called.
-        assert len(call.mock_calls) == 1
-        _, args, _ = call.mock_calls[0]
-        assert args[0] == request
-
-    # Establish that the field header was sent.
-    _, _, kw = call.mock_calls[0]
-    assert ("x-goog-request-params", "name=locations",) in kw["metadata"]
-@pytest.mark.asyncio
-async def test_list_locations_field_headers_async():
-    client = MetadataServiceAsyncClient(
-        credentials=async_anonymous_credentials(),
-    )
-
-    # Any value that is part of the HTTP/1.1 URI should be sent as
-    # a field header. Set these to a non-empty value.
-    request = locations_pb2.ListLocationsRequest()
-    request.name = "locations"
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(type(client.transport.list_locations), "__call__") as call:
-        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
-            locations_pb2.ListLocationsResponse()
-        )
-        await client.list_locations(request)
-        # Establish that the underlying gRPC stub method was called.
-        assert len(call.mock_calls) == 1
-        _, args, _ = call.mock_calls[0]
-        assert args[0] == request
-
-    # Establish that the field header was sent.
-    _, _, kw = call.mock_calls[0]
-    assert ("x-goog-request-params", "name=locations",) in kw["metadata"]
-
-def test_list_locations_from_dict():
-    client = MetadataServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-    )
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(type(client.transport.list_locations), "__call__") as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = locations_pb2.ListLocationsResponse()
-
-        response = client.list_locations(
-            request={
-                "name": "locations",
-            }
-        )
-        call.assert_called()
-@pytest.mark.asyncio
-async def test_list_locations_from_dict_async():
-    client = MetadataServiceAsyncClient(
-        credentials=async_anonymous_credentials(),
-    )
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(type(client.transport.list_locations), "__call__") as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
-            locations_pb2.ListLocationsResponse()
-        )
-        response = await client.list_locations(
-            request={
-                "name": "locations",
-            }
-        )
-        call.assert_called()
-
-
-def test_get_location(transport: str = "grpc"):
-    client = MetadataServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(), transport=transport,
-    )
-
-    # Everything is optional in proto3 as far as the runtime is concerned,
-    # and we are mocking out the actual API, so just send an empty request.
-    request = locations_pb2.GetLocationRequest()
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(type(client.transport.get_location), "__call__") as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = locations_pb2.Location()
-        response = client.get_location(request)
-        # Establish that the underlying gRPC stub method was called.
-        assert len(call.mock_calls) == 1
-        _, args, _ = call.mock_calls[0]
-        assert args[0] == request
-
-    # Establish that the response is the type that we expect.
-    assert isinstance(response, locations_pb2.Location)
-@pytest.mark.asyncio
-async def test_get_location_async(transport: str = "grpc_asyncio"):
-    client = MetadataServiceAsyncClient(
-        credentials=async_anonymous_credentials(), transport=transport,
-    )
-
-    # Everything is optional in proto3 as far as the runtime is concerned,
-    # and we are mocking out the actual API, so just send an empty request.
-    request = locations_pb2.GetLocationRequest()
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(type(client.transport.get_location), "__call__") as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
-            locations_pb2.Location()
-        )
-        response = await client.get_location(request)
-        # Establish that the underlying gRPC stub method was called.
-        assert len(call.mock_calls) == 1
-        _, args, _ = call.mock_calls[0]
-        assert args[0] == request
-
-    # Establish that the response is the type that we expect.
-    assert isinstance(response, locations_pb2.Location)
-
-def test_get_location_field_headers():
-    client = MetadataServiceClient(
-        credentials=ga_credentials.AnonymousCredentials())
-
-    # Any value that is part of the HTTP/1.1 URI should be sent as
-    # a field header. Set these to a non-empty value.
-    request = locations_pb2.GetLocationRequest()
-    request.name = "locations/abc"
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(type(client.transport.get_location), "__call__") as call:
-        call.return_value = locations_pb2.Location()
-
-        client.get_location(request)
-        # Establish that the underlying gRPC stub method was called.
-        assert len(call.mock_calls) == 1
-        _, args, _ = call.mock_calls[0]
-        assert args[0] == request
-
-    # Establish that the field header was sent.
-    _, _, kw = call.mock_calls[0]
-    assert ("x-goog-request-params", "name=locations/abc",) in kw["metadata"]
-@pytest.mark.asyncio
-async def test_get_location_field_headers_async():
-    client = MetadataServiceAsyncClient(
-        credentials=async_anonymous_credentials()
-    )
-
-    # Any value that is part of the HTTP/1.1 URI should be sent as
-    # a field header. Set these to a non-empty value.
-    request = locations_pb2.GetLocationRequest()
-    request.name = "locations/abc"
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(type(client.transport.get_location), "__call__") as call:
-        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
-            locations_pb2.Location()
-        )
-        await client.get_location(request)
-        # Establish that the underlying gRPC stub method was called.
-        assert len(call.mock_calls) == 1
-        _, args, _ = call.mock_calls[0]
-        assert args[0] == request
-
-    # Establish that the field header was sent.
-    _, _, kw = call.mock_calls[0]
-    assert ("x-goog-request-params", "name=locations/abc",) in kw["metadata"]
-
-def test_get_location_from_dict():
-    client = MetadataServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-    )
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(type(client.transport.list_locations), "__call__") as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = locations_pb2.Location()
-
-        response = client.get_location(
-            request={
-                "name": "locations/abc",
-            }
-        )
-        call.assert_called()
-@pytest.mark.asyncio
-async def test_get_location_from_dict_async():
-    client = MetadataServiceAsyncClient(
-        credentials=async_anonymous_credentials(),
-    )
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(type(client.transport.list_locations), "__call__") as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
-            locations_pb2.Location()
-        )
-        response = await client.get_location(
-            request={
-                "name": "locations",
-            }
-        )
-        call.assert_called()
-
-
-def test_transport_close_grpc():
-    client = MetadataServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport="grpc"
-    )
-    with mock.patch.object(type(getattr(client.transport, "_grpc_channel")), "close") as close:
-        with client:
-            close.assert_not_called()
-        close.assert_called_once()
-
-
-@pytest.mark.asyncio
-async def test_transport_close_grpc_asyncio():
-    client = MetadataServiceAsyncClient(
-        credentials=async_anonymous_credentials(),
-        transport="grpc_asyncio"
-    )
-    with mock.patch.object(type(getattr(client.transport, "_grpc_channel")), "close") as close:
-        async with client:
-            close.assert_not_called()
-        close.assert_called_once()
-
-
-def test_client_ctx():
-    transports = [
-        'grpc',
-    ]
-    for transport in transports:
-        client = MetadataServiceClient(
-            credentials=ga_credentials.AnonymousCredentials(),
-            transport=transport
-        )
-        # Test client calls underlying transport.
-        with mock.patch.object(type(client.transport), "close") as close:
-            close.assert_not_called()
-            with client:
-                pass
-            close.assert_called()
-
-@pytest.mark.parametrize("client_class,transport_class", [
-    (MetadataServiceClient, transports.MetadataServiceGrpcTransport),
-    (MetadataServiceAsyncClient, transports.MetadataServiceGrpcAsyncIOTransport),
-])
-def test_api_key_credentials(client_class, transport_class):
-    with mock.patch.object(
-        google.auth._default, "get_api_key_credentials", create=True
-    ) as get_api_key_credentials:
-        mock_cred = mock.Mock()
-        get_api_key_credentials.return_value = mock_cred
-        options = client_options.ClientOptions()
-        options.api_key = "api_key"
-        with mock.patch.object(transport_class, "__init__") as patched:
-            patched.return_value = None
-            client = client_class(client_options=options)
-            patched.assert_called_once_with(
-                credentials=mock_cred,
-                credentials_file=None,
-                host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE),
-                scopes=None,
-                client_cert_source_for_mtls=None,
-                quota_project_id=None,
-                client_info=transports.base.DEFAULT_CLIENT_INFO,
-                always_use_jwt_access=True,
-                api_audience=None,
-            )
diff --git a/packages/google-cloud-dataplex/google/cloud/dataplex/__init__.py b/packages/google-cloud-dataplex/google/cloud/dataplex/__init__.py
index 550d56ca1cc1..f4a35071ac18 100644
--- a/packages/google-cloud-dataplex/google/cloud/dataplex/__init__.py
+++ b/packages/google-cloud-dataplex/google/cloud/dataplex/__init__.py
@@ -59,10 +59,12 @@
     Aspect,
    AspectSource,
     AspectType,
+    CancelMetadataJobRequest,
     CreateAspectTypeRequest,
     CreateEntryGroupRequest,
     CreateEntryRequest,
     CreateEntryTypeRequest,
+    CreateMetadataJobRequest,
     DeleteAspectTypeRequest,
     DeleteEntryGroupRequest,
     DeleteEntryRequest,
@@ -76,6 +78,8 @@
     GetEntryGroupRequest,
     GetEntryRequest,
     GetEntryTypeRequest,
+    GetMetadataJobRequest,
+    ImportItem,
     ListAspectTypesRequest,
     ListAspectTypesResponse,
     ListEntriesRequest,
@@ -84,7 +88,10 @@
     ListEntryGroupsResponse,
     ListEntryTypesRequest,
     ListEntryTypesResponse,
+    ListMetadataJobsRequest,
+    ListMetadataJobsResponse,
     LookupEntryRequest,
+    MetadataJob,
     SearchEntriesRequest,
     SearchEntriesResponse,
     SearchEntriesResult,
@@ -102,6 +109,10 @@
     ListContentResponse,
     UpdateContentRequest,
 )
+from google.cloud.dataplex_v1.types.data_discovery import (
+    DataDiscoveryResult,
+    DataDiscoverySpec,
+)
 from google.cloud.dataplex_v1.types.data_profile import (
     DataProfileResult,
     DataProfileSpec,
@@ -259,10 +270,12 @@
     "Aspect",
     "AspectSource",
     "AspectType",
+    "CancelMetadataJobRequest",
     "CreateAspectTypeRequest",
     "CreateEntryGroupRequest",
     "CreateEntryRequest",
     "CreateEntryTypeRequest",
+    "CreateMetadataJobRequest",
     "DeleteAspectTypeRequest",
     "DeleteEntryGroupRequest",
     "DeleteEntryRequest",
@@ -275,6 +288,8 @@
     "GetEntryGroupRequest",
     "GetEntryRequest",
     "GetEntryTypeRequest",
+    "GetMetadataJobRequest",
+    "ImportItem",
     "ListAspectTypesRequest",
     "ListAspectTypesResponse",
     "ListEntriesRequest",
@@ -283,7 +298,10 @@
     "ListEntryGroupsResponse",
     "ListEntryTypesRequest",
     "ListEntryTypesResponse",
+    "ListMetadataJobsRequest",
+    "ListMetadataJobsResponse",
     "LookupEntryRequest",
+    "MetadataJob",
     "SearchEntriesRequest",
     "SearchEntriesResponse",
     "SearchEntriesResult",
@@ -299,6 +317,8 @@
     "ListContentRequest",
     "ListContentResponse",
     "UpdateContentRequest",
+    "DataDiscoveryResult",
+    "DataDiscoverySpec",
     "DataProfileResult",
     "DataProfileSpec",
     "DataQualityColumnResult",
diff --git a/packages/google-cloud-dataplex/google/cloud/dataplex/gapic_version.py b/packages/google-cloud-dataplex/google/cloud/dataplex/gapic_version.py
index dd79fdad215c..558c8aab67c5 100644
--- a/packages/google-cloud-dataplex/google/cloud/dataplex/gapic_version.py
+++ b/packages/google-cloud-dataplex/google/cloud/dataplex/gapic_version.py
@@ -13,4 +13,4 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 #
-__version__ = "2.3.1"  # {x-release-please-version}
+__version__ = "0.0.0"  # {x-release-please-version}
diff --git a/packages/google-cloud-dataplex/google/cloud/dataplex_v1/__init__.py b/packages/google-cloud-dataplex/google/cloud/dataplex_v1/__init__.py
index d4fbe71a5618..304e760fd71c 100644
--- a/packages/google-cloud-dataplex/google/cloud/dataplex_v1/__init__.py
+++ b/packages/google-cloud-dataplex/google/cloud/dataplex_v1/__init__.py
@@ -35,10 +35,12 @@
     Aspect,
     AspectSource,
     AspectType,
+    CancelMetadataJobRequest,
     CreateAspectTypeRequest,
     CreateEntryGroupRequest,
     CreateEntryRequest,
     CreateEntryTypeRequest,
+    CreateMetadataJobRequest,
     DeleteAspectTypeRequest,
     DeleteEntryGroupRequest,
     DeleteEntryRequest,
@@ -52,6 +54,8 @@
     GetEntryGroupRequest,
     GetEntryRequest,
     GetEntryTypeRequest,
+    GetMetadataJobRequest,
+    ImportItem,
     ListAspectTypesRequest,
     ListAspectTypesResponse,
     ListEntriesRequest,
@@ -60,7 +64,10 @@
     ListEntryGroupsResponse,
     ListEntryTypesRequest,
     ListEntryTypesResponse,
+    ListMetadataJobsRequest,
+    ListMetadataJobsResponse,
     LookupEntryRequest,
+    MetadataJob,
     SearchEntriesRequest,
     SearchEntriesResponse,
     SearchEntriesResult,
@@ -78,6 +85,7 @@
     ListContentResponse,
     UpdateContentRequest,
 )
+from .types.data_discovery import DataDiscoveryResult, DataDiscoverySpec
 from .types.data_profile import DataProfileResult, DataProfileSpec
 from .types.data_quality import (
     DataQualityColumnResult,
@@ -220,6 +228,7 @@
     "Asset",
     "AssetStatus",
     "CancelJobRequest",
+    "CancelMetadataJobRequest",
     "CatalogServiceClient",
     "Content",
     "ContentServiceClient",
@@ -236,12 +245,15 @@
     "CreateEntryTypeRequest",
     "CreateEnvironmentRequest",
     "CreateLakeRequest",
+    "CreateMetadataJobRequest",
     "CreatePartitionRequest",
     "CreateTaskRequest",
     "CreateZoneRequest",
     "DataAccessSpec",
     "DataAttribute",
     "DataAttributeBinding",
+    "DataDiscoveryResult",
+    "DataDiscoverySpec",
     "DataProfileResult",
     "DataProfileSpec",
     "DataQualityColumnResult",
@@ -302,10 +314,12 @@
     "GetEnvironmentRequest",
     "GetJobRequest",
     "GetLakeRequest",
+    "GetMetadataJobRequest",
     "GetPartitionRequest",
     "GetTaskRequest",
     "GetZoneRequest",
     "GovernanceEvent",
+    "ImportItem",
     "Job",
     "JobEvent",
     "Lake",
@@ -342,6 +356,8 @@
     "ListLakeActionsRequest",
     "ListLakesRequest",
     "ListLakesResponse",
+    "ListMetadataJobsRequest",
+    "ListMetadataJobsResponse",
     "ListPartitionsRequest",
     "ListPartitionsResponse",
     "ListSessionsRequest",
@@ -352,6 +368,7 @@
     "ListZonesRequest",
     "ListZonesResponse",
     "LookupEntryRequest",
+    "MetadataJob",
     "MetadataServiceClient",
     "OperationMetadata",
     "Partition",
diff --git a/packages/google-cloud-dataplex/google/cloud/dataplex_v1/gapic_metadata.json b/packages/google-cloud-dataplex/google/cloud/dataplex_v1/gapic_metadata.json
index dcc696790241..9fb1150241c8 100644
--- a/packages/google-cloud-dataplex/google/cloud/dataplex_v1/gapic_metadata.json
+++ b/packages/google-cloud-dataplex/google/cloud/dataplex_v1/gapic_metadata.json
@@ -10,6 +10,11 @@
         "grpc": {
           "libraryClient": "CatalogServiceClient",
           "rpcs": {
+            "CancelMetadataJob": {
+              "methods": [
+                "cancel_metadata_job"
+              ]
+            },
             "CreateAspectType": {
               "methods": [
                 "create_aspect_type"
               ]
             },
@@ -30,6 +35,11 @@
                 "create_entry_type"
              ]
             },
+ "CreateMetadataJob": { + "methods": [ + "create_metadata_job" + ] + }, "DeleteAspectType": { "methods": [ "delete_aspect_type" @@ -70,6 +80,11 @@ "get_entry_type" ] }, + "GetMetadataJob": { + "methods": [ + "get_metadata_job" + ] + }, "ListAspectTypes": { "methods": [ "list_aspect_types" @@ -90,6 +105,11 @@ "list_entry_types" ] }, + "ListMetadataJobs": { + "methods": [ + "list_metadata_jobs" + ] + }, "LookupEntry": { "methods": [ "lookup_entry" @@ -125,6 +145,11 @@ "grpc-async": { "libraryClient": "CatalogServiceAsyncClient", "rpcs": { + "CancelMetadataJob": { + "methods": [ + "cancel_metadata_job" + ] + }, "CreateAspectType": { "methods": [ "create_aspect_type" @@ -145,6 +170,11 @@ "create_entry_type" ] }, + "CreateMetadataJob": { + "methods": [ + "create_metadata_job" + ] + }, "DeleteAspectType": { "methods": [ "delete_aspect_type" @@ -185,6 +215,11 @@ "get_entry_type" ] }, + "GetMetadataJob": { + "methods": [ + "get_metadata_job" + ] + }, "ListAspectTypes": { "methods": [ "list_aspect_types" @@ -205,6 +240,11 @@ "list_entry_types" ] }, + "ListMetadataJobs": { + "methods": [ + "list_metadata_jobs" + ] + }, "LookupEntry": { "methods": [ "lookup_entry" diff --git a/packages/google-cloud-dataplex/google/cloud/dataplex_v1/gapic_version.py b/packages/google-cloud-dataplex/google/cloud/dataplex_v1/gapic_version.py index dd79fdad215c..558c8aab67c5 100644 --- a/packages/google-cloud-dataplex/google/cloud/dataplex_v1/gapic_version.py +++ b/packages/google-cloud-dataplex/google/cloud/dataplex_v1/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "2.3.1" # {x-release-please-version} +__version__ = "0.0.0" # {x-release-please-version} diff --git a/packages/google-cloud-dataplex/google/cloud/dataplex_v1/services/catalog_service/async_client.py b/packages/google-cloud-dataplex/google/cloud/dataplex_v1/services/catalog_service/async_client.py index 76bad2927899..f5523aa8de39 100644 --- a/packages/google-cloud-dataplex/google/cloud/dataplex_v1/services/catalog_service/async_client.py +++ b/packages/google-cloud-dataplex/google/cloud/dataplex_v1/services/catalog_service/async_client.py @@ -62,10 +62,10 @@ class CatalogServiceAsyncClient: """The primary resources offered by this service are - EntryGroups, EntryTypes, AspectTypes, Entry and Aspect which - collectively allow a data administrator to organize, manage, - secure and catalog data across their organization located across - cloud projects in a variety of storage systems including Cloud + EntryGroups, EntryTypes, AspectTypes, and Entries. They + collectively let data administrators organize, manage, secure, + and catalog data located across cloud projects in their + organization in a variety of storage systems, including Cloud Storage and BigQuery. 
""" @@ -86,6 +86,8 @@ class CatalogServiceAsyncClient: parse_entry_group_path = staticmethod(CatalogServiceClient.parse_entry_group_path) entry_type_path = staticmethod(CatalogServiceClient.entry_type_path) parse_entry_type_path = staticmethod(CatalogServiceClient.parse_entry_type_path) + metadata_job_path = staticmethod(CatalogServiceClient.metadata_job_path) + parse_metadata_job_path = staticmethod(CatalogServiceClient.parse_metadata_job_path) common_billing_account_path = staticmethod( CatalogServiceClient.common_billing_account_path ) @@ -287,7 +289,7 @@ async def create_entry_type( timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> operation_async.AsyncOperation: - r"""Creates an EntryType + r"""Creates an EntryType. .. code-block:: python @@ -322,17 +324,17 @@ async def sample_create_entry_type(): Args: request (Optional[Union[google.cloud.dataplex_v1.types.CreateEntryTypeRequest, dict]]): - The request object. Create EntryType Request + The request object. Create EntryType Request. parent (:class:`str`): Required. The resource name of the EntryType, of the form: projects/{project_number}/locations/{location_id} - where ``location_id`` refers to a GCP region. + where ``location_id`` refers to a Google Cloud region. This corresponds to the ``parent`` field on the ``request`` instance; if ``request`` is provided, this should not be set. entry_type (:class:`google.cloud.dataplex_v1.types.EntryType`): - Required. EntryType Resource + Required. EntryType Resource. This corresponds to the ``entry_type`` field on the ``request`` instance; if ``request`` is provided, this should not be set. @@ -424,7 +426,7 @@ async def update_entry_type( timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> operation_async.AsyncOperation: - r"""Updates a EntryType resource. + r"""Updates an EntryType. .. code-block:: python @@ -457,9 +459,9 @@ async def sample_update_entry_type(): Args: request (Optional[Union[google.cloud.dataplex_v1.types.UpdateEntryTypeRequest, dict]]): - The request object. Update EntryType Request + The request object. Update EntryType Request. entry_type (:class:`google.cloud.dataplex_v1.types.EntryType`): - Required. EntryType Resource + Required. EntryType Resource. This corresponds to the ``entry_type`` field on the ``request`` instance; if ``request`` is provided, this should not be set. @@ -550,7 +552,7 @@ async def delete_entry_type( timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> operation_async.AsyncOperation: - r"""Deletes a EntryType resource. + r"""Deletes an EntryType. .. code-block:: python @@ -584,7 +586,7 @@ async def sample_delete_entry_type(): Args: request (Optional[Union[google.cloud.dataplex_v1.types.DeleteEntryTypeRequest, dict]]): - The request object. Delele EntryType Request + The request object. Delele EntryType Request. name (:class:`str`): Required. The resource name of the EntryType: ``projects/{project_number}/locations/{location_id}/entryTypes/{entry_type_id}``. @@ -713,7 +715,7 @@ async def sample_list_entry_types(): Required. The resource name of the EntryType location, of the form: ``projects/{project_number}/locations/{location_id}`` - where ``location_id`` refers to a GCP region. + where ``location_id`` refers to a Google Cloud region. 
This corresponds to the ``parent`` field on the ``request`` instance; if ``request`` is provided, this @@ -726,7 +728,7 @@ async def sample_list_entry_types(): Returns: google.cloud.dataplex_v1.services.catalog_service.pagers.ListEntryTypesAsyncPager: - List EntryTypes response + List EntryTypes response. Iterating over this object will yield results and resolve additional pages @@ -799,7 +801,7 @@ async def get_entry_type( timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> catalog.EntryType: - r"""Retrieves a EntryType resource. + r"""Gets an EntryType. .. code-block:: python @@ -829,7 +831,7 @@ async def sample_get_entry_type(): Args: request (Optional[Union[google.cloud.dataplex_v1.types.GetEntryTypeRequest, dict]]): - The request object. Get EntryType request + The request object. Get EntryType request. name (:class:`str`): Required. The resource name of the EntryType: ``projects/{project_number}/locations/{location_id}/entryTypes/{entry_type_id}``. @@ -906,7 +908,7 @@ async def create_aspect_type( timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> operation_async.AsyncOperation: - r"""Creates an AspectType + r"""Creates an AspectType. .. code-block:: python @@ -946,17 +948,17 @@ async def sample_create_aspect_type(): Args: request (Optional[Union[google.cloud.dataplex_v1.types.CreateAspectTypeRequest, dict]]): - The request object. Create AspectType Request + The request object. Create AspectType Request. parent (:class:`str`): Required. The resource name of the AspectType, of the form: projects/{project_number}/locations/{location_id} - where ``location_id`` refers to a GCP region. + where ``location_id`` refers to a Google Cloud region. This corresponds to the ``parent`` field on the ``request`` instance; if ``request`` is provided, this should not be set. aspect_type (:class:`google.cloud.dataplex_v1.types.AspectType`): - Required. AspectType Resource + Required. AspectType Resource. This corresponds to the ``aspect_type`` field on the ``request`` instance; if ``request`` is provided, this should not be set. @@ -975,9 +977,9 @@ async def sample_create_aspect_type(): google.api_core.operation_async.AsyncOperation: An object representing a long-running operation. - The result type for the operation will be :class:`google.cloud.dataplex_v1.types.AspectType` Aspect Type is a template for creating Aspects, and represents the - JSON-schema for a given Entry, e.g., BigQuery Table - Schema. + The result type for the operation will be :class:`google.cloud.dataplex_v1.types.AspectType` AspectType is a template for creating Aspects, and represents the + JSON-schema for a given Entry, for example, BigQuery + Table Schema. """ # Create or coerce a protobuf request object. @@ -1048,7 +1050,7 @@ async def update_aspect_type( timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> operation_async.AsyncOperation: - r"""Updates a AspectType resource. + r"""Updates an AspectType. .. code-block:: python @@ -1107,9 +1109,9 @@ async def sample_update_aspect_type(): google.api_core.operation_async.AsyncOperation: An object representing a long-running operation. - The result type for the operation will be :class:`google.cloud.dataplex_v1.types.AspectType` Aspect Type is a template for creating Aspects, and represents the - JSON-schema for a given Entry, e.g., BigQuery Table - Schema. 
+ The result type for the operation will be :class:`google.cloud.dataplex_v1.types.AspectType` AspectType is a template for creating Aspects, and represents the + JSON-schema for a given Entry, for example, BigQuery + Table Schema. """ # Create or coerce a protobuf request object. @@ -1179,7 +1181,7 @@ async def delete_aspect_type( timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> operation_async.AsyncOperation: - r"""Deletes a AspectType resource. + r"""Deletes an AspectType. .. code-block:: python @@ -1213,7 +1215,7 @@ async def sample_delete_aspect_type(): Args: request (Optional[Union[google.cloud.dataplex_v1.types.DeleteAspectTypeRequest, dict]]): - The request object. Delele AspectType Request + The request object. Delete AspectType Request. name (:class:`str`): Required. The resource name of the AspectType: ``projects/{project_number}/locations/{location_id}/aspectTypes/{aspect_type_id}``. @@ -1337,12 +1339,12 @@ async def sample_list_aspect_types(): Args: request (Optional[Union[google.cloud.dataplex_v1.types.ListAspectTypesRequest, dict]]): - The request object. List AspectTypes request + The request object. List AspectTypes request. parent (:class:`str`): Required. The resource name of the AspectType location, of the form: ``projects/{project_number}/locations/{location_id}`` - where ``location_id`` refers to a GCP region. + where ``location_id`` refers to a Google Cloud region. This corresponds to the ``parent`` field on the ``request`` instance; if ``request`` is provided, this @@ -1355,7 +1357,7 @@ async def sample_list_aspect_types(): Returns: google.cloud.dataplex_v1.services.catalog_service.pagers.ListAspectTypesAsyncPager: - List AspectTypes response + List AspectTypes response. Iterating over this object will yield results and resolve additional pages @@ -1428,7 +1430,7 @@ async def get_aspect_type( timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> catalog.AspectType: - r"""Retrieves a AspectType resource. + r"""Gets an AspectType. .. code-block:: python @@ -1458,7 +1460,7 @@ async def sample_get_aspect_type(): Args: request (Optional[Union[google.cloud.dataplex_v1.types.GetAspectTypeRequest, dict]]): - The request object. Get AspectType request + The request object. Get AspectType request. name (:class:`str`): Required. The resource name of the AspectType: ``projects/{project_number}/locations/{location_id}/aspectTypes/{aspect_type_id}``. @@ -1474,10 +1476,10 @@ async def sample_get_aspect_type(): Returns: google.cloud.dataplex_v1.types.AspectType: - Aspect Type is a template for - creating Aspects, and represents the - JSON-schema for a given Entry, e.g., - BigQuery Table Schema. + AspectType is a template for creating + Aspects, and represents the JSON-schema + for a given Entry, for example, BigQuery + Table Schema. """ # Create or coerce a protobuf request object. @@ -1537,7 +1539,7 @@ async def create_entry_group( timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> operation_async.AsyncOperation: - r"""Creates an EntryGroup + r"""Creates an EntryGroup. .. code-block:: python @@ -1572,7 +1574,7 @@ async def sample_create_entry_group(): Args: request (Optional[Union[google.cloud.dataplex_v1.types.CreateEntryGroupRequest, dict]]): - The request object. Create EntryGroup Request + The request object. Create EntryGroup Request. parent (:class:`str`): Required.
The resource name of the entryGroup, of the form: projects/{project_number}/locations/{location_id} @@ -1582,7 +1584,7 @@ async def sample_create_entry_group(): on the ``request`` instance; if ``request`` is provided, this should not be set. entry_group (:class:`google.cloud.dataplex_v1.types.EntryGroup`): - Required. EntryGroup Resource + Required. EntryGroup Resource. This corresponds to the ``entry_group`` field on the ``request`` instance; if ``request`` is provided, this should not be set. @@ -1675,7 +1677,7 @@ async def update_entry_group( timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> operation_async.AsyncOperation: - r"""Updates a EntryGroup resource. + r"""Updates an EntryGroup. .. code-block:: python @@ -1708,9 +1710,9 @@ async def sample_update_entry_group(): Args: request (Optional[Union[google.cloud.dataplex_v1.types.UpdateEntryGroupRequest, dict]]): - The request object. Update EntryGroup Request + The request object. Update EntryGroup Request. entry_group (:class:`google.cloud.dataplex_v1.types.EntryGroup`): - Required. EntryGroup Resource + Required. EntryGroup Resource. This corresponds to the ``entry_group`` field on the ``request`` instance; if ``request`` is provided, this should not be set. @@ -1802,7 +1804,7 @@ async def delete_entry_group( timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> operation_async.AsyncOperation: - r"""Deletes a EntryGroup resource. + r"""Deletes an EntryGroup. .. code-block:: python @@ -1836,7 +1838,7 @@ async def sample_delete_entry_group(): Args: request (Optional[Union[google.cloud.dataplex_v1.types.DeleteEntryGroupRequest, dict]]): - The request object. Delele EntryGroup Request + The request object. Delete EntryGroup Request. name (:class:`str`): Required. The resource name of the EntryGroup: ``projects/{project_number}/locations/{location_id}/entryGroups/{entry_group_id}``. @@ -1965,7 +1967,7 @@ async def sample_list_entry_groups(): Required. The resource name of the entryGroup location, of the form: ``projects/{project_number}/locations/{location_id}`` - where ``location_id`` refers to a GCP region. + where ``location_id`` refers to a Google Cloud region. This corresponds to the ``parent`` field on the ``request`` instance; if ``request`` is provided, this @@ -1978,7 +1980,7 @@ async def sample_list_entry_groups(): Returns: google.cloud.dataplex_v1.services.catalog_service.pagers.ListEntryGroupsAsyncPager: - List ListEntryGroups response. + List entry groups response. Iterating over this object will yield results and resolve additional pages @@ -2051,7 +2053,7 @@ async def get_entry_group( timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> catalog.EntryGroup: - r"""Retrieves a EntryGroup resource. + r"""Gets an EntryGroup. .. code-block:: python @@ -2193,7 +2195,7 @@ async def sample_create_entry(): Args: request (Optional[Union[google.cloud.dataplex_v1.types.CreateEntryRequest, dict]]): - The request object. + The request object. Create Entry request. parent (:class:`str`): Required. The resource name of the parent Entry Group: ``projects/{project}/locations/{location}/entryGroups/{entry_group}``. @@ -2210,22 +2212,23 @@ async def sample_create_entry(): Required. Entry identifier. It has to be unique within an Entry Group. 
- Entries corresponding to Google Cloud resources use - Entry ID format based on Full Resource Names - (https://cloud.google.com/apis/design/resource_names#full_resource_name). - The format is a Full Resource Name of the resource - without the prefix double slashes in the API Service - Name part of Full Resource Name. This allows retrieval - of entries using their associated resource name. + Entries corresponding to Google Cloud resources use an + Entry ID format based on `full resource + names <https://cloud.google.com/apis/design/resource_names#full_resource_name>`__. + The format is a full resource name of the resource + without the prefix double slashes in the API service + name part of the full resource name. This allows + retrieval of entries using their associated resource + name. - For example if the Full Resource Name of a resource is + For example, if the full resource name of a resource is ``//library.googleapis.com/shelves/shelf1/books/book2``, then the suggested entry_id is ``library.googleapis.com/shelves/shelf1/books/book2``. It is also suggested to follow the same convention for - entries corresponding to resources from other providers - or systems than Google Cloud. + entries corresponding to resources from providers or + systems other than Google Cloud. The maximum size of the field is 4000 characters. @@ -2241,7 +2244,7 @@ async def sample_create_entry(): Returns: google.cloud.dataplex_v1.types.Entry: An entry is a representation of a - data asset which can be described by + data resource that can be described by various metadata. """ @@ -2338,7 +2341,7 @@ async def sample_update_entry(): Args: request (Optional[Union[google.cloud.dataplex_v1.types.UpdateEntryRequest, dict]]): - The request object. + The request object. Update Entry request. entry (:class:`google.cloud.dataplex_v1.types.Entry`): Required. Entry resource. This corresponds to the ``entry`` field on the ``request`` instance; if ``request`` is provided, this should not be set. update_mask (:class:`google.protobuf.field_mask_pb2.FieldMask`): Optional. Mask of fields to update. To update Aspects, the update_mask must contain the value "aspects". - If the update_mask is empty, all modifiable fields - present in the request will be updated. + If the update_mask is empty, the service will update all + modifiable fields present in the request. This corresponds to the ``update_mask`` field on the ``request`` instance; if ``request`` is provided, this @@ -2363,7 +2366,7 @@ async def sample_update_entry(): Returns: google.cloud.dataplex_v1.types.Entry: An entry is a representation of a - data asset which can be described by + data resource that can be described by various metadata. """ @@ -2456,7 +2459,7 @@ async def sample_delete_entry(): Args: request (Optional[Union[google.cloud.dataplex_v1.types.DeleteEntryRequest, dict]]): - The request object. + The request object. Delete Entry request. name (:class:`str`): Required. The resource name of the Entry: ``projects/{project}/locations/{location}/entryGroups/{entry_group}/entries/{entry}``. @@ -2473,7 +2476,7 @@ async def sample_delete_entry(): Returns: google.cloud.dataplex_v1.types.Entry: An entry is a representation of a - data asset which can be described by + data resource that can be described by various metadata. """ @@ -2532,7 +2535,7 @@ async def list_entries( timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> pagers.ListEntriesAsyncPager: - r"""Lists entries within an entry group. + r"""Lists Entries within an EntryGroup. ..
code-block:: python @@ -2563,7 +2566,7 @@ async def sample_list_entries(): Args: request (Optional[Union[google.cloud.dataplex_v1.types.ListEntriesRequest, dict]]): - The request object. + The request object. List Entries request. parent (:class:`str`): Required. The resource name of the parent Entry Group: ``projects/{project}/locations/{location}/entryGroups/{entry_group}``. @@ -2579,6 +2582,8 @@ async def sample_list_entries(): Returns: google.cloud.dataplex_v1.services.catalog_service.pagers.ListEntriesAsyncPager: + List Entries response. + Iterating over this object will yield results and resolve additional pages automatically. @@ -2650,7 +2655,12 @@ async def get_entry( timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> catalog.Entry: - r"""Gets a single entry. + r"""Gets an Entry. + + **Caution**: The BigQuery metadata that is stored in Dataplex + Catalog is changing. For more information, see `Changes to + BigQuery metadata stored in Dataplex + Catalog `__. .. code-block:: python @@ -2680,7 +2690,7 @@ async def sample_get_entry(): Args: request (Optional[Union[google.cloud.dataplex_v1.types.GetEntryRequest, dict]]): - The request object. + The request object. Get Entry request. name (:class:`str`): Required. The resource name of the Entry: ``projects/{project}/locations/{location}/entryGroups/{entry_group}/entries/{entry}``. @@ -2697,7 +2707,7 @@ async def sample_get_entry(): Returns: google.cloud.dataplex_v1.types.Entry: An entry is a representation of a - data asset which can be described by + data resource that can be described by various metadata. """ @@ -2755,7 +2765,13 @@ async def lookup_entry( timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> catalog.Entry: - r"""Looks up a single entry. + r"""Looks up a single Entry by name using the permission on the + source system. + + **Caution**: The BigQuery metadata that is stored in Dataplex + Catalog is changing. For more information, see `Changes to + BigQuery metadata stored in Dataplex + Catalog `__. .. code-block:: python @@ -2786,7 +2802,8 @@ async def sample_lookup_entry(): Args: request (Optional[Union[google.cloud.dataplex_v1.types.LookupEntryRequest, dict]]): - The request object. + The request object. Lookup Entry request using + permissions in the source system. retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -2796,7 +2813,7 @@ async def sample_lookup_entry(): Returns: google.cloud.dataplex_v1.types.Entry: An entry is a representation of a - data asset which can be described by + data resource that can be described by various metadata. """ @@ -2842,7 +2859,8 @@ async def search_entries( timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> pagers.SearchEntriesAsyncPager: - r"""Searches for entries matching given query and scope. + r"""Searches for Entries matching the given query and + scope. .. code-block:: python @@ -2962,6 +2980,479 @@ async def sample_search_entries(): # Done; return the response. 
return response + async def create_metadata_job( + self, + request: Optional[Union[catalog.CreateMetadataJobRequest, dict]] = None, + *, + parent: Optional[str] = None, + metadata_job: Optional[catalog.MetadataJob] = None, + metadata_job_id: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation_async.AsyncOperation: + r"""Creates a metadata job. For example, use a metadata + job to import Dataplex Catalog entries and aspects from + a third-party system into Dataplex. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dataplex_v1 + + async def sample_create_metadata_job(): + # Create a client + client = dataplex_v1.CatalogServiceAsyncClient() + + # Initialize request argument(s) + metadata_job = dataplex_v1.MetadataJob() + metadata_job.import_spec.scope.entry_groups = ['entry_groups_value1', 'entry_groups_value2'] + metadata_job.import_spec.scope.entry_types = ['entry_types_value1', 'entry_types_value2'] + metadata_job.import_spec.entry_sync_mode = "INCREMENTAL" + metadata_job.import_spec.aspect_sync_mode = "INCREMENTAL" + metadata_job.type_ = "IMPORT" + + request = dataplex_v1.CreateMetadataJobRequest( + parent="parent_value", + metadata_job=metadata_job, + ) + + # Make the request + operation = client.create_metadata_job(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.dataplex_v1.types.CreateMetadataJobRequest, dict]]): + The request object. Create metadata job request. + parent (:class:`str`): + Required. The resource name of the parent location, in + the format + ``projects/{project_id_or_number}/locations/{location_id}`` + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + metadata_job (:class:`google.cloud.dataplex_v1.types.MetadataJob`): + Required. The metadata job resource. + This corresponds to the ``metadata_job`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + metadata_job_id (:class:`str`): + Optional. The metadata job ID. If not provided, a unique + ID is generated with the prefix ``metadata-job-``. + + This corresponds to the ``metadata_job_id`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation_async.AsyncOperation: + An object representing a long-running operation. + + The result type for the operation will be + :class:`google.cloud.dataplex_v1.types.MetadataJob` A + metadata job resource. + + """ + # Create or coerce a protobuf request object. 
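+        # The ``request`` object and the flattened ``parent``,
+        # ``metadata_job``, and ``metadata_job_id`` keyword arguments are
+        # mutually exclusive ways to describe the call; supplying both is
+        # rejected below.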
+ # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent, metadata_job, metadata_job_id]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, catalog.CreateMetadataJobRequest): + request = catalog.CreateMetadataJobRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + if metadata_job is not None: + request.metadata_job = metadata_job + if metadata_job_id is not None: + request.metadata_job_id = metadata_job_id + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[ + self._client._transport.create_metadata_job + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation_async.from_gapic( + response, + self._client._transport.operations_client, + catalog.MetadataJob, + metadata_type=service.OperationMetadata, + ) + + # Done; return the response. + return response + + async def get_metadata_job( + self, + request: Optional[Union[catalog.GetMetadataJobRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> catalog.MetadataJob: + r"""Gets a metadata job. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dataplex_v1 + + async def sample_get_metadata_job(): + # Create a client + client = dataplex_v1.CatalogServiceAsyncClient() + + # Initialize request argument(s) + request = dataplex_v1.GetMetadataJobRequest( + name="name_value", + ) + + # Make the request + response = await client.get_metadata_job(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.dataplex_v1.types.GetMetadataJobRequest, dict]]): + The request object. Get metadata job request. + name (:class:`str`): + Required. The resource name of the metadata job, in the + format + ``projects/{project_id_or_number}/locations/{location_id}/metadataJobs/{metadata_job_id}``. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. 
+ timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.dataplex_v1.types.MetadataJob: + A metadata job resource. + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, catalog.GetMetadataJobRequest): + request = catalog.GetMetadataJobRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[ + self._client._transport.get_metadata_job + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def list_metadata_jobs( + self, + request: Optional[Union[catalog.ListMetadataJobsRequest, dict]] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListMetadataJobsAsyncPager: + r"""Lists metadata jobs. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dataplex_v1 + + async def sample_list_metadata_jobs(): + # Create a client + client = dataplex_v1.CatalogServiceAsyncClient() + + # Initialize request argument(s) + request = dataplex_v1.ListMetadataJobsRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_metadata_jobs(request=request) + + # Handle the response + async for response in page_result: + print(response) + + Args: + request (Optional[Union[google.cloud.dataplex_v1.types.ListMetadataJobsRequest, dict]]): + The request object. List metadata jobs request. + parent (:class:`str`): + Required. The resource name of the parent location, in + the format + ``projects/{project_id_or_number}/locations/{location_id}`` + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. 
+ metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.dataplex_v1.services.catalog_service.pagers.ListMetadataJobsAsyncPager: + List metadata jobs response. + + Iterating over this object will yield + results and resolve additional pages + automatically. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, catalog.ListMetadataJobsRequest): + request = catalog.ListMetadataJobsRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[ + self._client._transport.list_metadata_jobs + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__aiter__` convenience method. + response = pagers.ListMetadataJobsAsyncPager( + method=rpc, + request=request, + response=response, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def cancel_metadata_job( + self, + request: Optional[Union[catalog.CancelMetadataJobRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Cancels a metadata job. + + If you cancel a metadata import job that is in progress, + the changes in the job might be partially applied. We + recommend that you reset the state of the entry groups + in your project by running another metadata job that + reverts the changes from the canceled job. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dataplex_v1 + + async def sample_cancel_metadata_job(): + # Create a client + client = dataplex_v1.CatalogServiceAsyncClient() + + # Initialize request argument(s) + request = dataplex_v1.CancelMetadataJobRequest( + name="name_value", + ) + + # Make the request + await client.cancel_metadata_job(request=request) + + Args: + request (Optional[Union[google.cloud.dataplex_v1.types.CancelMetadataJobRequest, dict]]): + The request object. Cancel metadata job request. + name (:class:`str`): + Required. The resource name of the job, in the format + ``projects/{project_id_or_number}/locations/{location_id}/metadataJobs/{metadata_job_id}`` + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, catalog.CancelMetadataJobRequest): + request = catalog.CancelMetadataJobRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[ + self._client._transport.cancel_metadata_job + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. 
+ await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + async def list_operations( self, request: Optional[operations_pb2.ListOperationsRequest] = None, diff --git a/packages/google-cloud-dataplex/google/cloud/dataplex_v1/services/catalog_service/client.py b/packages/google-cloud-dataplex/google/cloud/dataplex_v1/services/catalog_service/client.py index 6c5aeb8272cf..9ad66817729a 100644 --- a/packages/google-cloud-dataplex/google/cloud/dataplex_v1/services/catalog_service/client.py +++ b/packages/google-cloud-dataplex/google/cloud/dataplex_v1/services/catalog_service/client.py @@ -104,10 +104,10 @@ def get_transport_class( class CatalogServiceClient(metaclass=CatalogServiceClientMeta): """The primary resources offered by this service are - EntryGroups, EntryTypes, AspectTypes, Entry and Aspect which - collectively allow a data administrator to organize, manage, - secure and catalog data across their organization located across - cloud projects in a variety of storage systems including Cloud + EntryGroups, EntryTypes, AspectTypes, and Entries. They + collectively let data administrators organize, manage, secure, + and catalog data located across cloud projects in their + organization in a variety of storage systems, including Cloud Storage and BigQuery. """ @@ -291,6 +291,30 @@ def parse_entry_type_path(path: str) -> Dict[str, str]: ) return m.groupdict() if m else {} + @staticmethod + def metadata_job_path( + project: str, + location: str, + metadataJob: str, + ) -> str: + """Returns a fully-qualified metadata_job string.""" + return ( + "projects/{project}/locations/{location}/metadataJobs/{metadataJob}".format( + project=project, + location=location, + metadataJob=metadataJob, + ) + ) + + @staticmethod + def parse_metadata_job_path(path: str) -> Dict[str, str]: + """Parses a metadata_job path into its component segments.""" + m = re.match( + r"^projects/(?P<project>.+?)/locations/(?P<location>.+?)/metadataJobs/(?P<metadataJob>.+?)$", + path, + ) + return m.groupdict() if m else {} + @staticmethod def common_billing_account_path( billing_account: str, @@ -740,7 +764,7 @@ def create_entry_type( timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> operation.Operation: - r"""Creates an EntryType + r"""Creates an EntryType. .. code-block:: python @@ -775,17 +799,17 @@ def sample_create_entry_type(): Args: request (Union[google.cloud.dataplex_v1.types.CreateEntryTypeRequest, dict]): - The request object. Create EntryType Request + The request object. Create EntryType Request. parent (str): Required. The resource name of the EntryType, of the form: projects/{project_number}/locations/{location_id} - where ``location_id`` refers to a GCP region. + where ``location_id`` refers to a Google Cloud region. This corresponds to the ``parent`` field on the ``request`` instance; if ``request`` is provided, this should not be set. entry_type (google.cloud.dataplex_v1.types.EntryType): - Required. EntryType Resource + Required. EntryType Resource. This corresponds to the ``entry_type`` field on the ``request`` instance; if ``request`` is provided, this should not be set. @@ -874,7 +898,7 @@ def update_entry_type( timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> operation.Operation: - r"""Updates a EntryType resource. + r"""Updates an EntryType. ..
code-block:: python @@ -907,9 +931,9 @@ def sample_update_entry_type(): Args: request (Union[google.cloud.dataplex_v1.types.UpdateEntryTypeRequest, dict]): - The request object. Update EntryType Request + The request object. Update EntryType Request. entry_type (google.cloud.dataplex_v1.types.EntryType): - Required. EntryType Resource + Required. EntryType Resource. This corresponds to the ``entry_type`` field on the ``request`` instance; if ``request`` is provided, this should not be set. @@ -997,7 +1021,7 @@ def delete_entry_type( timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> operation.Operation: - r"""Deletes a EntryType resource. + r"""Deletes an EntryType. .. code-block:: python @@ -1031,7 +1055,7 @@ def sample_delete_entry_type(): Args: request (Union[google.cloud.dataplex_v1.types.DeleteEntryTypeRequest, dict]): - The request object. Delele EntryType Request + The request object. Delete EntryType Request. name (str): Required. The resource name of the EntryType: ``projects/{project_number}/locations/{location_id}/entryTypes/{entry_type_id}``. @@ -1157,7 +1181,7 @@ def sample_list_entry_types(): Required. The resource name of the EntryType location, of the form: ``projects/{project_number}/locations/{location_id}`` - where ``location_id`` refers to a GCP region. + where ``location_id`` refers to a Google Cloud region. This corresponds to the ``parent`` field on the ``request`` instance; if ``request`` is provided, this @@ -1170,7 +1194,7 @@ def sample_list_entry_types(): Returns: google.cloud.dataplex_v1.services.catalog_service.pagers.ListEntryTypesPager: - List EntryTypes response + List EntryTypes response. Iterating over this object will yield results and resolve additional pages @@ -1240,7 +1264,7 @@ def get_entry_type( timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> catalog.EntryType: - r"""Retrieves a EntryType resource. + r"""Gets an EntryType. .. code-block:: python @@ -1270,7 +1294,7 @@ def sample_get_entry_type(): Args: request (Union[google.cloud.dataplex_v1.types.GetEntryTypeRequest, dict]): - The request object. Get EntryType request + The request object. Get EntryType request. name (str): Required. The resource name of the EntryType: ``projects/{project_number}/locations/{location_id}/entryTypes/{entry_type_id}``. @@ -1344,7 +1368,7 @@ def create_aspect_type( timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> operation.Operation: - r"""Creates an AspectType + r"""Creates an AspectType. .. code-block:: python @@ -1384,17 +1408,17 @@ def sample_create_aspect_type(): Args: request (Union[google.cloud.dataplex_v1.types.CreateAspectTypeRequest, dict]): - The request object. Create AspectType Request + The request object. Create AspectType Request. parent (str): Required. The resource name of the AspectType, of the form: projects/{project_number}/locations/{location_id} - where ``location_id`` refers to a GCP region. + where ``location_id`` refers to a Google Cloud region. This corresponds to the ``parent`` field on the ``request`` instance; if ``request`` is provided, this should not be set. aspect_type (google.cloud.dataplex_v1.types.AspectType): - Required. AspectType Resource + Required. AspectType Resource. This corresponds to the ``aspect_type`` field on the ``request`` instance; if ``request`` is provided, this should not be set.
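A minimal sketch of creating an AspectType through this client; the project, location, IDs, and metadata template values below are illustrative placeholders, not values taken from this change:

.. code-block:: python

    from google.cloud import dataplex_v1

    def create_sample_aspect_type() -> dataplex_v1.AspectType:
        client = dataplex_v1.CatalogServiceClient()

        # AspectType carries the JSON-schema-like metadata template for
        # Aspects; the template fields set here are assumed placeholders.
        aspect_type = dataplex_v1.AspectType()
        aspect_type.metadata_template.name = "sample_template"
        aspect_type.metadata_template.type_ = "record"

        request = dataplex_v1.CreateAspectTypeRequest(
            parent="projects/my-project/locations/us-central1",
            aspect_type_id="sample-aspect-type",
            aspect_type=aspect_type,
        )

        # create_aspect_type returns a long-running operation; result()
        # blocks until the AspectType has been created.
        operation = client.create_aspect_type(request=request)
        return operation.result()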
@@ -1413,9 +1437,9 @@ def sample_create_aspect_type(): google.api_core.operation.Operation: An object representing a long-running operation. - The result type for the operation will be :class:`google.cloud.dataplex_v1.types.AspectType` Aspect Type is a template for creating Aspects, and represents the - JSON-schema for a given Entry, e.g., BigQuery Table - Schema. + The result type for the operation will be :class:`google.cloud.dataplex_v1.types.AspectType` AspectType is a template for creating Aspects, and represents the + JSON-schema for a given Entry, for example, BigQuery + Table Schema. """ # Create or coerce a protobuf request object. @@ -1483,7 +1507,7 @@ def update_aspect_type( timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> operation.Operation: - r"""Updates a AspectType resource. + r"""Updates an AspectType. .. code-block:: python @@ -1542,9 +1566,9 @@ def sample_update_aspect_type(): google.api_core.operation.Operation: An object representing a long-running operation. - The result type for the operation will be :class:`google.cloud.dataplex_v1.types.AspectType` Aspect Type is a template for creating Aspects, and represents the - JSON-schema for a given Entry, e.g., BigQuery Table - Schema. + The result type for the operation will be :class:`google.cloud.dataplex_v1.types.AspectType` AspectType is a template for creating Aspects, and represents the + JSON-schema for a given Entry, for example, BigQuery + Table Schema. """ # Create or coerce a protobuf request object. @@ -1611,7 +1635,7 @@ def delete_aspect_type( timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> operation.Operation: - r"""Deletes a AspectType resource. + r"""Deletes an AspectType. .. code-block:: python @@ -1645,7 +1669,7 @@ def sample_delete_aspect_type(): Args: request (Union[google.cloud.dataplex_v1.types.DeleteAspectTypeRequest, dict]): - The request object. Delele AspectType Request + The request object. Delete AspectType Request. name (str): Required. The resource name of the AspectType: ``projects/{project_number}/locations/{location_id}/aspectTypes/{aspect_type_id}``. @@ -1766,12 +1790,12 @@ def sample_list_aspect_types(): Args: request (Union[google.cloud.dataplex_v1.types.ListAspectTypesRequest, dict]): - The request object. List AspectTypes request + The request object. List AspectTypes request. parent (str): Required. The resource name of the AspectType location, of the form: ``projects/{project_number}/locations/{location_id}`` - where ``location_id`` refers to a GCP region. + where ``location_id`` refers to a Google Cloud region. This corresponds to the ``parent`` field on the ``request`` instance; if ``request`` is provided, this @@ -1784,7 +1808,7 @@ def sample_list_aspect_types(): Returns: google.cloud.dataplex_v1.services.catalog_service.pagers.ListAspectTypesPager: - List AspectTypes response + List AspectTypes response. Iterating over this object will yield results and resolve additional pages @@ -1854,7 +1878,7 @@ def get_aspect_type( timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> catalog.AspectType: - r"""Retrieves a AspectType resource. + r"""Gets an AspectType. .. code-block:: python @@ -1884,7 +1908,7 @@ def sample_get_aspect_type(): Args: request (Union[google.cloud.dataplex_v1.types.GetAspectTypeRequest, dict]): - The request object. Get AspectType request + The request object. Get AspectType request. name (str): Required.
The resource name of the AspectType: ``projects/{project_number}/locations/{location_id}/aspectTypes/{aspect_type_id}``. @@ -1900,10 +1924,10 @@ def sample_get_aspect_type(): Returns: google.cloud.dataplex_v1.types.AspectType: - Aspect Type is a template for - creating Aspects, and represents the - JSON-schema for a given Entry, e.g., - BigQuery Table Schema. + AspectType is a template for creating + Aspects, and represents the JSON-schema + for a given Entry, for example, BigQuery + Table Schema. """ # Create or coerce a protobuf request object. @@ -1960,7 +1984,7 @@ def create_entry_group( timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> operation.Operation: - r"""Creates an EntryGroup + r"""Creates an EntryGroup. .. code-block:: python @@ -1995,7 +2019,7 @@ def sample_create_entry_group(): Args: request (Union[google.cloud.dataplex_v1.types.CreateEntryGroupRequest, dict]): - The request object. Create EntryGroup Request + The request object. Create EntryGroup Request. parent (str): Required. The resource name of the entryGroup, of the form: projects/{project_number}/locations/{location_id} @@ -2005,7 +2029,7 @@ def sample_create_entry_group(): on the ``request`` instance; if ``request`` is provided, this should not be set. entry_group (google.cloud.dataplex_v1.types.EntryGroup): - Required. EntryGroup Resource + Required. EntryGroup Resource. This corresponds to the ``entry_group`` field on the ``request`` instance; if ``request`` is provided, this should not be set. @@ -2095,7 +2119,7 @@ def update_entry_group( timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> operation.Operation: - r"""Updates a EntryGroup resource. + r"""Updates an EntryGroup. .. code-block:: python @@ -2128,9 +2152,9 @@ def sample_update_entry_group(): Args: request (Union[google.cloud.dataplex_v1.types.UpdateEntryGroupRequest, dict]): - The request object. Update EntryGroup Request + The request object. Update EntryGroup Request. entry_group (google.cloud.dataplex_v1.types.EntryGroup): - Required. EntryGroup Resource + Required. EntryGroup Resource. This corresponds to the ``entry_group`` field on the ``request`` instance; if ``request`` is provided, this should not be set. @@ -2219,7 +2243,7 @@ def delete_entry_group( timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> operation.Operation: - r"""Deletes a EntryGroup resource. + r"""Deletes an EntryGroup. .. code-block:: python @@ -2253,7 +2277,7 @@ def sample_delete_entry_group(): Args: request (Union[google.cloud.dataplex_v1.types.DeleteEntryGroupRequest, dict]): - The request object. Delele EntryGroup Request + The request object. Delete EntryGroup Request. name (str): Required. The resource name of the EntryGroup: ``projects/{project_number}/locations/{location_id}/entryGroups/{entry_group_id}``. @@ -2379,7 +2403,7 @@ def sample_list_entry_groups(): Required. The resource name of the entryGroup location, of the form: ``projects/{project_number}/locations/{location_id}`` - where ``location_id`` refers to a GCP region. + where ``location_id`` refers to a Google Cloud region. This corresponds to the ``parent`` field on the ``request`` instance; if ``request`` is provided, this @@ -2392,7 +2416,7 @@ def sample_list_entry_groups(): Returns: google.cloud.dataplex_v1.services.catalog_service.pagers.ListEntryGroupsPager: - List ListEntryGroups response. + List entry groups response. 
Iterating over this object will yield results and resolve additional pages @@ -2462,7 +2486,7 @@ def get_entry_group( timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> catalog.EntryGroup: - r"""Retrieves a EntryGroup resource. + r"""Gets an EntryGroup. .. code-block:: python @@ -2601,7 +2625,7 @@ def sample_create_entry(): Args: request (Union[google.cloud.dataplex_v1.types.CreateEntryRequest, dict]): - The request object. + The request object. Create Entry request. parent (str): Required. The resource name of the parent Entry Group: ``projects/{project}/locations/{location}/entryGroups/{entry_group}``. @@ -2618,22 +2642,23 @@ def sample_create_entry(): Required. Entry identifier. It has to be unique within an Entry Group. - Entries corresponding to Google Cloud resources use - Entry ID format based on Full Resource Names - (https://cloud.google.com/apis/design/resource_names#full_resource_name). - The format is a Full Resource Name of the resource - without the prefix double slashes in the API Service - Name part of Full Resource Name. This allows retrieval - of entries using their associated resource name. + Entries corresponding to Google Cloud resources use an + Entry ID format based on `full resource + names <https://cloud.google.com/apis/design/resource_names#full_resource_name>`__. + The format is a full resource name of the resource + without the prefix double slashes in the API service + name part of the full resource name. This allows + retrieval of entries using their associated resource + name. - For example if the Full Resource Name of a resource is + For example, if the full resource name of a resource is ``//library.googleapis.com/shelves/shelf1/books/book2``, then the suggested entry_id is ``library.googleapis.com/shelves/shelf1/books/book2``. It is also suggested to follow the same convention for - entries corresponding to resources from other providers - or systems than Google Cloud. + entries corresponding to resources from providers or + systems other than Google Cloud. The maximum size of the field is 4000 characters. @@ -2649,7 +2674,7 @@ def sample_create_entry(): Returns: google.cloud.dataplex_v1.types.Entry: An entry is a representation of a - data asset which can be described by + data resource that can be described by various metadata. """ @@ -2743,7 +2768,7 @@ def sample_update_entry(): Args: request (Union[google.cloud.dataplex_v1.types.UpdateEntryRequest, dict]): - The request object. + The request object. Update Entry request. entry (google.cloud.dataplex_v1.types.Entry): Required. Entry resource. This corresponds to the ``entry`` field on the ``request`` instance; if ``request`` is provided, this should not be set. update_mask (google.protobuf.field_mask_pb2.FieldMask): Optional. Mask of fields to update. To update Aspects, the update_mask must contain the value "aspects". - If the update_mask is empty, all modifiable fields - present in the request will be updated. + If the update_mask is empty, the service will update all + modifiable fields present in the request. This corresponds to the ``update_mask`` field on the ``request`` instance; if ``request`` is provided, this @@ -2768,7 +2793,7 @@ def sample_update_entry(): Returns: google.cloud.dataplex_v1.types.Entry: An entry is a representation of a - data asset which can be described by + data resource that can be described by various metadata. """ @@ -2858,7 +2883,7 @@ def sample_delete_entry(): Args: request (Union[google.cloud.dataplex_v1.types.DeleteEntryRequest, dict]): - The request object. + The request object. Delete Entry request. name (str): Required.
The resource name of the Entry: ``projects/{project}/locations/{location}/entryGroups/{entry_group}/entries/{entry}``. @@ -2875,7 +2900,7 @@ def sample_delete_entry(): Returns: google.cloud.dataplex_v1.types.Entry: An entry is a representation of a - data asset which can be described by + data resource that can be described by various metadata. """ @@ -2931,7 +2956,7 @@ def list_entries( timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> pagers.ListEntriesPager: - r"""Lists entries within an entry group. + r"""Lists Entries within an EntryGroup. .. code-block:: python @@ -2962,7 +2987,7 @@ def sample_list_entries(): Args: request (Union[google.cloud.dataplex_v1.types.ListEntriesRequest, dict]): - The request object. + The request object. List Entries request. parent (str): Required. The resource name of the parent Entry Group: ``projects/{project}/locations/{location}/entryGroups/{entry_group}``. @@ -2978,6 +3003,8 @@ def sample_list_entries(): Returns: google.cloud.dataplex_v1.services.catalog_service.pagers.ListEntriesPager: + List Entries response. + Iterating over this object will yield results and resolve additional pages automatically. @@ -3046,7 +3073,12 @@ def get_entry( timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> catalog.Entry: - r"""Gets a single entry. + r"""Gets an Entry. + + **Caution**: The BigQuery metadata that is stored in Dataplex + Catalog is changing. For more information, see `Changes to + BigQuery metadata stored in Dataplex + Catalog `__. .. code-block:: python @@ -3076,7 +3108,7 @@ def sample_get_entry(): Args: request (Union[google.cloud.dataplex_v1.types.GetEntryRequest, dict]): - The request object. + The request object. Get Entry request. name (str): Required. The resource name of the Entry: ``projects/{project}/locations/{location}/entryGroups/{entry_group}/entries/{entry}``. @@ -3093,7 +3125,7 @@ def sample_get_entry(): Returns: google.cloud.dataplex_v1.types.Entry: An entry is a representation of a - data asset which can be described by + data resource that can be described by various metadata. """ @@ -3148,7 +3180,13 @@ def lookup_entry( timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> catalog.Entry: - r"""Looks up a single entry. + r"""Looks up a single Entry by name using the permission on the + source system. + + **Caution**: The BigQuery metadata that is stored in Dataplex + Catalog is changing. For more information, see `Changes to + BigQuery metadata stored in Dataplex + Catalog `__. .. code-block:: python @@ -3179,7 +3217,8 @@ def sample_lookup_entry(): Args: request (Union[google.cloud.dataplex_v1.types.LookupEntryRequest, dict]): - The request object. + The request object. Lookup Entry request using + permissions in the source system. retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -3189,7 +3228,7 @@ def sample_lookup_entry(): Returns: google.cloud.dataplex_v1.types.Entry: An entry is a representation of a - data asset which can be described by + data resource that can be described by various metadata. """ @@ -3233,7 +3272,8 @@ def search_entries( timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> pagers.SearchEntriesPager: - r"""Searches for entries matching given query and scope. 
+ r"""Searches for Entries matching the given query and + scope. .. code-block:: python @@ -3350,6 +3390,467 @@ def sample_search_entries(): # Done; return the response. return response + def create_metadata_job( + self, + request: Optional[Union[catalog.CreateMetadataJobRequest, dict]] = None, + *, + parent: Optional[str] = None, + metadata_job: Optional[catalog.MetadataJob] = None, + metadata_job_id: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation.Operation: + r"""Creates a metadata job. For example, use a metadata + job to import Dataplex Catalog entries and aspects from + a third-party system into Dataplex. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dataplex_v1 + + def sample_create_metadata_job(): + # Create a client + client = dataplex_v1.CatalogServiceClient() + + # Initialize request argument(s) + metadata_job = dataplex_v1.MetadataJob() + metadata_job.import_spec.scope.entry_groups = ['entry_groups_value1', 'entry_groups_value2'] + metadata_job.import_spec.scope.entry_types = ['entry_types_value1', 'entry_types_value2'] + metadata_job.import_spec.entry_sync_mode = "INCREMENTAL" + metadata_job.import_spec.aspect_sync_mode = "INCREMENTAL" + metadata_job.type_ = "IMPORT" + + request = dataplex_v1.CreateMetadataJobRequest( + parent="parent_value", + metadata_job=metadata_job, + ) + + # Make the request + operation = client.create_metadata_job(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.dataplex_v1.types.CreateMetadataJobRequest, dict]): + The request object. Create metadata job request. + parent (str): + Required. The resource name of the parent location, in + the format + ``projects/{project_id_or_number}/locations/{location_id}`` + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + metadata_job (google.cloud.dataplex_v1.types.MetadataJob): + Required. The metadata job resource. + This corresponds to the ``metadata_job`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + metadata_job_id (str): + Optional. The metadata job ID. If not provided, a unique + ID is generated with the prefix ``metadata-job-``. + + This corresponds to the ``metadata_job_id`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation.Operation: + An object representing a long-running operation. + + The result type for the operation will be + :class:`google.cloud.dataplex_v1.types.MetadataJob` A + metadata job resource. + + """ + # Create or coerce a protobuf request object. 
+ # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent, metadata_job, metadata_job_id]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, catalog.CreateMetadataJobRequest): + request = catalog.CreateMetadataJobRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + if metadata_job is not None: + request.metadata_job = metadata_job + if metadata_job_id is not None: + request.metadata_job_id = metadata_job_id + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.create_metadata_job] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation.from_gapic( + response, + self._transport.operations_client, + catalog.MetadataJob, + metadata_type=service.OperationMetadata, + ) + + # Done; return the response. + return response + + def get_metadata_job( + self, + request: Optional[Union[catalog.GetMetadataJobRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> catalog.MetadataJob: + r"""Gets a metadata job. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dataplex_v1 + + def sample_get_metadata_job(): + # Create a client + client = dataplex_v1.CatalogServiceClient() + + # Initialize request argument(s) + request = dataplex_v1.GetMetadataJobRequest( + name="name_value", + ) + + # Make the request + response = client.get_metadata_job(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.dataplex_v1.types.GetMetadataJobRequest, dict]): + The request object. Get metadata job request. + name (str): + Required. The resource name of the metadata job, in the + format + ``projects/{project_id_or_number}/locations/{location_id}/metadataJobs/{metadata_job_id}``. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. 
+ metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.dataplex_v1.types.MetadataJob: + A metadata job resource. + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, catalog.GetMetadataJobRequest): + request = catalog.GetMetadataJobRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.get_metadata_job] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def list_metadata_jobs( + self, + request: Optional[Union[catalog.ListMetadataJobsRequest, dict]] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListMetadataJobsPager: + r"""Lists metadata jobs. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dataplex_v1 + + def sample_list_metadata_jobs(): + # Create a client + client = dataplex_v1.CatalogServiceClient() + + # Initialize request argument(s) + request = dataplex_v1.ListMetadataJobsRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_metadata_jobs(request=request) + + # Handle the response + for response in page_result: + print(response) + + Args: + request (Union[google.cloud.dataplex_v1.types.ListMetadataJobsRequest, dict]): + The request object. List metadata jobs request. + parent (str): + Required. The resource name of the parent location, in + the format + ``projects/{project_id_or_number}/locations/{location_id}`` + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ + Returns: + google.cloud.dataplex_v1.services.catalog_service.pagers.ListMetadataJobsPager: + List metadata jobs response. + + Iterating over this object will yield + results and resolve additional pages + automatically. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, catalog.ListMetadataJobsRequest): + request = catalog.ListMetadataJobsRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.list_metadata_jobs] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__iter__` convenience method. + response = pagers.ListMetadataJobsPager( + method=rpc, + request=request, + response=response, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def cancel_metadata_job( + self, + request: Optional[Union[catalog.CancelMetadataJobRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Cancels a metadata job. + + If you cancel a metadata import job that is in progress, + the changes in the job might be partially applied. We + recommend that you reset the state of the entry groups + in your project by running another metadata job that + reverts the changes from the canceled job. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dataplex_v1 + + def sample_cancel_metadata_job(): + # Create a client + client = dataplex_v1.CatalogServiceClient() + + # Initialize request argument(s) + request = dataplex_v1.CancelMetadataJobRequest( + name="name_value", + ) + + # Make the request + client.cancel_metadata_job(request=request) + + Args: + request (Union[google.cloud.dataplex_v1.types.CancelMetadataJobRequest, dict]): + The request object. Cancel metadata job request. + name (str): + Required. 
The resource name of the job, in the format + ``projects/{project_id_or_number}/locations/{location_id}/metadataJobs/{metadata_job_id}`` + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, catalog.CancelMetadataJobRequest): + request = catalog.CancelMetadataJobRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.cancel_metadata_job] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + def __enter__(self) -> "CatalogServiceClient": return self diff --git a/packages/google-cloud-dataplex/google/cloud/dataplex_v1/services/catalog_service/pagers.py b/packages/google-cloud-dataplex/google/cloud/dataplex_v1/services/catalog_service/pagers.py index 48a7fc8b34a7..27874bec60df 100644 --- a/packages/google-cloud-dataplex/google/cloud/dataplex_v1/services/catalog_service/pagers.py +++ b/packages/google-cloud-dataplex/google/cloud/dataplex_v1/services/catalog_service/pagers.py @@ -799,3 +799,155 @@ async def async_generator(): def __repr__(self) -> str: return "{0}<{1!r}>".format(self.__class__.__name__, self._response) + + +class ListMetadataJobsPager: + """A pager for iterating through ``list_metadata_jobs`` requests. + + This class thinly wraps an initial + :class:`google.cloud.dataplex_v1.types.ListMetadataJobsResponse` object, and + provides an ``__iter__`` method to iterate through its + ``metadata_jobs`` field. + + If there are more pages, the ``__iter__`` method will make additional + ``ListMetadataJobs`` requests and continue to iterate + through the ``metadata_jobs`` field on the + corresponding responses. + + All the usual :class:`google.cloud.dataplex_v1.types.ListMetadataJobsResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. 
+ """ + + def __init__( + self, + method: Callable[..., catalog.ListMetadataJobsResponse], + request: catalog.ListMetadataJobsRequest, + response: catalog.ListMetadataJobsResponse, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = () + ): + """Instantiate the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.dataplex_v1.types.ListMetadataJobsRequest): + The initial request object. + response (google.cloud.dataplex_v1.types.ListMetadataJobsResponse): + The initial response object. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + self._method = method + self._request = catalog.ListMetadataJobsRequest(request) + self._response = response + self._retry = retry + self._timeout = timeout + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + def pages(self) -> Iterator[catalog.ListMetadataJobsResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = self._method( + self._request, + retry=self._retry, + timeout=self._timeout, + metadata=self._metadata, + ) + yield self._response + + def __iter__(self) -> Iterator[catalog.MetadataJob]: + for page in self.pages: + yield from page.metadata_jobs + + def __repr__(self) -> str: + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) + + +class ListMetadataJobsAsyncPager: + """A pager for iterating through ``list_metadata_jobs`` requests. + + This class thinly wraps an initial + :class:`google.cloud.dataplex_v1.types.ListMetadataJobsResponse` object, and + provides an ``__aiter__`` method to iterate through its + ``metadata_jobs`` field. + + If there are more pages, the ``__aiter__`` method will make additional + ``ListMetadataJobs`` requests and continue to iterate + through the ``metadata_jobs`` field on the + corresponding responses. + + All the usual :class:`google.cloud.dataplex_v1.types.ListMetadataJobsResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + + def __init__( + self, + method: Callable[..., Awaitable[catalog.ListMetadataJobsResponse]], + request: catalog.ListMetadataJobsRequest, + response: catalog.ListMetadataJobsResponse, + *, + retry: OptionalAsyncRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = () + ): + """Instantiates the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.dataplex_v1.types.ListMetadataJobsRequest): + The initial request object. + response (google.cloud.dataplex_v1.types.ListMetadataJobsResponse): + The initial response object. + retry (google.api_core.retry.AsyncRetry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ """ + self._method = method + self._request = catalog.ListMetadataJobsRequest(request) + self._response = response + self._retry = retry + self._timeout = timeout + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + async def pages(self) -> AsyncIterator[catalog.ListMetadataJobsResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = await self._method( + self._request, + retry=self._retry, + timeout=self._timeout, + metadata=self._metadata, + ) + yield self._response + + def __aiter__(self) -> AsyncIterator[catalog.MetadataJob]: + async def async_generator(): + async for page in self.pages: + for response in page.metadata_jobs: + yield response + + return async_generator() + + def __repr__(self) -> str: + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) diff --git a/packages/google-cloud-dataplex/google/cloud/dataplex_v1/services/catalog_service/transports/base.py b/packages/google-cloud-dataplex/google/cloud/dataplex_v1/services/catalog_service/transports/base.py index 7e054f2e5a81..eb6d4b7b6619 100644 --- a/packages/google-cloud-dataplex/google/cloud/dataplex_v1/services/catalog_service/transports/base.py +++ b/packages/google-cloud-dataplex/google/cloud/dataplex_v1/services/catalog_service/transports/base.py @@ -27,6 +27,7 @@ from google.iam.v1 import policy_pb2 # type: ignore from google.longrunning import operations_pb2 # type: ignore from google.oauth2 import service_account # type: ignore +from google.protobuf import empty_pb2 # type: ignore from google.cloud.dataplex_v1 import gapic_version as package_version from google.cloud.dataplex_v1.types import catalog @@ -352,6 +353,26 @@ def _prep_wrapped_messages(self, client_info): default_timeout=60.0, client_info=client_info, ), + self.create_metadata_job: gapic_v1.method.wrap_method( + self.create_metadata_job, + default_timeout=None, + client_info=client_info, + ), + self.get_metadata_job: gapic_v1.method.wrap_method( + self.get_metadata_job, + default_timeout=None, + client_info=client_info, + ), + self.list_metadata_jobs: gapic_v1.method.wrap_method( + self.list_metadata_jobs, + default_timeout=None, + client_info=client_info, + ), + self.cancel_metadata_job: gapic_v1.method.wrap_method( + self.cancel_metadata_job, + default_timeout=None, + client_info=client_info, + ), self.get_location: gapic_v1.method.wrap_method( self.get_location, default_timeout=None, @@ -597,6 +618,45 @@ def search_entries( ]: raise NotImplementedError() + @property + def create_metadata_job( + self, + ) -> Callable[ + [catalog.CreateMetadataJobRequest], + Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], + ]: + raise NotImplementedError() + + @property + def get_metadata_job( + self, + ) -> Callable[ + [catalog.GetMetadataJobRequest], + Union[catalog.MetadataJob, Awaitable[catalog.MetadataJob]], + ]: + raise NotImplementedError() + + @property + def list_metadata_jobs( + self, + ) -> Callable[ + [catalog.ListMetadataJobsRequest], + Union[ + catalog.ListMetadataJobsResponse, + Awaitable[catalog.ListMetadataJobsResponse], + ], + ]: + raise NotImplementedError() + + @property + def cancel_metadata_job( + self, + ) -> Callable[ + [catalog.CancelMetadataJobRequest], + Union[empty_pb2.Empty, Awaitable[empty_pb2.Empty]], + ]: + raise NotImplementedError() + @property def list_operations( self, diff --git 
a/packages/google-cloud-dataplex/google/cloud/dataplex_v1/services/catalog_service/transports/grpc.py b/packages/google-cloud-dataplex/google/cloud/dataplex_v1/services/catalog_service/transports/grpc.py index 200495c0c780..5cf9af6bd86e 100644 --- a/packages/google-cloud-dataplex/google/cloud/dataplex_v1/services/catalog_service/transports/grpc.py +++ b/packages/google-cloud-dataplex/google/cloud/dataplex_v1/services/catalog_service/transports/grpc.py @@ -24,6 +24,7 @@ from google.iam.v1 import iam_policy_pb2 # type: ignore from google.iam.v1 import policy_pb2 # type: ignore from google.longrunning import operations_pb2 # type: ignore +from google.protobuf import empty_pb2 # type: ignore import grpc # type: ignore from google.cloud.dataplex_v1.types import catalog @@ -35,10 +36,10 @@ class CatalogServiceGrpcTransport(CatalogServiceTransport): """gRPC backend transport for CatalogService. The primary resources offered by this service are - EntryGroups, EntryTypes, AspectTypes, Entry and Aspect which - collectively allow a data administrator to organize, manage, - secure and catalog data across their organization located across - cloud projects in a variety of storage systems including Cloud + EntryGroups, EntryTypes, AspectTypes, and Entries. They + collectively let data administrators organize, manage, secure, + and catalog data located across cloud projects in their + organization in a variety of storage systems, including Cloud Storage and BigQuery. This class defines the same methods as the primary client, so the @@ -264,7 +265,7 @@ def create_entry_type( ) -> Callable[[catalog.CreateEntryTypeRequest], operations_pb2.Operation]: r"""Return a callable for the create entry type method over gRPC. - Creates an EntryType + Creates an EntryType. Returns: Callable[[~.CreateEntryTypeRequest], @@ -290,7 +291,7 @@ def update_entry_type( ) -> Callable[[catalog.UpdateEntryTypeRequest], operations_pb2.Operation]: r"""Return a callable for the update entry type method over gRPC. - Updates a EntryType resource. + Updates an EntryType. Returns: Callable[[~.UpdateEntryTypeRequest], @@ -316,7 +317,7 @@ def delete_entry_type( ) -> Callable[[catalog.DeleteEntryTypeRequest], operations_pb2.Operation]: r"""Return a callable for the delete entry type method over gRPC. - Deletes a EntryType resource. + Deletes an EntryType. Returns: Callable[[~.DeleteEntryTypeRequest], @@ -368,7 +369,7 @@ def get_entry_type( ) -> Callable[[catalog.GetEntryTypeRequest], catalog.EntryType]: r"""Return a callable for the get entry type method over gRPC. - Retrieves a EntryType resource. + Gets an EntryType. Returns: Callable[[~.GetEntryTypeRequest], @@ -394,7 +395,7 @@ def create_aspect_type( ) -> Callable[[catalog.CreateAspectTypeRequest], operations_pb2.Operation]: r"""Return a callable for the create aspect type method over gRPC. - Creates an AspectType + Creates an AspectType. Returns: Callable[[~.CreateAspectTypeRequest], @@ -420,7 +421,7 @@ def update_aspect_type( ) -> Callable[[catalog.UpdateAspectTypeRequest], operations_pb2.Operation]: r"""Return a callable for the update aspect type method over gRPC. - Updates a AspectType resource. + Updates an AspectType. Returns: Callable[[~.UpdateAspectTypeRequest], @@ -446,7 +447,7 @@ def delete_aspect_type( ) -> Callable[[catalog.DeleteAspectTypeRequest], operations_pb2.Operation]: r"""Return a callable for the delete aspect type method over gRPC. - Deletes a AspectType resource. + Deletes an AspectType. 
Returns: Callable[[~.DeleteAspectTypeRequest], @@ -498,7 +499,7 @@ def get_aspect_type( ) -> Callable[[catalog.GetAspectTypeRequest], catalog.AspectType]: r"""Return a callable for the get aspect type method over gRPC. - Retrieves a AspectType resource. + Gets an AspectType. Returns: Callable[[~.GetAspectTypeRequest], @@ -524,7 +525,7 @@ def create_entry_group( ) -> Callable[[catalog.CreateEntryGroupRequest], operations_pb2.Operation]: r"""Return a callable for the create entry group method over gRPC. - Creates an EntryGroup + Creates an EntryGroup. Returns: Callable[[~.CreateEntryGroupRequest], @@ -550,7 +551,7 @@ def update_entry_group( ) -> Callable[[catalog.UpdateEntryGroupRequest], operations_pb2.Operation]: r"""Return a callable for the update entry group method over gRPC. - Updates a EntryGroup resource. + Updates an EntryGroup. Returns: Callable[[~.UpdateEntryGroupRequest], @@ -576,7 +577,7 @@ def delete_entry_group( ) -> Callable[[catalog.DeleteEntryGroupRequest], operations_pb2.Operation]: r"""Return a callable for the delete entry group method over gRPC. - Deletes a EntryGroup resource. + Deletes an EntryGroup. Returns: Callable[[~.DeleteEntryGroupRequest], @@ -628,7 +629,7 @@ def get_entry_group( ) -> Callable[[catalog.GetEntryGroupRequest], catalog.EntryGroup]: r"""Return a callable for the get entry group method over gRPC. - Retrieves a EntryGroup resource. + Gets an EntryGroup. Returns: Callable[[~.GetEntryGroupRequest], @@ -726,7 +727,7 @@ def list_entries( ) -> Callable[[catalog.ListEntriesRequest], catalog.ListEntriesResponse]: r"""Return a callable for the list entries method over gRPC. - Lists entries within an entry group. + Lists Entries within an EntryGroup. Returns: Callable[[~.ListEntriesRequest], @@ -750,7 +751,12 @@ def list_entries( def get_entry(self) -> Callable[[catalog.GetEntryRequest], catalog.Entry]: r"""Return a callable for the get entry method over gRPC. - Gets a single entry. + Gets an Entry. + + **Caution**: The BigQuery metadata that is stored in Dataplex + Catalog is changing. For more information, see `Changes to + BigQuery metadata stored in Dataplex + Catalog `__. Returns: Callable[[~.GetEntryRequest], @@ -774,7 +780,13 @@ def get_entry(self) -> Callable[[catalog.GetEntryRequest], catalog.Entry]: def lookup_entry(self) -> Callable[[catalog.LookupEntryRequest], catalog.Entry]: r"""Return a callable for the lookup entry method over gRPC. - Looks up a single entry. + Looks up a single Entry by name using the permission on the + source system. + + **Caution**: The BigQuery metadata that is stored in Dataplex + Catalog is changing. For more information, see `Changes to + BigQuery metadata stored in Dataplex + Catalog `__. Returns: Callable[[~.LookupEntryRequest], @@ -800,7 +812,8 @@ def search_entries( ) -> Callable[[catalog.SearchEntriesRequest], catalog.SearchEntriesResponse]: r"""Return a callable for the search entries method over gRPC. - Searches for entries matching given query and scope. + Searches for Entries matching the given query and + scope. Returns: Callable[[~.SearchEntriesRequest], @@ -820,6 +833,118 @@ def search_entries( ) return self._stubs["search_entries"] + @property + def create_metadata_job( + self, + ) -> Callable[[catalog.CreateMetadataJobRequest], operations_pb2.Operation]: + r"""Return a callable for the create metadata job method over gRPC. + + Creates a metadata job. For example, use a metadata + job to import Dataplex Catalog entries and aspects from + a third-party system into Dataplex. 
+ + Returns: + Callable[[~.CreateMetadataJobRequest], + ~.Operation]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "create_metadata_job" not in self._stubs: + self._stubs["create_metadata_job"] = self.grpc_channel.unary_unary( + "/google.cloud.dataplex.v1.CatalogService/CreateMetadataJob", + request_serializer=catalog.CreateMetadataJobRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["create_metadata_job"] + + @property + def get_metadata_job( + self, + ) -> Callable[[catalog.GetMetadataJobRequest], catalog.MetadataJob]: + r"""Return a callable for the get metadata job method over gRPC. + + Gets a metadata job. + + Returns: + Callable[[~.GetMetadataJobRequest], + ~.MetadataJob]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_metadata_job" not in self._stubs: + self._stubs["get_metadata_job"] = self.grpc_channel.unary_unary( + "/google.cloud.dataplex.v1.CatalogService/GetMetadataJob", + request_serializer=catalog.GetMetadataJobRequest.serialize, + response_deserializer=catalog.MetadataJob.deserialize, + ) + return self._stubs["get_metadata_job"] + + @property + def list_metadata_jobs( + self, + ) -> Callable[[catalog.ListMetadataJobsRequest], catalog.ListMetadataJobsResponse]: + r"""Return a callable for the list metadata jobs method over gRPC. + + Lists metadata jobs. + + Returns: + Callable[[~.ListMetadataJobsRequest], + ~.ListMetadataJobsResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "list_metadata_jobs" not in self._stubs: + self._stubs["list_metadata_jobs"] = self.grpc_channel.unary_unary( + "/google.cloud.dataplex.v1.CatalogService/ListMetadataJobs", + request_serializer=catalog.ListMetadataJobsRequest.serialize, + response_deserializer=catalog.ListMetadataJobsResponse.deserialize, + ) + return self._stubs["list_metadata_jobs"] + + @property + def cancel_metadata_job( + self, + ) -> Callable[[catalog.CancelMetadataJobRequest], empty_pb2.Empty]: + r"""Return a callable for the cancel metadata job method over gRPC. + + Cancels a metadata job. + + If you cancel a metadata import job that is in progress, + the changes in the job might be partially applied. We + recommend that you reset the state of the entry groups + in your project by running another metadata job that + reverts the changes from the canceled job. + + Returns: + Callable[[~.CancelMetadataJobRequest], + ~.Empty]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
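+        # The idiom below is shared by every stub property on this transport:
+        # build the stub on first access, cache it in self._stubs, and reuse
+        # it for the lifetime of the channel. A generic sketch of the pattern
+        # (all names hypothetical):
+        #
+        #   if "rpc_name" not in self._stubs:
+        #       self._stubs["rpc_name"] = self.grpc_channel.unary_unary(
+        #           "/package.v1.Service/RpcName",
+        #           request_serializer=RpcRequest.serialize,
+        #           response_deserializer=RpcResponse.deserialize,
+        #       )
+        #   return self._stubs["rpc_name"]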
+ if "cancel_metadata_job" not in self._stubs: + self._stubs["cancel_metadata_job"] = self.grpc_channel.unary_unary( + "/google.cloud.dataplex.v1.CatalogService/CancelMetadataJob", + request_serializer=catalog.CancelMetadataJobRequest.serialize, + response_deserializer=empty_pb2.Empty.FromString, + ) + return self._stubs["cancel_metadata_job"] + def close(self): self.grpc_channel.close() diff --git a/packages/google-cloud-dataplex/google/cloud/dataplex_v1/services/catalog_service/transports/grpc_asyncio.py b/packages/google-cloud-dataplex/google/cloud/dataplex_v1/services/catalog_service/transports/grpc_asyncio.py index 1e62bdf0763f..6ff45ba84779 100644 --- a/packages/google-cloud-dataplex/google/cloud/dataplex_v1/services/catalog_service/transports/grpc_asyncio.py +++ b/packages/google-cloud-dataplex/google/cloud/dataplex_v1/services/catalog_service/transports/grpc_asyncio.py @@ -26,6 +26,7 @@ from google.iam.v1 import iam_policy_pb2 # type: ignore from google.iam.v1 import policy_pb2 # type: ignore from google.longrunning import operations_pb2 # type: ignore +from google.protobuf import empty_pb2 # type: ignore import grpc # type: ignore from grpc.experimental import aio # type: ignore @@ -39,10 +40,10 @@ class CatalogServiceGrpcAsyncIOTransport(CatalogServiceTransport): """gRPC AsyncIO backend transport for CatalogService. The primary resources offered by this service are - EntryGroups, EntryTypes, AspectTypes, Entry and Aspect which - collectively allow a data administrator to organize, manage, - secure and catalog data across their organization located across - cloud projects in a variety of storage systems including Cloud + EntryGroups, EntryTypes, AspectTypes, and Entries. They + collectively let data administrators organize, manage, secure, + and catalog data located across cloud projects in their + organization in a variety of storage systems, including Cloud Storage and BigQuery. This class defines the same methods as the primary client, so the @@ -276,7 +277,7 @@ def create_entry_type( ]: r"""Return a callable for the create entry type method over gRPC. - Creates an EntryType + Creates an EntryType. Returns: Callable[[~.CreateEntryTypeRequest], @@ -304,7 +305,7 @@ def update_entry_type( ]: r"""Return a callable for the update entry type method over gRPC. - Updates a EntryType resource. + Updates an EntryType. Returns: Callable[[~.UpdateEntryTypeRequest], @@ -332,7 +333,7 @@ def delete_entry_type( ]: r"""Return a callable for the delete entry type method over gRPC. - Deletes a EntryType resource. + Deletes an EntryType. Returns: Callable[[~.DeleteEntryTypeRequest], @@ -386,7 +387,7 @@ def get_entry_type( ) -> Callable[[catalog.GetEntryTypeRequest], Awaitable[catalog.EntryType]]: r"""Return a callable for the get entry type method over gRPC. - Retrieves a EntryType resource. + Gets an EntryType. Returns: Callable[[~.GetEntryTypeRequest], @@ -414,7 +415,7 @@ def create_aspect_type( ]: r"""Return a callable for the create aspect type method over gRPC. - Creates an AspectType + Creates an AspectType. Returns: Callable[[~.CreateAspectTypeRequest], @@ -442,7 +443,7 @@ def update_aspect_type( ]: r"""Return a callable for the update aspect type method over gRPC. - Updates a AspectType resource. + Updates an AspectType. Returns: Callable[[~.UpdateAspectTypeRequest], @@ -470,7 +471,7 @@ def delete_aspect_type( ]: r"""Return a callable for the delete aspect type method over gRPC. - Deletes a AspectType resource. + Deletes an AspectType. 
Returns: Callable[[~.DeleteAspectTypeRequest], @@ -524,7 +525,7 @@ def get_aspect_type( ) -> Callable[[catalog.GetAspectTypeRequest], Awaitable[catalog.AspectType]]: r"""Return a callable for the get aspect type method over gRPC. - Retrieves a AspectType resource. + Gets an AspectType. Returns: Callable[[~.GetAspectTypeRequest], @@ -552,7 +553,7 @@ def create_entry_group( ]: r"""Return a callable for the create entry group method over gRPC. - Creates an EntryGroup + Creates an EntryGroup. Returns: Callable[[~.CreateEntryGroupRequest], @@ -580,7 +581,7 @@ def update_entry_group( ]: r"""Return a callable for the update entry group method over gRPC. - Updates a EntryGroup resource. + Updates an EntryGroup. Returns: Callable[[~.UpdateEntryGroupRequest], @@ -608,7 +609,7 @@ def delete_entry_group( ]: r"""Return a callable for the delete entry group method over gRPC. - Deletes a EntryGroup resource. + Deletes an EntryGroup. Returns: Callable[[~.DeleteEntryGroupRequest], @@ -662,7 +663,7 @@ def get_entry_group( ) -> Callable[[catalog.GetEntryGroupRequest], Awaitable[catalog.EntryGroup]]: r"""Return a callable for the get entry group method over gRPC. - Retrieves a EntryGroup resource. + Gets an EntryGroup. Returns: Callable[[~.GetEntryGroupRequest], @@ -766,7 +767,7 @@ def list_entries( ) -> Callable[[catalog.ListEntriesRequest], Awaitable[catalog.ListEntriesResponse]]: r"""Return a callable for the list entries method over gRPC. - Lists entries within an entry group. + Lists Entries within an EntryGroup. Returns: Callable[[~.ListEntriesRequest], @@ -792,7 +793,12 @@ def get_entry( ) -> Callable[[catalog.GetEntryRequest], Awaitable[catalog.Entry]]: r"""Return a callable for the get entry method over gRPC. - Gets a single entry. + Gets an Entry. + + **Caution**: The BigQuery metadata that is stored in Dataplex + Catalog is changing. For more information, see `Changes to + BigQuery metadata stored in Dataplex + Catalog `__. Returns: Callable[[~.GetEntryRequest], @@ -818,7 +824,13 @@ def lookup_entry( ) -> Callable[[catalog.LookupEntryRequest], Awaitable[catalog.Entry]]: r"""Return a callable for the lookup entry method over gRPC. - Looks up a single entry. + Looks up a single Entry by name using the permission on the + source system. + + **Caution**: The BigQuery metadata that is stored in Dataplex + Catalog is changing. For more information, see `Changes to + BigQuery metadata stored in Dataplex + Catalog `__. Returns: Callable[[~.LookupEntryRequest], @@ -846,7 +858,8 @@ def search_entries( ]: r"""Return a callable for the search entries method over gRPC. - Searches for entries matching given query and scope. + Searches for Entries matching the given query and + scope. Returns: Callable[[~.SearchEntriesRequest], @@ -866,6 +879,122 @@ def search_entries( ) return self._stubs["search_entries"] + @property + def create_metadata_job( + self, + ) -> Callable[ + [catalog.CreateMetadataJobRequest], Awaitable[operations_pb2.Operation] + ]: + r"""Return a callable for the create metadata job method over gRPC. + + Creates a metadata job. For example, use a metadata + job to import Dataplex Catalog entries and aspects from + a third-party system into Dataplex. + + Returns: + Callable[[~.CreateMetadataJobRequest], + Awaitable[~.Operation]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. 
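+        # Note: at the transport layer this RPC yields a raw
+        # operations_pb2.Operation. The client layer wraps that value (via
+        # operation.from_gapic or operation_async.from_gapic) so that callers
+        # can poll .result() for the finished catalog.MetadataJob.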
+ # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "create_metadata_job" not in self._stubs: + self._stubs["create_metadata_job"] = self.grpc_channel.unary_unary( + "/google.cloud.dataplex.v1.CatalogService/CreateMetadataJob", + request_serializer=catalog.CreateMetadataJobRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["create_metadata_job"] + + @property + def get_metadata_job( + self, + ) -> Callable[[catalog.GetMetadataJobRequest], Awaitable[catalog.MetadataJob]]: + r"""Return a callable for the get metadata job method over gRPC. + + Gets a metadata job. + + Returns: + Callable[[~.GetMetadataJobRequest], + Awaitable[~.MetadataJob]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_metadata_job" not in self._stubs: + self._stubs["get_metadata_job"] = self.grpc_channel.unary_unary( + "/google.cloud.dataplex.v1.CatalogService/GetMetadataJob", + request_serializer=catalog.GetMetadataJobRequest.serialize, + response_deserializer=catalog.MetadataJob.deserialize, + ) + return self._stubs["get_metadata_job"] + + @property + def list_metadata_jobs( + self, + ) -> Callable[ + [catalog.ListMetadataJobsRequest], Awaitable[catalog.ListMetadataJobsResponse] + ]: + r"""Return a callable for the list metadata jobs method over gRPC. + + Lists metadata jobs. + + Returns: + Callable[[~.ListMetadataJobsRequest], + Awaitable[~.ListMetadataJobsResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "list_metadata_jobs" not in self._stubs: + self._stubs["list_metadata_jobs"] = self.grpc_channel.unary_unary( + "/google.cloud.dataplex.v1.CatalogService/ListMetadataJobs", + request_serializer=catalog.ListMetadataJobsRequest.serialize, + response_deserializer=catalog.ListMetadataJobsResponse.deserialize, + ) + return self._stubs["list_metadata_jobs"] + + @property + def cancel_metadata_job( + self, + ) -> Callable[[catalog.CancelMetadataJobRequest], Awaitable[empty_pb2.Empty]]: + r"""Return a callable for the cancel metadata job method over gRPC. + + Cancels a metadata job. + + If you cancel a metadata import job that is in progress, + the changes in the job might be partially applied. We + recommend that you reset the state of the entry groups + in your project by running another metadata job that + reverts the changes from the canceled job. + + Returns: + Callable[[~.CancelMetadataJobRequest], + Awaitable[~.Empty]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
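+        # Unlike its synchronous counterpart, the callable returned here is
+        # awaitable; a caller holding the transport directly would do
+        # something like the following (sketch only; the job name below is
+        # hypothetical):
+        #
+        #   await transport.cancel_metadata_job(
+        #       catalog.CancelMetadataJobRequest(
+        #           name="projects/my-project/locations/us-central1/metadataJobs/my-job",
+        #       )
+        #   )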
+ if "cancel_metadata_job" not in self._stubs: + self._stubs["cancel_metadata_job"] = self.grpc_channel.unary_unary( + "/google.cloud.dataplex.v1.CatalogService/CancelMetadataJob", + request_serializer=catalog.CancelMetadataJobRequest.serialize, + response_deserializer=empty_pb2.Empty.FromString, + ) + return self._stubs["cancel_metadata_job"] + def _prep_wrapped_messages(self, client_info): """Precompute the wrapped methods, overriding the base class method to use async wrappers.""" self._wrapped_methods = { @@ -1089,6 +1218,26 @@ def _prep_wrapped_messages(self, client_info): default_timeout=60.0, client_info=client_info, ), + self.create_metadata_job: self._wrap_method( + self.create_metadata_job, + default_timeout=None, + client_info=client_info, + ), + self.get_metadata_job: self._wrap_method( + self.get_metadata_job, + default_timeout=None, + client_info=client_info, + ), + self.list_metadata_jobs: self._wrap_method( + self.list_metadata_jobs, + default_timeout=None, + client_info=client_info, + ), + self.cancel_metadata_job: self._wrap_method( + self.cancel_metadata_job, + default_timeout=None, + client_info=client_info, + ), self.get_location: self._wrap_method( self.get_location, default_timeout=None, diff --git a/packages/google-cloud-dataplex/google/cloud/dataplex_v1/services/data_scan_service/async_client.py b/packages/google-cloud-dataplex/google/cloud/dataplex_v1/services/data_scan_service/async_client.py index 6ff314334644..c8c5c1eec991 100644 --- a/packages/google-cloud-dataplex/google/cloud/dataplex_v1/services/data_scan_service/async_client.py +++ b/packages/google-cloud-dataplex/google/cloud/dataplex_v1/services/data_scan_service/async_client.py @@ -54,6 +54,7 @@ from google.cloud.dataplex_v1.services.data_scan_service import pagers from google.cloud.dataplex_v1.types import ( + data_discovery, data_profile, data_quality, datascans, @@ -83,12 +84,16 @@ class DataScanServiceAsyncClient: _DEFAULT_ENDPOINT_TEMPLATE = DataScanServiceClient._DEFAULT_ENDPOINT_TEMPLATE _DEFAULT_UNIVERSE = DataScanServiceClient._DEFAULT_UNIVERSE + connection_path = staticmethod(DataScanServiceClient.connection_path) + parse_connection_path = staticmethod(DataScanServiceClient.parse_connection_path) data_scan_path = staticmethod(DataScanServiceClient.data_scan_path) parse_data_scan_path = staticmethod(DataScanServiceClient.parse_data_scan_path) data_scan_job_path = staticmethod(DataScanServiceClient.data_scan_job_path) parse_data_scan_job_path = staticmethod( DataScanServiceClient.parse_data_scan_job_path ) + dataset_path = staticmethod(DataScanServiceClient.dataset_path) + parse_dataset_path = staticmethod(DataScanServiceClient.parse_dataset_path) entity_path = staticmethod(DataScanServiceClient.entity_path) parse_entity_path = staticmethod(DataScanServiceClient.parse_entity_path) common_billing_account_path = staticmethod( diff --git a/packages/google-cloud-dataplex/google/cloud/dataplex_v1/services/data_scan_service/client.py b/packages/google-cloud-dataplex/google/cloud/dataplex_v1/services/data_scan_service/client.py index 03f256f07fdc..262e38518d07 100644 --- a/packages/google-cloud-dataplex/google/cloud/dataplex_v1/services/data_scan_service/client.py +++ b/packages/google-cloud-dataplex/google/cloud/dataplex_v1/services/data_scan_service/client.py @@ -60,6 +60,7 @@ from google.cloud.dataplex_v1.services.data_scan_service import pagers from google.cloud.dataplex_v1.types import ( + data_discovery, data_profile, data_quality, datascans, @@ -202,6 +203,30 @@ def transport(self) -> 
DataScanServiceTransport: """ return self._transport + @staticmethod + def connection_path( + project: str, + location: str, + connection: str, + ) -> str: + """Returns a fully-qualified connection string.""" + return ( + "projects/{project}/locations/{location}/connections/{connection}".format( + project=project, + location=location, + connection=connection, + ) + ) + + @staticmethod + def parse_connection_path(path: str) -> Dict[str, str]: + """Parses a connection path into its component segments.""" + m = re.match( + r"^projects/(?P<project>.+?)/locations/(?P<location>.+?)/connections/(?P<connection>.+?)$", + path, + ) + return m.groupdict() if m else {} + @staticmethod def data_scan_path( project: str, @@ -248,6 +273,23 @@ def parse_data_scan_job_path(path: str) -> Dict[str, str]: ) return m.groupdict() if m else {} + @staticmethod + def dataset_path( + project: str, + dataset: str, + ) -> str: + """Returns a fully-qualified dataset string.""" + return "projects/{project}/datasets/{dataset}".format( + project=project, + dataset=dataset, + ) + + @staticmethod + def parse_dataset_path(path: str) -> Dict[str, str]: + """Parses a dataset path into its component segments.""" + m = re.match(r"^projects/(?P<project>.+?)/datasets/(?P<dataset>.+?)$", path) + return m.groupdict() if m else {} + @staticmethod def entity_path( project: str, diff --git a/packages/google-cloud-dataplex/google/cloud/dataplex_v1/types/__init__.py b/packages/google-cloud-dataplex/google/cloud/dataplex_v1/types/__init__.py index 4830d58af947..4c56742ac228 100644 --- a/packages/google-cloud-dataplex/google/cloud/dataplex_v1/types/__init__.py +++ b/packages/google-cloud-dataplex/google/cloud/dataplex_v1/types/__init__.py @@ -18,10 +18,12 @@ Aspect, AspectSource, AspectType, + CancelMetadataJobRequest, CreateAspectTypeRequest, CreateEntryGroupRequest, CreateEntryRequest, CreateEntryTypeRequest, + CreateMetadataJobRequest, DeleteAspectTypeRequest, DeleteEntryGroupRequest, DeleteEntryRequest, @@ -35,6 +37,8 @@ GetEntryGroupRequest, GetEntryRequest, GetEntryTypeRequest, + GetMetadataJobRequest, + ImportItem, ListAspectTypesRequest, ListAspectTypesResponse, ListEntriesRequest, @@ -43,7 +47,10 @@ ListEntryGroupsResponse, ListEntryTypesRequest, ListEntryTypesResponse, + ListMetadataJobsRequest, + ListMetadataJobsResponse, LookupEntryRequest, + MetadataJob, SearchEntriesRequest, SearchEntriesResponse, SearchEntriesResult, @@ -61,6 +68,7 @@ ListContentResponse, UpdateContentRequest, ) +from .data_discovery import DataDiscoveryResult, DataDiscoverySpec from .data_profile import DataProfileResult, DataProfileSpec from .data_quality import ( DataQualityColumnResult, @@ -196,10 +204,12 @@ "Aspect", "AspectSource", "AspectType", + "CancelMetadataJobRequest", "CreateAspectTypeRequest", "CreateEntryGroupRequest", "CreateEntryRequest", "CreateEntryTypeRequest", + "CreateMetadataJobRequest", "DeleteAspectTypeRequest", "DeleteEntryGroupRequest", "DeleteEntryRequest", @@ -212,6 +222,8 @@ "GetEntryGroupRequest", "GetEntryRequest", "GetEntryTypeRequest", + "GetMetadataJobRequest", + "ImportItem", "ListAspectTypesRequest", "ListAspectTypesResponse", "ListEntriesRequest", @@ -220,7 +232,10 @@ "ListEntryGroupsResponse", "ListEntryTypesRequest", "ListEntryTypesResponse", + "ListMetadataJobsRequest", + "ListMetadataJobsResponse", "LookupEntryRequest", + "MetadataJob", "SearchEntriesRequest", "SearchEntriesResponse", "SearchEntriesResult", @@ -236,6 +251,8 @@ "ListContentRequest", "ListContentResponse", "UpdateContentRequest", + "DataDiscoveryResult", + "DataDiscoverySpec", "DataProfileResult",
"DataProfileSpec", "DataQualityColumnResult", diff --git a/packages/google-cloud-dataplex/google/cloud/dataplex_v1/types/catalog.py b/packages/google-cloud-dataplex/google/cloud/dataplex_v1/types/catalog.py index 6cfe3ecbfbb5..5cfc8ed82adc 100644 --- a/packages/google-cloud-dataplex/google/cloud/dataplex_v1/types/catalog.py +++ b/packages/google-cloud-dataplex/google/cloud/dataplex_v1/types/catalog.py @@ -62,6 +62,13 @@ "SearchEntriesRequest", "SearchEntriesResult", "SearchEntriesResponse", + "ImportItem", + "CreateMetadataJobRequest", + "GetMetadataJobRequest", + "ListMetadataJobsRequest", + "ListMetadataJobsResponse", + "CancelMetadataJobRequest", + "MetadataJob", }, ) @@ -80,12 +87,11 @@ class EntryView(proto.Enum): keys of all non-required aspects. CUSTOM (3): Returns aspects matching custom fields in - GetEntryRequest. If the number of aspects would - exceed 100, the first 100 will be returned. + GetEntryRequest. If the number of aspects + exceeds 100, the first 100 will be returned. ALL (4): Returns all aspects. If the number of aspects - would exceed 100, the first 100 will be - returned. + exceeds 100, the first 100 will be returned. """ ENTRY_VIEW_UNSPECIFIED = 0 BASIC = 1 @@ -119,9 +125,9 @@ class TransferStatus(proto.Enum): class AspectType(proto.Message): - r"""Aspect Type is a template for creating Aspects, and - represents the JSON-schema for a given Entry, e.g., BigQuery - Table Schema. + r"""AspectType is a template for creating Aspects, and represents + the JSON-schema for a given Entry, for example, BigQuery Table + Schema. Attributes: name (str): @@ -130,9 +136,9 @@ class AspectType(proto.Message): projects/{project_number}/locations/{location_id}/aspectTypes/{aspect_type_id}. uid (str): Output only. System generated globally unique - ID for the AspectType. This ID will be different - if the AspectType is deleted and re-created with - the same name. + ID for the AspectType. If you delete and + recreate the AspectType with the same name, then + this ID will be different. create_time (google.protobuf.timestamp_pb2.Timestamp): Output only. The time when the AspectType was created. @@ -147,12 +153,12 @@ class AspectType(proto.Message): Optional. User-defined labels for the AspectType. etag (str): - This checksum is computed by the server based - on the value of other fields, and may be sent on - update and delete requests to ensure the client - has an up-to-date value before proceeding. + The service computes this checksum. The + client may send it on update and delete requests + to ensure it has an up-to-date value before + proceeding. authorization (google.cloud.dataplex_v1.types.AspectType.Authorization): - Immutable. Authorization defined for this + Immutable. Defines the Authorization for this type. metadata_template (google.cloud.dataplex_v1.types.AspectType.MetadataTemplate): Required. MetadataTemplate of the aspect. @@ -163,13 +169,13 @@ class AspectType(proto.Message): """ class Authorization(proto.Message): - r"""Autorization for an Aspect Type. + r"""Autorization for an AspectType. Attributes: alternate_use_permission (str): Immutable. The IAM permission grantable on - the Entry Group to allow access to instantiate - Aspects of Dataplex owned Aspect Types, only + the EntryGroup to allow access to instantiate + Aspects of Dataplex owned AspectTypes, only settable for Dataplex owned Types. 
""" @@ -179,7 +185,7 @@ class Authorization(proto.Message): ) class MetadataTemplate(proto.Message): - r"""MetadataTemplate definition for AspectType + r"""MetadataTemplate definition for an AspectType. Attributes: index (int): @@ -196,48 +202,59 @@ class MetadataTemplate(proto.Message): name (str): Required. The name of the field. type_ (str): - Required. The datatype of this field. The - following values are supported: Primitive types - (string, integer, boolean, double, datetime); - datetime must be of the format RFC3339 UTC - "Zulu" (Examples: - - "2014-10-02T15:01:23Z" and - "2014-10-02T15:01:23.045123456Z"). Complex types - (enum, array, map, record). + Required. The datatype of this field. The following values + are supported: + + Primitive types: + + - string + - integer + - boolean + - double + - datetime. Must be of the format RFC3339 UTC "Zulu" + (Examples: "2014-10-02T15:01:23Z" and + "2014-10-02T15:01:23.045123456Z"). + + Complex types: + + - enum + - array + - map + - record record_fields (MutableSequence[google.cloud.dataplex_v1.types.AspectType.MetadataTemplate]): - Optional. Field definition, needs to be - specified if the type is record. Defines the - nested fields. + Optional. Field definition. You must specify + it if the type is record. It defines the nested + fields. enum_values (MutableSequence[google.cloud.dataplex_v1.types.AspectType.MetadataTemplate.EnumValue]): Optional. The list of values for an enum - type. Needs to be defined if the type is enum. + type. You must define it if the type is enum. map_items (google.cloud.dataplex_v1.types.AspectType.MetadataTemplate): - Optional. map_items needs to be set if the type is map. - map_items can refer to a primitive field or a complex - (record only) field. To specify a primitive field, just name - and type needs to be set in the nested MetadataTemplate. The - recommended value for the name field is item, as this is not - used in the actual payload. + Optional. If the type is map, set map_items. map_items can + refer to a primitive field or a complex (record only) field. + To specify a primitive field, you only need to set name and + type in the nested MetadataTemplate. The recommended value + for the name field is item, as this isn't used in the actual + payload. array_items (google.cloud.dataplex_v1.types.AspectType.MetadataTemplate): - Optional. array_items needs to be set if the type is array. - array_items can refer to a primitive field or a complex - (record only) field. To specify a primitive field, just name - and type needs to be set in the nested MetadataTemplate. The - recommended value for the name field is item, as this is not + Optional. If the type is array, set array_items. array_items + can refer to a primitive field or a complex (record only) + field. To specify a primitive field, you only need to set + name and type in the nested MetadataTemplate. The + recommended value for the name field is item, as this isn't used in the actual payload. type_id (str): - Optional. Id can be used if this definition - of the field needs to be reused later. Id needs - to be unique across the entire template. Id can - only be specified if the field type is record. + Optional. You can use type id if this + definition of the field needs to be reused + later. The type id must be unique across the + entire template. You can only specify it if the + field type is record. type_ref (str): Optional. A reference to another field - definition (instead of an inline definition). 
- The value must be equal to the value of an id - field defined elsewhere in the MetadataTemplate. - Only fields with type as record can refer to - other fields. + definition (not an inline definition). The value + must be equal to the value of an id field + defined elsewhere in the MetadataTemplate. Only + fields with record type can refer to other + fields. constraints (google.cloud.dataplex_v1.types.AspectType.MetadataTemplate.Constraints): Optional. Specifies the constraints on this field. @@ -247,18 +264,18 @@ class MetadataTemplate(proto.Message): """ class EnumValue(proto.Message): - r"""Definition of Enumvalue (to be used by enum fields) + r"""Definition of Enumvalue, to be used for enum fields. Attributes: index (int): - Required. Index for the enum. Cannot be - modified. + Required. Index for the enum value. It can't + be modified. name (str): Required. Name of the enumvalue. This is the - actual value that the aspect will contain. + actual value that the aspect can contain. deprecated (str): - Optional. Optional deprecation message to be - set if an enum value needs to be deprecated. + Optional. You can set this message if you + need to deprecate an enum value. """ index: int = proto.Field( @@ -275,12 +292,12 @@ class EnumValue(proto.Message): ) class Constraints(proto.Message): - r"""Definition of the constraints of a field + r"""Definition of the constraints of a field. Attributes: required (bool): - Optional. Marks this as an optional/required - field. + Optional. Marks this field as optional or + required. """ required: bool = proto.Field( @@ -289,33 +306,32 @@ class Constraints(proto.Message): ) class Annotations(proto.Message): - r"""Definition of the annotations of a field + r"""Definition of the annotations of a field. Attributes: deprecated (str): - Optional. Marks a field as deprecated, a - deprecation message can be included. + Optional. Marks a field as deprecated. You + can include a deprecation message. display_name (str): - Optional. Specify a displayname for a field. + Optional. Display name for a field. description (str): - Optional. Specify a description for a field + Optional. Description for a field. display_order (int): - Optional. Specify a display order for a - field. Display order can be used to reorder - where a field is rendered + Optional. Display order for a field. You can + use this to reorder where a field is rendered. string_type (str): - Optional. String Type annotations can be used - to specify special meaning to string fields. The - following values are supported: richText: - - The field must be interpreted as a rich text - field. url: A fully qualified url link. - resource: A service qualified resource - reference. + Optional. You can use String Type annotations to specify + special meaning to string fields. The following values are + supported: + + - richText: The field must be interpreted as a rich text + field. + - url: A fully qualified URL link. + - resource: A service qualified resource reference. string_values (MutableSequence[str]): Optional. Suggested hints for string fields. - These can be used to suggest values to users, - through an UI for example. + You can use them to suggest values to users + through console. """ deprecated: str = proto.Field( @@ -457,13 +473,13 @@ class EntryGroup(proto.Message): Attributes: name (str): Output only. The relative resource name of the EntryGroup, - of the form: - projects/{project_number}/locations/{location_id}/entryGroups/{entry_group_id}. 
+ in the format
+ projects/{project_id_or_number}/locations/{location_id}/entryGroups/{entry_group_id}.
uid (str):
Output only. System generated globally unique
- ID for the EntryGroup. This ID will be different
- if the EntryGroup is deleted and re-created with
- the same name.
+ ID for the EntryGroup. If you delete and
+ recreate the EntryGroup with the same name, this
+ ID will be different.
create_time (google.protobuf.timestamp_pb2.Timestamp):
Output only. The time when the EntryGroup was
created.
@@ -478,10 +494,10 @@ class EntryGroup(proto.Message):
Optional. User-defined labels for the
EntryGroup.
etag (str):
- This checksum is computed by the server based
- on the value of other fields, and may be sent on
- update and delete requests to ensure the client
- has an up-to-date value before proceeding.
+ This checksum is computed by the service, and
+ might be sent on update and delete requests to
+ ensure the client has an up-to-date value before
+ proceeding.
transfer_status (google.cloud.dataplex_v1.types.TransferStatus):
Output only. Denotes the transfer status of
the Entry Group. It is unspecified for Entry
@@ -558,13 +574,13 @@ class EntryType(proto.Message):
EntryType.
etag (str):
Optional. This checksum is computed by the
- server based on the value of other fields, and
- may be sent on update and delete requests to
- ensure the client has an up-to-date value before
- proceeding.
+ service, and might be sent on update and delete
+ requests to ensure the client has an up-to-date
+ value before proceeding.
type_aliases (MutableSequence[str]):
- Optional. Indicates the class this Entry Type
- belongs to, for example, TABLE, DATABASE, MODEL.
+ Optional. Indicates the classes this Entry
+ Type belongs to, for example, TABLE, DATABASE,
+ MODEL.
platform (str):
Optional. The platform that Entries of this
type belongs to.
@@ -684,11 +700,12 @@ class Aspect(proto.Message):
Output only. The time when the Aspect was
last updated.
data (google.protobuf.struct_pb2.Struct):
- Required. The content of the aspect, according to its aspect
- type schema. This will replace ``content``. The maximum size
- of the field is 120KB (encoded as UTF-8).
+ Required. The content of the aspect,
+ according to its aspect type schema. The maximum
+ size of the field is 120KB (encoded as UTF-8).
aspect_source (google.cloud.dataplex_v1.types.AspectSource):
-
+ Optional. Information related to the source
+ system of the aspect.
"""

aspect_type: str = proto.Field(
@@ -722,16 +739,21 @@ class Aspect(proto.Message):

class AspectSource(proto.Message):
- r"""AspectSource contains source system related information for
- the aspect.
+ r"""Information related to the source system of the aspect.

Attributes:
create_time (google.protobuf.timestamp_pb2.Timestamp):
- The create time of the aspect in the source
+ The time the aspect was created in the source
system.
update_time (google.protobuf.timestamp_pb2.Timestamp):
- The update time of the aspect in the source
- system.
+ The time the aspect was last updated in the
+ source system.
+ data_version (str):
+ The version of the data format used to
+ produce this data. This field is used to
+ indicate when the underlying data format
+ changes (e.g., schema modifications, changes to
+ the source URL format definition, etc.).
""" create_time: timestamp_pb2.Timestamp = proto.Field( @@ -744,48 +766,52 @@ class AspectSource(proto.Message): number=11, message=timestamp_pb2.Timestamp, ) + data_version: str = proto.Field( + proto.STRING, + number=12, + ) class Entry(proto.Message): - r"""An entry is a representation of a data asset which can be + r"""An entry is a representation of a data resource that can be described by various metadata. Attributes: name (str): - Identifier. The relative resource name of the Entry, of the - form: - projects/{project}/locations/{location}/entryGroups/{entry_group}/entries/{entry}. + Identifier. The relative resource name of the entry, in the + format + ``projects/{project_id_or_number}/locations/{location_id}/entryGroups/{entry_group_id}/entries/{entry_id}``. entry_type (str): - Required. Immutable. The resource name of the - EntryType used to create this Entry. + Required. Immutable. The relative resource name of the entry + type that was used to create this entry, in the format + ``projects/{project_id_or_number}/locations/{location_id}/entryTypes/{entry_type_id}``. create_time (google.protobuf.timestamp_pb2.Timestamp): - Output only. The time when the Entry was - created. + Output only. The time when the entry was + created in Dataplex. update_time (google.protobuf.timestamp_pb2.Timestamp): - Output only. The time when the Entry was last - updated. + Output only. The time when the entry was last + updated in Dataplex. aspects (MutableMapping[str, google.cloud.dataplex_v1.types.Aspect]): - Optional. The Aspects attached to the Entry. - The format for the key can be one of the - following: - - 1. {projectId}.{locationId}.{aspectTypeId} (if - the aspect is attached directly to the - entry) - 2. - {projectId}.{locationId}.{aspectTypeId}@{path} - (if the aspect is attached to an entry's - path) + Optional. The aspects that are attached to the entry. + Depending on how the aspect is attached to the entry, the + format of the aspect key can be one of the following: + + - If the aspect is attached directly to the entry: + ``{project_id_or_number}.{location_id}.{aspect_type_id}`` + - If the aspect is attached to an entry's path: + ``{project_id_or_number}.{location_id}.{aspect_type_id}@{path}`` parent_entry (str): Optional. Immutable. The resource name of the parent entry. fully_qualified_name (str): - Optional. A name for the entry that can - reference it in an external system. The maximum - size of the field is 4000 characters. + Optional. A name for the entry that can be referenced by an + external system. For more information, see `Fully qualified + names `__. + The maximum size of the field is 4000 characters. entry_source (google.cloud.dataplex_v1.types.EntrySource): - Optional. Source system related information - for an entry. + Optional. Information related to the source + system of the data resource that is represented + by the entry. """ name: str = proto.Field( @@ -828,52 +854,55 @@ class Entry(proto.Message): class EntrySource(proto.Message): - r"""EntrySource contains source system related information for - the entry. + r"""Information related to the source system of the data resource + that is represented by the entry. Attributes: resource (str): The name of the resource in the source - system. The maximum size of the field is 4000 - characters. + system. Maximum length is 4,000 characters. system (str): The name of the source system. - The maximum size of the field is 64 characters. + Maximum length is 64 characters. 
platform (str): The platform containing the source system. - The maximum size of the field is 64 characters. + Maximum length is 64 characters. display_name (str): - User friendly display name. - The maximum size of the field is 500 characters. + A user-friendly display name. + Maximum length is 500 characters. description (str): - Description of the Entry. - The maximum size of the field is 2000 - characters. + A description of the data resource. + Maximum length is 2,000 characters. labels (MutableMapping[str, str]): User-defined labels. The maximum size of keys and values is 128 characters each. ancestors (MutableSequence[google.cloud.dataplex_v1.types.EntrySource.Ancestor]): - Immutable. The ancestors of the Entry in the - source system. - create_time (google.protobuf.timestamp_pb2.Timestamp): - The create time of the resource in the source + Immutable. The entries representing the + ancestors of the data resource in the source system. + create_time (google.protobuf.timestamp_pb2.Timestamp): + The time when the resource was created in the + source system. update_time (google.protobuf.timestamp_pb2.Timestamp): - The update time of the resource in the source - system. + The time when the resource was last updated in the source + system. If the entry exists in the system and its + ``EntrySource`` has ``update_time`` populated, further + updates to the ``EntrySource`` of the entry must provide + incremental updates to its ``update_time``. location (str): Output only. Location of the resource in the - source system. Entry will be searchable by this + source system. You can search the entry by this location. By default, this should match the - location of the EntryGroup containing this - entry. A different value allows capturing source - location for data external to GCP. + location of the entry group containing this + entry. A different value allows capturing the + source location for data external to Google + Cloud. """ class Ancestor(proto.Message): - r"""Ancestor contains information about individual items in the - hierarchy of an Entry. + r"""Information about individual items in the hierarchy that is + associated with the data resource. Attributes: name (str): @@ -938,7 +967,7 @@ class Ancestor(proto.Message): class CreateEntryGroupRequest(proto.Message): - r"""Create EntryGroup Request + r"""Create EntryGroup Request. Attributes: parent (str): @@ -948,10 +977,11 @@ class CreateEntryGroupRequest(proto.Message): entry_group_id (str): Required. EntryGroup identifier. entry_group (google.cloud.dataplex_v1.types.EntryGroup): - Required. EntryGroup Resource + Required. EntryGroup Resource. validate_only (bool): - Optional. Only validate the request, but do - not perform mutations. The default is false. + Optional. The service validates the request + without performing any mutations. The default is + false. """ parent: str = proto.Field( @@ -974,16 +1004,17 @@ class CreateEntryGroupRequest(proto.Message): class UpdateEntryGroupRequest(proto.Message): - r"""Update EntryGroup Request + r"""Update EntryGroup Request. Attributes: entry_group (google.cloud.dataplex_v1.types.EntryGroup): - Required. EntryGroup Resource + Required. EntryGroup Resource. update_mask (google.protobuf.field_mask_pb2.FieldMask): Required. Mask of fields to update. validate_only (bool): - Optional. Only validate the request, but do - not perform mutations. The default is false. + Optional. The service validates the request, + without performing any mutations. The default is + false. 
""" entry_group: "EntryGroup" = proto.Field( @@ -1003,7 +1034,7 @@ class UpdateEntryGroupRequest(proto.Message): class DeleteEntryGroupRequest(proto.Message): - r"""Delele EntryGroup Request + r"""Delete EntryGroup Request. Attributes: name (str): @@ -1013,7 +1044,7 @@ class DeleteEntryGroupRequest(proto.Message): Optional. If the client provided etag value does not match the current etag value, the DeleteEntryGroupRequest method returns an - ABORTED error response + ABORTED error response. """ name: str = proto.Field( @@ -1034,18 +1065,18 @@ class ListEntryGroupsRequest(proto.Message): Required. The resource name of the entryGroup location, of the form: ``projects/{project_number}/locations/{location_id}`` where - ``location_id`` refers to a GCP region. + ``location_id`` refers to a Google Cloud region. page_size (int): Optional. Maximum number of EntryGroups to return. The service may return fewer than this - value. If unspecified, at most 10 EntryGroups - will be returned. The maximum value is 1000; + value. If unspecified, the service returns at + most 10 EntryGroups. The maximum value is 1000; values above 1000 will be coerced to 1000. page_token (str): Optional. Page token received from a previous ``ListEntryGroups`` call. Provide this to retrieve the - subsequent page. When paginating, all other parameters - provided to ``ListEntryGroups`` must match the call that + subsequent page. When paginating, all other parameters you + provide to ``ListEntryGroups`` must match the call that provided the page token. filter (str): Optional. Filter request. @@ -1076,18 +1107,17 @@ class ListEntryGroupsRequest(proto.Message): class ListEntryGroupsResponse(proto.Message): - r"""List ListEntryGroups response. + r"""List entry groups response. Attributes: entry_groups (MutableSequence[google.cloud.dataplex_v1.types.EntryGroup]): - ListEntryGroups under the given parent - location. + Entry groups under the given parent location. next_page_token (str): Token to retrieve the next page of results, or empty if there are no more results in the list. unreachable_locations (MutableSequence[str]): - Locations that could not be reached. + Locations that the service couldn't reach. """ @property @@ -1125,20 +1155,21 @@ class GetEntryGroupRequest(proto.Message): class CreateEntryTypeRequest(proto.Message): - r"""Create EntryType Request + r"""Create EntryType Request. Attributes: parent (str): Required. The resource name of the EntryType, of the form: projects/{project_number}/locations/{location_id} where - ``location_id`` refers to a GCP region. + ``location_id`` refers to a Google Cloud region. entry_type_id (str): Required. EntryType identifier. entry_type (google.cloud.dataplex_v1.types.EntryType): - Required. EntryType Resource + Required. EntryType Resource. validate_only (bool): - Optional. Only validate the request, but do - not perform mutations. The default is false. + Optional. The service validates the request + without performing any mutations. The default is + false. """ parent: str = proto.Field( @@ -1161,16 +1192,17 @@ class CreateEntryTypeRequest(proto.Message): class UpdateEntryTypeRequest(proto.Message): - r"""Update EntryType Request + r"""Update EntryType Request. Attributes: entry_type (google.cloud.dataplex_v1.types.EntryType): - Required. EntryType Resource + Required. EntryType Resource. update_mask (google.protobuf.field_mask_pb2.FieldMask): Required. Mask of fields to update. validate_only (bool): - Optional. Only validate the request, but do - not perform mutations. 
The default is false.
+ Optional. The service validates the request
+ without performing any mutations. The default is
+ false.
"""

entry_type: "EntryType" = proto.Field(
@@ -1190,7 +1222,7 @@ class UpdateEntryTypeRequest(proto.Message):

class DeleteEntryTypeRequest(proto.Message):
- r"""Delele EntryType Request
+ r"""Delete EntryType Request.

Attributes:
name (str):
@@ -1200,7 +1232,7 @@ class DeleteEntryTypeRequest(proto.Message):
Optional. If the client provided etag value
does not match the current etag value, the
DeleteEntryTypeRequest method returns an ABORTED
- error response
+ error response.
"""

name: str = proto.Field(
@@ -1221,32 +1253,32 @@ class ListEntryTypesRequest(proto.Message):
Required. The resource name of the EntryType location, of
the form:
``projects/{project_number}/locations/{location_id}`` where
- ``location_id`` refers to a GCP region.
+ ``location_id`` refers to a Google Cloud region.
page_size (int):
Optional. Maximum number of EntryTypes to
return. The service may return fewer than this
- value. If unspecified, at most 10 EntryTypes
- will be returned. The maximum value is 1000;
+ value. If unspecified, the service returns at
+ most 10 EntryTypes. The maximum value is 1000;
values above 1000 will be coerced to 1000.
page_token (str):
Optional. Page token received from a previous
``ListEntryTypes`` call. Provide this to retrieve the
- subsequent page. When paginating, all other parameters
- provided to ``ListEntryTypes`` must match the call that
+ subsequent page. When paginating, all other parameters you
+ provide to ``ListEntryTypes`` must match the call that
provided the page token.
filter (str):
- Optional. Filter request. Filters are
- case-sensitive. The following formats are
- supported:
-
- labels.key1 = "value1"
- labels:key1
- name = "value"
- These restrictions can be coinjoined with AND,
- OR and NOT conjunctions.
+ Optional. Filter request. Filters are case-sensitive. The
+ service supports the following formats:
+
+ - labels.key1 = "value1"
+ - labels:key1
+ - name = "value"
+
+ These restrictions can be conjoined with AND, OR, and NOT
+ conjunctions.
order_by (str):
- Optional. Order by fields (``name`` or ``create_time``) for
- the result. If not specified, the ordering is undefined.
+ Optional. Orders the result by ``name`` or ``create_time``
+ fields. If not specified, the ordering is undefined.
"""

parent: str = proto.Field(
@@ -1272,18 +1304,17 @@ class ListEntryTypesRequest(proto.Message):

class ListEntryTypesResponse(proto.Message):
- r"""List EntryTypes response
+ r"""List EntryTypes response.

Attributes:
entry_types (MutableSequence[google.cloud.dataplex_v1.types.EntryType]):
- ListEntryTypes under the given parent
- location.
+ EntryTypes under the given parent location.
next_page_token (str):
Token to retrieve the next page of results,
or empty if there are no more results in the
list.
unreachable_locations (MutableSequence[str]):
- Locations that could not be reached.
+ Locations that the service couldn't reach.
"""

@property
@@ -1306,7 +1337,7 @@ def raw_page(self):

class GetEntryTypeRequest(proto.Message):
- r"""Get EntryType request
+ r"""Get EntryType request.

Attributes:
name (str):
@@ -1321,20 +1352,21 @@ class GetEntryTypeRequest(proto.Message):

class CreateAspectTypeRequest(proto.Message):
- r"""Create AspectType Request
+ r"""Create AspectType Request.

Attributes:
parent (str):
Required. The resource name of the AspectType, of the form:
projects/{project_number}/locations/{location_id} where
- ``location_id`` refers to a GCP region.
+ ``location_id`` refers to a Google Cloud region.
aspect_type_id (str):
Required. AspectType identifier.
aspect_type (google.cloud.dataplex_v1.types.AspectType):
- Required. AspectType Resource
+ Required. AspectType Resource.
validate_only (bool):
- Optional. Only validate the request, but do
- not perform mutations. The default is false.
+ Optional. The service validates the request
+ without performing any mutations. The default is
+ false.
"""

parent: str = proto.Field(
@@ -1386,7 +1418,7 @@ class UpdateAspectTypeRequest(proto.Message):

class DeleteAspectTypeRequest(proto.Message):
- r"""Delele AspectType Request
+ r"""Delete AspectType Request.

Attributes:
name (str):
@@ -1396,7 +1428,7 @@ class DeleteAspectTypeRequest(proto.Message):
Optional. If the client provided etag value
does not match the current etag value, the
DeleteAspectTypeRequest method returns an
- ABORTED error response
+ ABORTED error response.
"""

name: str = proto.Field(
@@ -1410,39 +1442,39 @@ class DeleteAspectTypeRequest(proto.Message):

class ListAspectTypesRequest(proto.Message):
- r"""List AspectTypes request
+ r"""List AspectTypes request.

Attributes:
parent (str):
Required. The resource name of the AspectType location, of
the form:
``projects/{project_number}/locations/{location_id}`` where
- ``location_id`` refers to a GCP region.
+ ``location_id`` refers to a Google Cloud region.
page_size (int):
Optional. Maximum number of AspectTypes to
return. The service may return fewer than this
- value. If unspecified, at most 10 AspectTypes
- will be returned. The maximum value is 1000;
+ value. If unspecified, the service returns at
+ most 10 AspectTypes. The maximum value is 1000;
values above 1000 will be coerced to 1000.
page_token (str):
Optional. Page token received from a previous
``ListAspectTypes`` call. Provide this to retrieve the
- subsequent page. When paginating, all other parameters
- provided to ``ListAspectTypes`` must match the call that
+ subsequent page. When paginating, all other parameters you
+ provide to ``ListAspectTypes`` must match the call that
provided the page token.
filter (str):
- Optional. Filter request. Filters are
- case-sensitive. The following formats are
- supported:
-
- labels.key1 = "value1"
- labels:key1
- name = "value"
- These restrictions can be coinjoined with AND,
- OR and NOT conjunctions.
+ Optional. Filter request. Filters are case-sensitive. The
+ service supports the following formats:
+
+ - labels.key1 = "value1"
+ - labels:key1
+ - name = "value"
+
+ These restrictions can be conjoined with AND, OR, and NOT
+ conjunctions.
order_by (str):
- Optional. Order by fields (``name`` or ``create_time``) for
- the result. If not specified, the ordering is undefined.
+ Optional. Orders the result by ``name`` or ``create_time``
+ fields. If not specified, the ordering is undefined.
"""

parent: str = proto.Field(
@@ -1468,18 +1500,17 @@ class ListAspectTypesRequest(proto.Message):

class ListAspectTypesResponse(proto.Message):
- r"""List AspectTypes response
+ r"""List AspectTypes response.

Attributes:
aspect_types (MutableSequence[google.cloud.dataplex_v1.types.AspectType]):
- ListAspectTypes under the given parent
- location.
+ AspectTypes under the given parent location.
next_page_token (str):
Token to retrieve the next page of results,
or empty if there are no more results in the
list.
unreachable_locations (MutableSequence[str]):
- Locations that could not be reached.
+ Locations that the service couldn't reach.
""" @property @@ -1502,7 +1533,7 @@ def raw_page(self): class GetAspectTypeRequest(proto.Message): - r"""Get AspectType request + r"""Get AspectType request. Attributes: name (str): @@ -1517,7 +1548,7 @@ class GetAspectTypeRequest(proto.Message): class CreateEntryRequest(proto.Message): - r""" + r"""Create Entry request. Attributes: parent (str): @@ -1527,22 +1558,22 @@ class CreateEntryRequest(proto.Message): Required. Entry identifier. It has to be unique within an Entry Group. - Entries corresponding to Google Cloud resources use Entry ID - format based on Full Resource Names - (https://cloud.google.com/apis/design/resource_names#full_resource_name). - The format is a Full Resource Name of the resource without - the prefix double slashes in the API Service Name part of - Full Resource Name. This allows retrieval of entries using - their associated resource name. + Entries corresponding to Google Cloud resources use an Entry + ID format based on `full resource + names `__. + The format is a full resource name of the resource without + the prefix double slashes in the API service name part of + the full resource name. This allows retrieval of entries + using their associated resource name. - For example if the Full Resource Name of a resource is + For example, if the full resource name of a resource is ``//library.googleapis.com/shelves/shelf1/books/book2``, then the suggested entry_id is ``library.googleapis.com/shelves/shelf1/books/book2``. It is also suggested to follow the same convention for - entries corresponding to resources from other providers or - systems than Google Cloud. + entries corresponding to resources from providers or systems + other than Google Cloud. The maximum size of the field is 4000 characters. entry (google.cloud.dataplex_v1.types.Entry): @@ -1565,7 +1596,7 @@ class CreateEntryRequest(proto.Message): class UpdateEntryRequest(proto.Message): - r""" + r"""Update Entry request. Attributes: entry (google.cloud.dataplex_v1.types.Entry): @@ -1574,31 +1605,35 @@ class UpdateEntryRequest(proto.Message): Optional. Mask of fields to update. To update Aspects, the update_mask must contain the value "aspects". - If the update_mask is empty, all modifiable fields present - in the request will be updated. + If the update_mask is empty, the service will update all + modifiable fields present in the request. allow_missing (bool): - Optional. If set to true and the entry does - not exist, it will be created. + Optional. If set to true and the entry + doesn't exist, the service will create it. delete_missing_aspects (bool): Optional. If set to true and the aspect_keys specify aspect - ranges, any existing aspects from that range not provided in - the request will be deleted. + ranges, the service deletes any existing aspects from that + range that weren't provided in the request. aspect_keys (MutableSequence[str]): - Optional. The map keys of the Aspects which should be - modified. Supports the following syntaxes: - - - - matches aspect on given type - and empty path - - @path - matches aspect on given - type and specified path - - \* - matches aspects on given type - for all paths - - \*@path - matches aspects of all types on the given path - - Existing aspects matching the syntax will not be removed - unless ``delete_missing_aspects`` is set to true. - - If this field is left empty, it will be treated as + Optional. The map keys of the Aspects which the service + should modify. 
It supports the following syntaxes:
+
+ - ``<aspect_type_reference>`` - matches an aspect of the
+ given type and empty path.
+ - ``<aspect_type_reference>@path`` - matches an aspect of
+ the given type and specified path. For example, to attach
+ an aspect to a field that is specified by the ``schema``
+ aspect, the path should have the format
+ ``Schema.<fieldName>``.
+ - ``<aspect_type_reference>*`` - matches aspects of the
+ given type for all paths.
+ - ``*@path`` - matches aspects of all types on the given
+ path.
+
+ The service will not remove existing aspects matching the
+ syntax unless ``delete_missing_aspects`` is set to true.
+
+ If this field is left empty, the service treats it as
+ specifying exactly those Aspects present in the request.
"""

@@ -1627,7 +1662,7 @@ class UpdateEntryRequest(proto.Message):

class DeleteEntryRequest(proto.Message):
- r"""
+ r"""Delete Entry request.

Attributes:
name (str):
@@ -1642,31 +1677,46 @@ class DeleteEntryRequest(proto.Message):

class ListEntriesRequest(proto.Message):
- r"""
+ r"""List Entries request.

Attributes:
parent (str):
Required. The resource name of the parent Entry Group:
``projects/{project}/locations/{location}/entryGroups/{entry_group}``.
page_size (int):
-
+ Optional. Number of items to return per page. If there are
+ remaining results, the service returns a next_page_token. If
+ unspecified, the service returns at most 10 Entries. The
+ maximum value is 100; values above 100 will be coerced to
+ 100.
page_token (str):
- Optional. The pagination token returned by a
- previous request.
+ Optional. Page token received from a previous
+ ``ListEntries`` call. Provide this to retrieve the
+ subsequent page.
filter (str):
Optional. A filter on the entries to return. Filters are
- case-sensitive. The request can be filtered by the following
- fields: entry_type, entry_source.display_name. The
- comparison operators are =, !=, <, >, <=, >= (strings are
- compared according to lexical order) The logical operators
- AND, OR, NOT can be used in the filter. Wildcard "*" can be
- used, but for entry_type the full project id or number needs
- to be provided. Example filter expressions:
- `entry_source.display_name=AnExampleDisplayName`
- `entry_type=projects/example-project/locations/global/entryTypes/example-entry_type`
- `entry_type=projects/example-project/locations/us/entryTypes/a*
- OR entry_type=projects/another-project/locations/*` `NOT
- entry_source.display_name=AnotherExampleDisplayName`.
+ case-sensitive. You can filter the request by the following
+ fields:
+
+ - entry_type
+ - entry_source.display_name
+
+ The comparison operators are =, !=, <, >, <=, >=. The
+ service compares strings according to lexical order.
+
+ You can use the logical operators AND, OR, NOT in the
+ filter.
+
+ You can use the wildcard "*", but for entry_type you need to
+ provide the full project id or number.
+
+ Example filter expressions:
+
+ - "entry_source.display_name=AnExampleDisplayName"
+ - "entry_type=projects/example-project/locations/global/entryTypes/example-entry_type"
+ - "entry_type=projects/example-project/locations/us/entryTypes/a\*
+ OR entry_type=projects/another-project/locations/\*"
+ - "NOT entry_source.display_name=AnotherExampleDisplayName".
"""

parent: str = proto.Field(
@@ -1688,13 +1738,16 @@ class ListEntriesRequest(proto.Message):

class ListEntriesResponse(proto.Message):
- r"""
+ r"""List Entries response.

Attributes:
entries (MutableSequence[google.cloud.dataplex_v1.types.Entry]):
- The list of entries.
+ The list of entries under the given parent
+ location.
next_page_token (str):
- Pagination token.
+ Token to retrieve the next page of results,
+ or empty if there are no more results in the
+ list.
"""

@property
@@ -1713,24 +1766,23 @@ def raw_page(self):

class GetEntryRequest(proto.Message):
- r"""
+ r"""Get Entry request.

Attributes:
name (str):
Required. The resource name of the Entry:
``projects/{project}/locations/{location}/entryGroups/{entry_group}/entries/{entry}``.
view (google.cloud.dataplex_v1.types.EntryView):
- Optional. View for controlling which parts of
- an entry are to be returned.
+ Optional. View to control which parts of an
+ entry the service should return.
aspect_types (MutableSequence[str]):
Optional. Limits the aspects returned to the
- provided aspect types. Only works if the CUSTOM
- view is selected.
+ provided aspect types. It only works for CUSTOM
+ view.
paths (MutableSequence[str]):
Optional. Limits the aspects returned to
those associated with the provided paths within
- the Entry. Only works if the CUSTOM view is
- selected.
+ the Entry. It only works for CUSTOM view.
"""

name: str = proto.Field(
@@ -1753,7 +1805,7 @@ class GetEntryRequest(proto.Message):

class LookupEntryRequest(proto.Message):
- r"""
+ r"""Lookup Entry request using permissions in the source system.

Attributes:
name (str):
@@ -1761,17 +1813,16 @@ class LookupEntryRequest(proto.Message):
attributed in the following form:
``projects/{project}/locations/{location}``.
view (google.cloud.dataplex_v1.types.EntryView):
- Optional. View for controlling which parts of
- an entry are to be returned.
+ Optional. View to control which parts of an
+ entry the service should return.
aspect_types (MutableSequence[str]):
Optional. Limits the aspects returned to the
- provided aspect types. Only works if the CUSTOM
- view is selected.
+ provided aspect types. It only works for CUSTOM
+ view.
paths (MutableSequence[str]):
Optional. Limits the aspects returned to
those associated with the provided paths within
- the Entry. Only works if the CUSTOM view is
- selected.
+ the Entry. It only works for CUSTOM view.
entry (str):
Required. The resource name of the Entry:
``projects/{project}/locations/{location}/entryGroups/{entry_group}/entries/{entry}``.
@@ -1812,18 +1863,21 @@ class SearchEntriesRequest(proto.Message):
Required. The query against which entries in
scope should be matched.
page_size (int):
- Optional. Pagination.
+ Optional. Number of results in the search page. If <=0, then
+ defaults to 10. Max limit for page_size is 1000. Throws an
+ invalid argument for page_size > 1000.
page_token (str):
-
+ Optional. Page token received from a previous
+ ``SearchEntries`` call. Provide this to retrieve the
+ subsequent page.
order_by (str):
- Optional. Ordering of the results. Supported
- options to be added later.
+ Optional. Specifies the ordering of results.
scope (str):
Optional. The scope under which the search should be
- operating. Should either be organizations/<org_id> or
- projects/<project_ref>. If left unspecified, it will default
- to the organization where the project provided in ``name``
- is located.
+ operating. It must either be ``organizations/<org_id>`` or
+ ``projects/<project_ref>``. If it is unspecified, it
+ defaults to the organization where the project provided in
+ ``name`` is located.
"""

name: str = proto.Field(
@@ -1903,12 +1957,16 @@ class SearchEntriesResponse(proto.Message):
The results matching the search query.
total_size (int):
The estimated total number of matching
- entries. Not guaranteed to be accurate.
+ entries. This number isn't guaranteed to be
+ accurate.
next_page_token (str):
- Pagination token.
+ Token to retrieve the next page of results, + or empty if there are no more results in the + list. unreachable (MutableSequence[str]): - Unreachable locations. Search results don't - include data from those locations. + Locations that the service couldn't reach. + Search results don't include data from these + locations. """ @property @@ -1934,4 +1992,644 @@ def raw_page(self): ) +class ImportItem(proto.Message): + r"""An object that describes the values that you want to set for an + entry and its attached aspects when you import metadata. Used when + you run a metadata import job. See + [CreateMetadataJob][google.cloud.dataplex.v1.CatalogService.CreateMetadataJob]. + + You provide a collection of import items in a metadata import file. + For more information about how to create a metadata import file, see + `Metadata import + file `__. + + Attributes: + entry (google.cloud.dataplex_v1.types.Entry): + Information about an entry and its attached + aspects. + update_mask (google.protobuf.field_mask_pb2.FieldMask): + The fields to update, in paths that are relative to the + ``Entry`` resource. Separate each field with a comma. + + In ``FULL`` entry sync mode, Dataplex includes the paths of + all of the fields for an entry that can be modified, + including aspects. This means that Dataplex replaces the + existing entry with the entry in the metadata import file. + All modifiable fields are updated, regardless of the fields + that are listed in the update mask, and regardless of + whether a field is present in the ``entry`` object. + + The ``update_mask`` field is ignored when an entry is + created or re-created. + + Dataplex also determines which entries and aspects to modify + by comparing the values and timestamps that you provide in + the metadata import file with the values and timestamps that + exist in your project. For more information, see `Comparison + logic `__. + aspect_keys (MutableSequence[str]): + The aspects to modify. Supports the following syntaxes: + + - ``{aspect_type_reference}``: matches aspects that belong + to the specified aspect type and are attached directly to + the entry. + - ``{aspect_type_reference}@{path}``: matches aspects that + belong to the specified aspect type and path. + - ``{aspect_type_reference}@*``: matches aspects that + belong to the specified aspect type for all paths. + + Replace ``{aspect_type_reference}`` with a reference to the + aspect type, in the format + ``{project_id_or_number}.{location_id}.{aspect_type_id}``. + + If you leave this field empty, it is treated as specifying + exactly those aspects that are present within the specified + entry. + + In ``FULL`` entry sync mode, Dataplex implicitly adds the + keys for all of the required aspects of an entry. + """ + + entry: "Entry" = proto.Field( + proto.MESSAGE, + number=1, + message="Entry", + ) + update_mask: field_mask_pb2.FieldMask = proto.Field( + proto.MESSAGE, + number=2, + message=field_mask_pb2.FieldMask, + ) + aspect_keys: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=3, + ) + + +class CreateMetadataJobRequest(proto.Message): + r"""Create metadata job request. + + Attributes: + parent (str): + Required. The resource name of the parent location, in the + format + ``projects/{project_id_or_number}/locations/{location_id}`` + metadata_job (google.cloud.dataplex_v1.types.MetadataJob): + Required. The metadata job resource. + metadata_job_id (str): + Optional. The metadata job ID. If not provided, a unique ID + is generated with the prefix ``metadata-job-``. 
+ validate_only (bool): + Optional. The service validates the request + without performing any mutations. The default is + false. + """ + + parent: str = proto.Field( + proto.STRING, + number=1, + ) + metadata_job: "MetadataJob" = proto.Field( + proto.MESSAGE, + number=2, + message="MetadataJob", + ) + metadata_job_id: str = proto.Field( + proto.STRING, + number=3, + ) + validate_only: bool = proto.Field( + proto.BOOL, + number=4, + ) + + +class GetMetadataJobRequest(proto.Message): + r"""Get metadata job request. + + Attributes: + name (str): + Required. The resource name of the metadata job, in the + format + ``projects/{project_id_or_number}/locations/{location_id}/metadataJobs/{metadata_job_id}``. + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + + +class ListMetadataJobsRequest(proto.Message): + r"""List metadata jobs request. + + Attributes: + parent (str): + Required. The resource name of the parent location, in the + format + ``projects/{project_id_or_number}/locations/{location_id}`` + page_size (int): + Optional. The maximum number of metadata jobs + to return. The service might return fewer jobs + than this value. If unspecified, at most 10 jobs + are returned. The maximum value is 1,000. + page_token (str): + Optional. The page token received from a previous + ``ListMetadataJobs`` call. Provide this token to retrieve + the subsequent page of results. When paginating, all other + parameters that are provided to the ``ListMetadataJobs`` + request must match the call that provided the page token. + filter (str): + Optional. Filter request. Filters are case-sensitive. The + service supports the following formats: + + - ``labels.key1 = "value1"`` + - ``labels:key1`` + - ``name = "value"`` + + You can combine filters with ``AND``, ``OR``, and ``NOT`` + operators. + order_by (str): + Optional. The field to sort the results by, either ``name`` + or ``create_time``. If not specified, the ordering is + undefined. + """ + + parent: str = proto.Field( + proto.STRING, + number=1, + ) + page_size: int = proto.Field( + proto.INT32, + number=2, + ) + page_token: str = proto.Field( + proto.STRING, + number=3, + ) + filter: str = proto.Field( + proto.STRING, + number=4, + ) + order_by: str = proto.Field( + proto.STRING, + number=5, + ) + + +class ListMetadataJobsResponse(proto.Message): + r"""List metadata jobs response. + + Attributes: + metadata_jobs (MutableSequence[google.cloud.dataplex_v1.types.MetadataJob]): + Metadata jobs under the specified parent + location. + next_page_token (str): + A token to retrieve the next page of results. + If there are no more results in the list, the + value is empty. + unreachable_locations (MutableSequence[str]): + Locations that the service couldn't reach. + """ + + @property + def raw_page(self): + return self + + metadata_jobs: MutableSequence["MetadataJob"] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message="MetadataJob", + ) + next_page_token: str = proto.Field( + proto.STRING, + number=2, + ) + unreachable_locations: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=3, + ) + + +class CancelMetadataJobRequest(proto.Message): + r"""Cancel metadata job request. + + Attributes: + name (str): + Required. The resource name of the job, in the format + ``projects/{project_id_or_number}/locations/{location_id}/metadataJobs/{metadata_job_id}`` + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + + +class MetadataJob(proto.Message): + r"""A metadata job resource. + + .. 
_oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + name (str): + Output only. Identifier. The name of the resource that the + configuration is applied to, in the format + ``projects/{project_number}/locations/{location_id}/metadataJobs/{metadata_job_id}``. + uid (str): + Output only. A system-generated, globally + unique ID for the metadata job. If the metadata + job is deleted and then re-created with the same + name, this ID is different. + create_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. The time when the metadata job + was created. + update_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. The time when the metadata job + was updated. + labels (MutableMapping[str, str]): + Optional. User-defined labels. + type_ (google.cloud.dataplex_v1.types.MetadataJob.Type): + Required. Metadata job type. + import_spec (google.cloud.dataplex_v1.types.MetadataJob.ImportJobSpec): + Import job specification. + + This field is a member of `oneof`_ ``spec``. + import_result (google.cloud.dataplex_v1.types.MetadataJob.ImportJobResult): + Output only. Import job result. + + This field is a member of `oneof`_ ``result``. + status (google.cloud.dataplex_v1.types.MetadataJob.Status): + Output only. Metadata job status. + """ + + class Type(proto.Enum): + r"""Metadata job type. + + Values: + TYPE_UNSPECIFIED (0): + Unspecified. + IMPORT (1): + Import job. + """ + TYPE_UNSPECIFIED = 0 + IMPORT = 1 + + class ImportJobResult(proto.Message): + r"""Results from a metadata import job. + + Attributes: + deleted_entries (int): + Output only. The total number of entries that + were deleted. + updated_entries (int): + Output only. The total number of entries that + were updated. + created_entries (int): + Output only. The total number of entries that + were created. + unchanged_entries (int): + Output only. The total number of entries that + were unchanged. + recreated_entries (int): + Output only. The total number of entries that + were recreated. + update_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. The time when the status was + updated. + """ + + deleted_entries: int = proto.Field( + proto.INT64, + number=1, + ) + updated_entries: int = proto.Field( + proto.INT64, + number=2, + ) + created_entries: int = proto.Field( + proto.INT64, + number=3, + ) + unchanged_entries: int = proto.Field( + proto.INT64, + number=4, + ) + recreated_entries: int = proto.Field( + proto.INT64, + number=6, + ) + update_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=5, + message=timestamp_pb2.Timestamp, + ) + + class ImportJobSpec(proto.Message): + r"""Job specification for a metadata import job + + Attributes: + source_storage_uri (str): + Optional. The URI of a Cloud Storage bucket or folder + (beginning with ``gs://`` and ending with ``/``) that + contains the metadata import files for this job. + + A metadata import file defines the values to set for each of + the entries and aspects in a metadata job. For more + information about how to create a metadata import file and + the file requirements, see `Metadata import + file `__. + + You can provide multiple metadata import files in the same + metadata job. The bucket or folder must contain at least one + metadata import file, in JSON Lines format (either ``.json`` + or ``.jsonl`` file extension). + + In ``FULL`` entry sync mode, don't save the metadata import + file in a folder named ``SOURCE_STORAGE_URI/deletions/``. 
+ + **Caution**: If the metadata import file contains no data, + all entries and aspects that belong to the job's scope are + deleted. + source_create_time (google.protobuf.timestamp_pb2.Timestamp): + Optional. The time when the process that + created the metadata import files began. + scope (google.cloud.dataplex_v1.types.MetadataJob.ImportJobSpec.ImportJobScope): + Required. A boundary on the scope of impact + that the metadata import job can have. + entry_sync_mode (google.cloud.dataplex_v1.types.MetadataJob.ImportJobSpec.SyncMode): + Required. The sync mode for entries. Only ``FULL`` mode is + supported for entries. All entries in the job's scope are + modified. If an entry exists in Dataplex but isn't included + in the metadata import file, the entry is deleted when you + run the metadata job. + aspect_sync_mode (google.cloud.dataplex_v1.types.MetadataJob.ImportJobSpec.SyncMode): + Required. The sync mode for aspects. Only ``INCREMENTAL`` + mode is supported for aspects. An aspect is modified only if + the metadata import file includes a reference to the aspect + in the ``update_mask`` field and the ``aspect_keys`` field. + log_level (google.cloud.dataplex_v1.types.MetadataJob.ImportJobSpec.LogLevel): + Optional. The level of logs to write to Cloud Logging for + this job. + + Debug-level logs provide highly-detailed information for + troubleshooting, but their increased verbosity could incur + `additional + costs `__ that + might not be merited for all jobs. + + If unspecified, defaults to ``INFO``. + """ + + class SyncMode(proto.Enum): + r"""Specifies how the entries and aspects in a metadata job are + updated. + + Values: + SYNC_MODE_UNSPECIFIED (0): + Sync mode unspecified. + FULL (1): + All resources in the job's scope are + modified. If a resource exists in Dataplex but + isn't included in the metadata import file, the + resource is deleted when you run the metadata + job. Use this mode to perform a full sync of the + set of entries in the job scope. + INCREMENTAL (2): + Only the entries and aspects that are + explicitly included in the metadata import file + are modified. Use this mode to modify a subset + of resources while leaving unreferenced + resources unchanged. + """ + SYNC_MODE_UNSPECIFIED = 0 + FULL = 1 + INCREMENTAL = 2 + + class LogLevel(proto.Enum): + r"""The level of logs to write to Cloud Logging for this job. + + Values: + LOG_LEVEL_UNSPECIFIED (0): + Log level unspecified. + DEBUG (1): + Debug-level logging. Captures detailed logs for each import + item. Use debug-level logging to troubleshoot issues with + specific import items. For example, use debug-level logging + to identify resources that are missing from the job scope, + entries or aspects that don't conform to the associated + entry type or aspect type, or other misconfigurations with + the metadata import file. + + Depending on the size of your metadata job and the number of + logs that are generated, debug-level logging might incur + `additional + costs `__. + INFO (2): + Info-level logging. Captures logs at the + overall job level. Includes aggregate logs about + import items, but doesn't specify which import + item has an error. + """ + LOG_LEVEL_UNSPECIFIED = 0 + DEBUG = 1 + INFO = 2 + + class ImportJobScope(proto.Message): + r"""A boundary on the scope of impact that the metadata import + job can have. + + Attributes: + entry_groups (MutableSequence[str]): + Required. 
The entry group that is in scope for the import + job, specified as a relative resource name in the format + ``projects/{project_number_or_id}/locations/{location_id}/entryGroups/{entry_group_id}``. + Only entries that belong to the specified entry group are + affected by the job. + + Must contain exactly one element. The entry group and the + job must be in the same location. + entry_types (MutableSequence[str]): + Required. The entry types that are in scope for the import + job, specified as relative resource names in the format + ``projects/{project_number_or_id}/locations/{location_id}/entryTypes/{entry_type_id}``. + The job modifies only the entries that belong to these entry + types. + + If the metadata import file attempts to modify an entry + whose type isn't included in this list, the import job is + halted before modifying any entries or aspects. + + The location of an entry type must either match the location + of the job, or the entry type must be global. + aspect_types (MutableSequence[str]): + Optional. The aspect types that are in scope for the import + job, specified as relative resource names in the format + ``projects/{project_number_or_id}/locations/{location_id}/aspectTypes/{aspect_type_id}``. + The job modifies only the aspects that belong to these + aspect types. + + If the metadata import file attempts to modify an aspect + whose type isn't included in this list, the import job is + halted before modifying any entries or aspects. + + The location of an aspect type must either match the + location of the job, or the aspect type must be global. + """ + + entry_groups: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=1, + ) + entry_types: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=2, + ) + aspect_types: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=3, + ) + + source_storage_uri: str = proto.Field( + proto.STRING, + number=1, + ) + source_create_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=5, + message=timestamp_pb2.Timestamp, + ) + scope: "MetadataJob.ImportJobSpec.ImportJobScope" = proto.Field( + proto.MESSAGE, + number=2, + message="MetadataJob.ImportJobSpec.ImportJobScope", + ) + entry_sync_mode: "MetadataJob.ImportJobSpec.SyncMode" = proto.Field( + proto.ENUM, + number=3, + enum="MetadataJob.ImportJobSpec.SyncMode", + ) + aspect_sync_mode: "MetadataJob.ImportJobSpec.SyncMode" = proto.Field( + proto.ENUM, + number=4, + enum="MetadataJob.ImportJobSpec.SyncMode", + ) + log_level: "MetadataJob.ImportJobSpec.LogLevel" = proto.Field( + proto.ENUM, + number=6, + enum="MetadataJob.ImportJobSpec.LogLevel", + ) + + class Status(proto.Message): + r"""Metadata job status. + + Attributes: + state (google.cloud.dataplex_v1.types.MetadataJob.Status.State): + Output only. State of the metadata job. + message (str): + Output only. Message relating to the + progression of a metadata job. + completion_percent (int): + Output only. Progress tracking. + update_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. The time when the status was + updated. + """ + + class State(proto.Enum): + r"""State of a metadata job. + + Values: + STATE_UNSPECIFIED (0): + State unspecified. + QUEUED (1): + The job is queued. + RUNNING (2): + The job is running. + CANCELING (3): + The job is being canceled. + CANCELED (4): + The job is canceled. + SUCCEEDED (5): + The job succeeded. + FAILED (6): + The job failed. + SUCCEEDED_WITH_ERRORS (7): + The job completed with some errors. 
+ """ + STATE_UNSPECIFIED = 0 + QUEUED = 1 + RUNNING = 2 + CANCELING = 3 + CANCELED = 4 + SUCCEEDED = 5 + FAILED = 6 + SUCCEEDED_WITH_ERRORS = 7 + + state: "MetadataJob.Status.State" = proto.Field( + proto.ENUM, + number=1, + enum="MetadataJob.Status.State", + ) + message: str = proto.Field( + proto.STRING, + number=2, + ) + completion_percent: int = proto.Field( + proto.INT32, + number=3, + ) + update_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=4, + message=timestamp_pb2.Timestamp, + ) + + name: str = proto.Field( + proto.STRING, + number=1, + ) + uid: str = proto.Field( + proto.STRING, + number=2, + ) + create_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=3, + message=timestamp_pb2.Timestamp, + ) + update_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=4, + message=timestamp_pb2.Timestamp, + ) + labels: MutableMapping[str, str] = proto.MapField( + proto.STRING, + proto.STRING, + number=5, + ) + type_: Type = proto.Field( + proto.ENUM, + number=6, + enum=Type, + ) + import_spec: ImportJobSpec = proto.Field( + proto.MESSAGE, + number=100, + oneof="spec", + message=ImportJobSpec, + ) + import_result: ImportJobResult = proto.Field( + proto.MESSAGE, + number=200, + oneof="result", + message=ImportJobResult, + ) + status: Status = proto.Field( + proto.MESSAGE, + number=7, + message=Status, + ) + + __all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/types/data_discovery.py b/packages/google-cloud-dataplex/google/cloud/dataplex_v1/types/data_discovery.py similarity index 92% rename from owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/types/data_discovery.py rename to packages/google-cloud-dataplex/google/cloud/dataplex_v1/types/data_discovery.py index 434fb578a40b..8aa7e98b9e83 100644 --- a/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/types/data_discovery.py +++ b/packages/google-cloud-dataplex/google/cloud/dataplex_v1/types/data_discovery.py @@ -19,12 +19,11 @@ import proto # type: ignore - __protobuf__ = proto.module( - package='google.cloud.dataplex.v1', + package="google.cloud.dataplex.v1", manifest={ - 'DataDiscoverySpec', - 'DataDiscoveryResult', + "DataDiscoverySpec", + "DataDiscoveryResult", }, ) @@ -57,6 +56,7 @@ class BigQueryPublishingConfig(proto.Message): tables. Must be in the form ``projects/{project_id}/locations/{location_id}/connections/{connection_id}`` """ + class TableType(proto.Enum): r"""Determines how discovered tables are published. 
@@ -78,10 +78,12 @@ class TableType(proto.Enum): EXTERNAL = 1 BIGLAKE = 2 - table_type: 'DataDiscoverySpec.BigQueryPublishingConfig.TableType' = proto.Field( - proto.ENUM, - number=2, - enum='DataDiscoverySpec.BigQueryPublishingConfig.TableType', + table_type: "DataDiscoverySpec.BigQueryPublishingConfig.TableType" = ( + proto.Field( + proto.ENUM, + number=2, + enum="DataDiscoverySpec.BigQueryPublishingConfig.TableType", + ) ) connection: str = proto.Field( proto.STRING, @@ -192,15 +194,15 @@ class JsonOptions(proto.Message): proto.STRING, number=2, ) - csv_options: 'DataDiscoverySpec.StorageConfig.CsvOptions' = proto.Field( + csv_options: "DataDiscoverySpec.StorageConfig.CsvOptions" = proto.Field( proto.MESSAGE, number=3, - message='DataDiscoverySpec.StorageConfig.CsvOptions', + message="DataDiscoverySpec.StorageConfig.CsvOptions", ) - json_options: 'DataDiscoverySpec.StorageConfig.JsonOptions' = proto.Field( + json_options: "DataDiscoverySpec.StorageConfig.JsonOptions" = proto.Field( proto.MESSAGE, number=4, - message='DataDiscoverySpec.StorageConfig.JsonOptions', + message="DataDiscoverySpec.StorageConfig.JsonOptions", ) bigquery_publishing_config: BigQueryPublishingConfig = proto.Field( @@ -211,7 +213,7 @@ class JsonOptions(proto.Message): storage_config: StorageConfig = proto.Field( proto.MESSAGE, number=100, - oneof='resource_config', + oneof="resource_config", message=StorageConfig, ) diff --git a/packages/google-cloud-dataplex/google/cloud/dataplex_v1/types/data_profile.py b/packages/google-cloud-dataplex/google/cloud/dataplex_v1/types/data_profile.py index c52550c7eea7..1b0558266596 100644 --- a/packages/google-cloud-dataplex/google/cloud/dataplex_v1/types/data_profile.py +++ b/packages/google-cloud-dataplex/google/cloud/dataplex_v1/types/data_profile.py @@ -209,15 +209,17 @@ class ProfileInfo(proto.Message): distinct_ratio (float): Ratio of rows with distinct values against total scanned rows. Not available for complex - non-groupable field type RECORD and fields with - REPEATABLE mode. + non-groupable field type, including RECORD, + ARRAY, GEOGRAPHY, and JSON, as well as fields + with REPEATABLE mode. top_n_values (MutableSequence[google.cloud.dataplex_v1.types.DataProfileResult.Profile.Field.ProfileInfo.TopNValue]): The list of top N non-null values, frequency and ratio with which they occur in the scanned data. N is 10 or equal to the number of distinct values in the field, whichever is smaller. Not - available for complex non-groupable field type - RECORD and fields with REPEATABLE mode. + available for complex non-groupable field type, + including RECORD, ARRAY, GEOGRAPHY, and JSON, as + well as fields with REPEATABLE mode. string_profile (google.cloud.dataplex_v1.types.DataProfileResult.Profile.Field.ProfileInfo.StringFieldInfo): String type field information. diff --git a/packages/google-cloud-dataplex/google/cloud/dataplex_v1/types/data_quality.py b/packages/google-cloud-dataplex/google/cloud/dataplex_v1/types/data_quality.py index b071fe7e6e2d..e46f60dcc3c8 100644 --- a/packages/google-cloud-dataplex/google/cloud/dataplex_v1/types/data_quality.py +++ b/packages/google-cloud-dataplex/google/cloud/dataplex_v1/types/data_quality.py @@ -472,7 +472,7 @@ class DataQualityDimension(proto.Message): name (str): The dimension name a rule belongs to. 
Supported dimensions are ["COMPLETENESS", "ACCURACY", "CONSISTENCY", "VALIDITY", - "UNIQUENESS", "INTEGRITY"] + "UNIQUENESS", "FRESHNESS", "VOLUME"] """ name: str = proto.Field( @@ -557,7 +557,7 @@ class DataQualityRule(proto.Message): Required. The dimension a rule belongs to. Results are also aggregated at the dimension level. Supported dimensions are **["COMPLETENESS", "ACCURACY", "CONSISTENCY", "VALIDITY", - "UNIQUENESS", "INTEGRITY"]** + "UNIQUENESS", "FRESHNESS", "VOLUME"]** threshold (float): Optional. The minimum ratio of **passing_rows / total_rows** required to pass this rule, with a range of [0.0, 1.0]. @@ -577,6 +577,9 @@ class DataQualityRule(proto.Message): Optional. Description of the rule. - The maximum length is 1,024 characters. + suspended (bool): + Optional. Whether the Rule is active or + suspended. Default is false. """ class RangeExpectation(proto.Message): @@ -875,6 +878,10 @@ class SqlAssertion(proto.Message): proto.STRING, number=505, ) + suspended: bool = proto.Field( + proto.BOOL, + number=506, + ) class DataQualityColumnResult(proto.Message): diff --git a/packages/google-cloud-dataplex/google/cloud/dataplex_v1/types/datascans.py b/packages/google-cloud-dataplex/google/cloud/dataplex_v1/types/datascans.py index e64a238b7129..eb0eea7e0688 100644 --- a/packages/google-cloud-dataplex/google/cloud/dataplex_v1/types/datascans.py +++ b/packages/google-cloud-dataplex/google/cloud/dataplex_v1/types/datascans.py @@ -22,6 +22,7 @@ import proto # type: ignore from google.cloud.dataplex_v1.types import ( + data_discovery, data_profile, data_quality, processing, @@ -52,19 +53,22 @@ class DataScanType(proto.Enum): - r"""The type of DataScan. + r"""The type of data scan. Values: DATA_SCAN_TYPE_UNSPECIFIED (0): - The DataScan type is unspecified. + The data scan type is unspecified. DATA_QUALITY (1): - Data Quality scan. + Data quality scan. DATA_PROFILE (2): - Data Profile scan. + Data profile scan. + DATA_DISCOVERY (3): + Data discovery scan. """ DATA_SCAN_TYPE_UNSPECIFIED = 0 DATA_QUALITY = 1 DATA_PROFILE = 2 + DATA_DISCOVERY = 3 class CreateDataScanRequest(proto.Message): @@ -543,20 +547,29 @@ class DataScan(proto.Message): type_ (google.cloud.dataplex_v1.types.DataScanType): Output only. The type of DataScan. data_quality_spec (google.cloud.dataplex_v1.types.DataQualitySpec): - DataQualityScan related setting. + Settings for a data quality scan. This field is a member of `oneof`_ ``spec``. data_profile_spec (google.cloud.dataplex_v1.types.DataProfileSpec): - DataProfileScan related setting. + Settings for a data profile scan. + + This field is a member of `oneof`_ ``spec``. + data_discovery_spec (google.cloud.dataplex_v1.types.DataDiscoverySpec): + Settings for a data discovery scan. This field is a member of `oneof`_ ``spec``. data_quality_result (google.cloud.dataplex_v1.types.DataQualityResult): - Output only. The result of the data quality + Output only. The result of a data quality scan. This field is a member of `oneof`_ ``result``. data_profile_result (google.cloud.dataplex_v1.types.DataProfileResult): - Output only. The result of the data profile + Output only. The result of a data profile + scan. + + This field is a member of `oneof`_ ``result``. + data_discovery_result (google.cloud.dataplex_v1.types.DataDiscoveryResult): + Output only. The result of a data discovery scan. This field is a member of `oneof`_ ``result``. 
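[Editor's note] With ``DATA_DISCOVERY`` joining the ``spec``/``result`` oneofs above, a standalone discovery scan is configured the same way as the existing profile and quality scans. A sketch under GAPIC-convention assumptions (``DataScanServiceClient.create_data_scan`` returning a long-running operation); the resource names and the Cloud Storage ``DataSource.resource`` format shown are placeholders, not verified values::

    # Hedged sketch: create a data discovery scan that publishes discovered
    # tables to BigQuery as BigLake tables. Names are placeholders.
    from google.cloud import dataplex_v1

    client = dataplex_v1.DataScanServiceClient()

    scan = dataplex_v1.DataScan(
        data=dataplex_v1.DataSource(
            # Assumed service-qualified format for a Cloud Storage bucket.
            resource="//storage.googleapis.com/projects/example-project/buckets/example-bucket",
        ),
        data_discovery_spec=dataplex_v1.DataDiscoverySpec(
            bigquery_publishing_config=dataplex_v1.DataDiscoverySpec.BigQueryPublishingConfig(
                table_type=dataplex_v1.DataDiscoverySpec.BigQueryPublishingConfig.TableType.BIGLAKE,
                # BigLake publishing goes through a BigQuery connection (placeholder).
                connection="projects/example-project/locations/us-central1/connections/example-connection",
            ),
        ),
    )

    operation = client.create_data_scan(
        parent="projects/example-project/locations/us-central1",
        data_scan=scan,
        data_scan_id="example-discovery-scan",
    )
    created = operation.result()
    print(created.type_)  # Expected: DataScanType.DATA_DISCOVERY, inferred from the spec.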
@@ -694,6 +707,12 @@ class ExecutionStatus(proto.Message): oneof="spec", message=data_profile.DataProfileSpec, ) + data_discovery_spec: data_discovery.DataDiscoverySpec = proto.Field( + proto.MESSAGE, + number=102, + oneof="spec", + message=data_discovery.DataDiscoverySpec, + ) data_quality_result: data_quality.DataQualityResult = proto.Field( proto.MESSAGE, number=200, @@ -706,6 +725,12 @@ class ExecutionStatus(proto.Message): oneof="result", message=data_profile.DataProfileResult, ) + data_discovery_result: data_discovery.DataDiscoveryResult = proto.Field( + proto.MESSAGE, + number=202, + oneof="result", + message=data_discovery.DataDiscoveryResult, + ) class DataScanJob(proto.Message): @@ -728,6 +753,9 @@ class DataScanJob(proto.Message): uid (str): Output only. System generated globally unique ID for the DataScanJob. + create_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. The time when the DataScanJob + was created. start_time (google.protobuf.timestamp_pb2.Timestamp): Output only. The time when the DataScanJob was started. @@ -743,20 +771,32 @@ class DataScanJob(proto.Message): type_ (google.cloud.dataplex_v1.types.DataScanType): Output only. The type of the parent DataScan. data_quality_spec (google.cloud.dataplex_v1.types.DataQualitySpec): - Output only. DataQualityScan related setting. + Output only. Settings for a data quality + scan. This field is a member of `oneof`_ ``spec``. data_profile_spec (google.cloud.dataplex_v1.types.DataProfileSpec): - Output only. DataProfileScan related setting. + Output only. Settings for a data profile + scan. + + This field is a member of `oneof`_ ``spec``. + data_discovery_spec (google.cloud.dataplex_v1.types.DataDiscoverySpec): + Output only. Settings for a data discovery + scan. This field is a member of `oneof`_ ``spec``. data_quality_result (google.cloud.dataplex_v1.types.DataQualityResult): - Output only. The result of the data quality + Output only. The result of a data quality scan. This field is a member of `oneof`_ ``result``. data_profile_result (google.cloud.dataplex_v1.types.DataProfileResult): - Output only. The result of the data profile + Output only. The result of a data profile + scan. + + This field is a member of `oneof`_ ``result``. + data_discovery_result (google.cloud.dataplex_v1.types.DataDiscoveryResult): + Output only. The result of a data discovery scan. This field is a member of `oneof`_ ``result``. 
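
# --- Editor's note: illustrative sketch, not part of the generated patch. ---
# The new oneof members above let a DataScan carry a DataDiscoverySpec, and
# DataScanJob now exposes `create_time` alongside `start_time`. A hedged
# end-to-end sketch; the project, location, and bucket resource names are
# placeholder assumptions:

from google.cloud import dataplex_v1

client = dataplex_v1.DataScanServiceClient()

scan = dataplex_v1.DataScan(
    data=dataplex_v1.DataSource(
        # Placeholder Cloud Storage bucket resource name.
        resource="//storage.googleapis.com/projects/my-project/buckets/my-bucket",
    ),
    data_discovery_spec=dataplex_v1.DataDiscoverySpec(
        bigquery_publishing_config=dataplex_v1.DataDiscoverySpec.BigQueryPublishingConfig(
            table_type=dataplex_v1.DataDiscoverySpec.BigQueryPublishingConfig.TableType.BIGLAKE,
        ),
    ),
)

operation = client.create_data_scan(
    parent="projects/my-project/locations/us-central1",
    data_scan=scan,
    data_scan_id="my-discovery-scan",
)
created = operation.result()  # long-running; blocks until the scan exists

# Jobs spawned by the scan now report when they were created, not just started.
for job in client.list_data_scan_jobs(parent=created.name):
    print(job.name, job.create_time, job.state)
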
@@ -799,6 +839,11 @@ class State(proto.Enum): proto.STRING, number=2, ) + create_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=8, + message=timestamp_pb2.Timestamp, + ) start_time: timestamp_pb2.Timestamp = proto.Field( proto.MESSAGE, number=3, @@ -835,6 +880,12 @@ class State(proto.Enum): oneof="spec", message=data_profile.DataProfileSpec, ) + data_discovery_spec: data_discovery.DataDiscoverySpec = proto.Field( + proto.MESSAGE, + number=102, + oneof="spec", + message=data_discovery.DataDiscoverySpec, + ) data_quality_result: data_quality.DataQualityResult = proto.Field( proto.MESSAGE, number=200, @@ -847,6 +898,12 @@ class State(proto.Enum): oneof="result", message=data_profile.DataProfileResult, ) + data_discovery_result: data_discovery.DataDiscoveryResult = proto.Field( + proto.MESSAGE, + number=202, + oneof="result", + message=data_discovery.DataDiscoveryResult, + ) __all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/packages/google-cloud-dataplex/google/cloud/dataplex_v1/types/logs.py b/packages/google-cloud-dataplex/google/cloud/dataplex_v1/types/logs.py index dd54639a3934..463d15aeffc6 100644 --- a/packages/google-cloud-dataplex/google/cloud/dataplex_v1/types/logs.py +++ b/packages/google-cloud-dataplex/google/cloud/dataplex_v1/types/logs.py @@ -55,6 +55,9 @@ class DiscoveryEvent(proto.Message): The id of the associated asset. data_location (str): The data location associated with the event. + datascan_id (str): + The id of the associated datascan for + standalone discovery. type_ (google.cloud.dataplex_v1.types.DiscoveryEvent.EventType): The type of the event being logged. config (google.cloud.dataplex_v1.types.DiscoveryEvent.ConfigDetails): @@ -76,6 +79,11 @@ class DiscoveryEvent(proto.Message): Details about the action associated with the event. + This field is a member of `oneof`_ ``details``. + table (google.cloud.dataplex_v1.types.DiscoveryEvent.TableDetails): + Details about the BigQuery table publishing + associated with the event. + This field is a member of `oneof`_ ``details``. """ @@ -106,6 +114,16 @@ class EventType(proto.Enum): PARTITION_DELETED (7): An event representing a partition being deleted. + TABLE_PUBLISHED (10): + An event representing a table being + published. + TABLE_UPDATED (11): + An event representing a table being updated. + TABLE_IGNORED (12): + An event representing a table being skipped + in publishing. + TABLE_DELETED (13): + An event representing a table being deleted. """ EVENT_TYPE_UNSPECIFIED = 0 CONFIG = 1 @@ -115,6 +133,10 @@ class EventType(proto.Enum): PARTITION_CREATED = 5 PARTITION_UPDATED = 6 PARTITION_DELETED = 7 + TABLE_PUBLISHED = 10 + TABLE_UPDATED = 11 + TABLE_IGNORED = 12 + TABLE_DELETED = 13 class EntityType(proto.Enum): r"""The type of the entity. @@ -131,6 +153,24 @@ class EntityType(proto.Enum): TABLE = 1 FILESET = 2 + class TableType(proto.Enum): + r"""The type of the published table. + + Values: + TABLE_TYPE_UNSPECIFIED (0): + An unspecified table type. + EXTERNAL_TABLE (1): + External table type. + BIGLAKE_TABLE (2): + BigLake table type. + OBJECT_TABLE (3): + Object table type for unstructured data. + """ + TABLE_TYPE_UNSPECIFIED = 0 + EXTERNAL_TABLE = 1 + BIGLAKE_TABLE = 2 + OBJECT_TABLE = 3 + class ConfigDetails(proto.Message): r"""Details about configuration events. @@ -170,6 +210,27 @@ class EntityDetails(proto.Message): enum="DiscoveryEvent.EntityType", ) + class TableDetails(proto.Message): + r"""Details about the published table. 
+ + Attributes: + table (str): + The fully-qualified resource name of the + table resource. + type_ (google.cloud.dataplex_v1.types.DiscoveryEvent.TableType): + The type of the table resource. + """ + + table: str = proto.Field( + proto.STRING, + number=1, + ) + type_: "DiscoveryEvent.TableType" = proto.Field( + proto.ENUM, + number=2, + enum="DiscoveryEvent.TableType", + ) + class PartitionDetails(proto.Message): r"""Details about the partition. @@ -213,12 +274,19 @@ class ActionDetails(proto.Message): type_ (str): The type of action. Eg. IncompatibleDataSchema, InvalidDataFormat + issue (str): + The human readable issue associated with the + action. """ type_: str = proto.Field( proto.STRING, number=1, ) + issue: str = proto.Field( + proto.STRING, + number=2, + ) message: str = proto.Field( proto.STRING, @@ -240,6 +308,10 @@ class ActionDetails(proto.Message): proto.STRING, number=5, ) + datascan_id: str = proto.Field( + proto.STRING, + number=6, + ) type_: EventType = proto.Field( proto.ENUM, number=10, @@ -269,6 +341,12 @@ class ActionDetails(proto.Message): oneof="details", message=ActionDetails, ) + table: TableDetails = proto.Field( + proto.MESSAGE, + number=24, + oneof="details", + message=TableDetails, + ) class JobEvent(proto.Message): @@ -699,7 +777,6 @@ class EntityType(proto.Enum): class DataScanEvent(proto.Message): r"""These messages contain information about the execution of a datascan. The monitored resource is 'DataScan' - Next ID: 13 This message has `oneof`_ fields (mutually exclusive fields). For each oneof, at most one member field can be set at the same time. @@ -770,10 +847,13 @@ class ScanType(proto.Enum): Data scan for data profile. DATA_QUALITY (2): Data scan for data quality. + DATA_DISCOVERY (4): + Data scan for data discovery. """ SCAN_TYPE_UNSPECIFIED = 0 DATA_PROFILE = 1 DATA_QUALITY = 2 + DATA_DISCOVERY = 4 class State(proto.Enum): r"""The job state of the data scan. diff --git a/packages/google-cloud-dataplex/google/cloud/dataplex_v1/types/tasks.py b/packages/google-cloud-dataplex/google/cloud/dataplex_v1/types/tasks.py index fa68e7f2e372..3fefb6d4d8ef 100644 --- a/packages/google-cloud-dataplex/google/cloud/dataplex_v1/types/tasks.py +++ b/packages/google-cloud-dataplex/google/cloud/dataplex_v1/types/tasks.py @@ -406,10 +406,9 @@ class SparkTaskConfig(proto.Message): This field is a member of `oneof`_ ``driver``. sql_script_file (str): - A reference to a query file. This can be the Cloud Storage - URI of the query file or it can the path to a SqlScript - Content. The execution args are used to declare a set of - script variables (``set key="value";``). + A reference to a query file. This should be the Cloud + Storage URI of the query file. The execution args are used + to declare a set of script variables (``set key="value";``). This field is a member of `oneof`_ ``driver``. 
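
# --- Editor's note: illustrative sketch, not part of the generated patch. ---
# The logs.py hunks above give the DiscoveryEvent payload a `datascan_id`, an
# `issue` string on ActionDetails, and a `table` oneof member built from the
# new TableDetails/TableType messages. Constructing one locally shows how the
# pieces fit; the fully-qualified table name format is a placeholder assumption:

from google.cloud import dataplex_v1

event = dataplex_v1.DiscoveryEvent(
    datascan_id="my-discovery-scan",
    type_=dataplex_v1.DiscoveryEvent.EventType.TABLE_PUBLISHED,
    table=dataplex_v1.DiscoveryEvent.TableDetails(
        table="projects/my-project/datasets/my_dataset/tables/my_table",
        type_=dataplex_v1.DiscoveryEvent.TableType.BIGLAKE_TABLE,
    ),
)

# The `details` oneof tells log consumers which sub-message is populated.
assert dataplex_v1.DiscoveryEvent.pb(event).WhichOneof("details") == "table"
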
sql_script (str): diff --git a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_catalog_service_cancel_metadata_job_async.py b/packages/google-cloud-dataplex/samples/generated_samples/dataplex_v1_generated_catalog_service_cancel_metadata_job_async.py similarity index 100% rename from owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_catalog_service_cancel_metadata_job_async.py rename to packages/google-cloud-dataplex/samples/generated_samples/dataplex_v1_generated_catalog_service_cancel_metadata_job_async.py diff --git a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_catalog_service_cancel_metadata_job_sync.py b/packages/google-cloud-dataplex/samples/generated_samples/dataplex_v1_generated_catalog_service_cancel_metadata_job_sync.py similarity index 100% rename from owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_catalog_service_cancel_metadata_job_sync.py rename to packages/google-cloud-dataplex/samples/generated_samples/dataplex_v1_generated_catalog_service_cancel_metadata_job_sync.py diff --git a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_catalog_service_create_metadata_job_async.py b/packages/google-cloud-dataplex/samples/generated_samples/dataplex_v1_generated_catalog_service_create_metadata_job_async.py similarity index 100% rename from owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_catalog_service_create_metadata_job_async.py rename to packages/google-cloud-dataplex/samples/generated_samples/dataplex_v1_generated_catalog_service_create_metadata_job_async.py diff --git a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_catalog_service_create_metadata_job_sync.py b/packages/google-cloud-dataplex/samples/generated_samples/dataplex_v1_generated_catalog_service_create_metadata_job_sync.py similarity index 100% rename from owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_catalog_service_create_metadata_job_sync.py rename to packages/google-cloud-dataplex/samples/generated_samples/dataplex_v1_generated_catalog_service_create_metadata_job_sync.py diff --git a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_catalog_service_get_metadata_job_async.py b/packages/google-cloud-dataplex/samples/generated_samples/dataplex_v1_generated_catalog_service_get_metadata_job_async.py similarity index 100% rename from owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_catalog_service_get_metadata_job_async.py rename to packages/google-cloud-dataplex/samples/generated_samples/dataplex_v1_generated_catalog_service_get_metadata_job_async.py diff --git a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_catalog_service_get_metadata_job_sync.py b/packages/google-cloud-dataplex/samples/generated_samples/dataplex_v1_generated_catalog_service_get_metadata_job_sync.py similarity index 100% rename from owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_catalog_service_get_metadata_job_sync.py rename to packages/google-cloud-dataplex/samples/generated_samples/dataplex_v1_generated_catalog_service_get_metadata_job_sync.py diff --git 
a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_catalog_service_list_metadata_jobs_async.py b/packages/google-cloud-dataplex/samples/generated_samples/dataplex_v1_generated_catalog_service_list_metadata_jobs_async.py similarity index 100% rename from owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_catalog_service_list_metadata_jobs_async.py rename to packages/google-cloud-dataplex/samples/generated_samples/dataplex_v1_generated_catalog_service_list_metadata_jobs_async.py diff --git a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_catalog_service_list_metadata_jobs_sync.py b/packages/google-cloud-dataplex/samples/generated_samples/dataplex_v1_generated_catalog_service_list_metadata_jobs_sync.py similarity index 100% rename from owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_catalog_service_list_metadata_jobs_sync.py rename to packages/google-cloud-dataplex/samples/generated_samples/dataplex_v1_generated_catalog_service_list_metadata_jobs_sync.py diff --git a/packages/google-cloud-dataplex/samples/generated_samples/snippet_metadata_google.cloud.dataplex.v1.json b/packages/google-cloud-dataplex/samples/generated_samples/snippet_metadata_google.cloud.dataplex.v1.json index e466410d1430..a7eb15b2cc0d 100644 --- a/packages/google-cloud-dataplex/samples/generated_samples/snippet_metadata_google.cloud.dataplex.v1.json +++ b/packages/google-cloud-dataplex/samples/generated_samples/snippet_metadata_google.cloud.dataplex.v1.json @@ -8,9 +8,164 @@ ], "language": "PYTHON", "name": "google-cloud-dataplex", - "version": "2.3.1" + "version": "0.1.0" }, "snippets": [ + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.dataplex_v1.CatalogServiceAsyncClient", + "shortName": "CatalogServiceAsyncClient" + }, + "fullName": "google.cloud.dataplex_v1.CatalogServiceAsyncClient.cancel_metadata_job", + "method": { + "fullName": "google.cloud.dataplex.v1.CatalogService.CancelMetadataJob", + "service": { + "fullName": "google.cloud.dataplex.v1.CatalogService", + "shortName": "CatalogService" + }, + "shortName": "CancelMetadataJob" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dataplex_v1.types.CancelMetadataJobRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "shortName": "cancel_metadata_job" + }, + "description": "Sample for CancelMetadataJob", + "file": "dataplex_v1_generated_catalog_service_cancel_metadata_job_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dataplex_v1_generated_CatalogService_CancelMetadataJob_async", + "segments": [ + { + "end": 49, + "start": 27, + "type": "FULL" + }, + { + "end": 49, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dataplex_v1_generated_catalog_service_cancel_metadata_job_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.dataplex_v1.CatalogServiceClient", + "shortName": "CatalogServiceClient" + }, + 
"fullName": "google.cloud.dataplex_v1.CatalogServiceClient.cancel_metadata_job", + "method": { + "fullName": "google.cloud.dataplex.v1.CatalogService.CancelMetadataJob", + "service": { + "fullName": "google.cloud.dataplex.v1.CatalogService", + "shortName": "CatalogService" + }, + "shortName": "CancelMetadataJob" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dataplex_v1.types.CancelMetadataJobRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "shortName": "cancel_metadata_job" + }, + "description": "Sample for CancelMetadataJob", + "file": "dataplex_v1_generated_catalog_service_cancel_metadata_job_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dataplex_v1_generated_CatalogService_CancelMetadataJob_sync", + "segments": [ + { + "end": 49, + "start": 27, + "type": "FULL" + }, + { + "end": 49, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dataplex_v1_generated_catalog_service_cancel_metadata_job_sync.py" + }, { "canonical": true, "clientMethod": { @@ -719,6 +874,183 @@ ], "title": "dataplex_v1_generated_catalog_service_create_entry_sync.py" }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.dataplex_v1.CatalogServiceAsyncClient", + "shortName": "CatalogServiceAsyncClient" + }, + "fullName": "google.cloud.dataplex_v1.CatalogServiceAsyncClient.create_metadata_job", + "method": { + "fullName": "google.cloud.dataplex.v1.CatalogService.CreateMetadataJob", + "service": { + "fullName": "google.cloud.dataplex.v1.CatalogService", + "shortName": "CatalogService" + }, + "shortName": "CreateMetadataJob" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dataplex_v1.types.CreateMetadataJobRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "metadata_job", + "type": "google.cloud.dataplex_v1.types.MetadataJob" + }, + { + "name": "metadata_job_id", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation_async.AsyncOperation", + "shortName": "create_metadata_job" + }, + "description": "Sample for CreateMetadataJob", + "file": "dataplex_v1_generated_catalog_service_create_metadata_job_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dataplex_v1_generated_CatalogService_CreateMetadataJob_async", + "segments": [ + { + "end": 63, + "start": 27, + "type": "FULL" + }, + { + "end": 63, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 53, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 60, + "start": 54, + "type": "REQUEST_EXECUTION" + }, + { + "end": 64, + "start": 61, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dataplex_v1_generated_catalog_service_create_metadata_job_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": 
"google.cloud.dataplex_v1.CatalogServiceClient", + "shortName": "CatalogServiceClient" + }, + "fullName": "google.cloud.dataplex_v1.CatalogServiceClient.create_metadata_job", + "method": { + "fullName": "google.cloud.dataplex.v1.CatalogService.CreateMetadataJob", + "service": { + "fullName": "google.cloud.dataplex.v1.CatalogService", + "shortName": "CatalogService" + }, + "shortName": "CreateMetadataJob" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dataplex_v1.types.CreateMetadataJobRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "metadata_job", + "type": "google.cloud.dataplex_v1.types.MetadataJob" + }, + { + "name": "metadata_job_id", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation.Operation", + "shortName": "create_metadata_job" + }, + "description": "Sample for CreateMetadataJob", + "file": "dataplex_v1_generated_catalog_service_create_metadata_job_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dataplex_v1_generated_CatalogService_CreateMetadataJob_sync", + "segments": [ + { + "end": 63, + "start": 27, + "type": "FULL" + }, + { + "end": 63, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 53, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 60, + "start": 54, + "type": "REQUEST_EXECUTION" + }, + { + "end": 64, + "start": 61, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dataplex_v1_generated_catalog_service_create_metadata_job_sync.py" + }, { "canonical": true, "clientMethod": { @@ -1463,7 +1795,168 @@ "parameters": [ { "name": "request", - "type": "google.cloud.dataplex_v1.types.GetAspectTypeRequest" + "type": "google.cloud.dataplex_v1.types.GetAspectTypeRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.dataplex_v1.types.AspectType", + "shortName": "get_aspect_type" + }, + "description": "Sample for GetAspectType", + "file": "dataplex_v1_generated_catalog_service_get_aspect_type_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dataplex_v1_generated_CatalogService_GetAspectType_sync", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dataplex_v1_generated_catalog_service_get_aspect_type_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.dataplex_v1.CatalogServiceAsyncClient", + "shortName": "CatalogServiceAsyncClient" + }, + "fullName": "google.cloud.dataplex_v1.CatalogServiceAsyncClient.get_entry_group", + "method": { + "fullName": "google.cloud.dataplex.v1.CatalogService.GetEntryGroup", + "service": { + "fullName": "google.cloud.dataplex.v1.CatalogService", + "shortName": "CatalogService" + }, + "shortName": "GetEntryGroup" + 
}, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dataplex_v1.types.GetEntryGroupRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.dataplex_v1.types.EntryGroup", + "shortName": "get_entry_group" + }, + "description": "Sample for GetEntryGroup", + "file": "dataplex_v1_generated_catalog_service_get_entry_group_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dataplex_v1_generated_CatalogService_GetEntryGroup_async", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dataplex_v1_generated_catalog_service_get_entry_group_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.dataplex_v1.CatalogServiceClient", + "shortName": "CatalogServiceClient" + }, + "fullName": "google.cloud.dataplex_v1.CatalogServiceClient.get_entry_group", + "method": { + "fullName": "google.cloud.dataplex.v1.CatalogService.GetEntryGroup", + "service": { + "fullName": "google.cloud.dataplex.v1.CatalogService", + "shortName": "CatalogService" + }, + "shortName": "GetEntryGroup" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dataplex_v1.types.GetEntryGroupRequest" }, { "name": "name", @@ -1482,14 +1975,14 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.cloud.dataplex_v1.types.AspectType", - "shortName": "get_aspect_type" + "resultType": "google.cloud.dataplex_v1.types.EntryGroup", + "shortName": "get_entry_group" }, - "description": "Sample for GetAspectType", - "file": "dataplex_v1_generated_catalog_service_get_aspect_type_sync.py", + "description": "Sample for GetEntryGroup", + "file": "dataplex_v1_generated_catalog_service_get_entry_group_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "dataplex_v1_generated_CatalogService_GetAspectType_sync", + "regionTag": "dataplex_v1_generated_CatalogService_GetEntryGroup_sync", "segments": [ { "end": 51, @@ -1522,7 +2015,7 @@ "type": "RESPONSE_HANDLING" } ], - "title": "dataplex_v1_generated_catalog_service_get_aspect_type_sync.py" + "title": "dataplex_v1_generated_catalog_service_get_entry_group_sync.py" }, { "canonical": true, @@ -1532,19 +2025,19 @@ "fullName": "google.cloud.dataplex_v1.CatalogServiceAsyncClient", "shortName": "CatalogServiceAsyncClient" }, - "fullName": "google.cloud.dataplex_v1.CatalogServiceAsyncClient.get_entry_group", + "fullName": "google.cloud.dataplex_v1.CatalogServiceAsyncClient.get_entry_type", "method": { - "fullName": "google.cloud.dataplex.v1.CatalogService.GetEntryGroup", + "fullName": "google.cloud.dataplex.v1.CatalogService.GetEntryType", "service": { "fullName": "google.cloud.dataplex.v1.CatalogService", "shortName": "CatalogService" }, - "shortName": "GetEntryGroup" + "shortName": "GetEntryType" }, "parameters": [ { "name": "request", - "type": "google.cloud.dataplex_v1.types.GetEntryGroupRequest" + "type": "google.cloud.dataplex_v1.types.GetEntryTypeRequest" }, { "name": "name", @@ -1563,14 
+2056,14 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.cloud.dataplex_v1.types.EntryGroup", - "shortName": "get_entry_group" + "resultType": "google.cloud.dataplex_v1.types.EntryType", + "shortName": "get_entry_type" }, - "description": "Sample for GetEntryGroup", - "file": "dataplex_v1_generated_catalog_service_get_entry_group_async.py", + "description": "Sample for GetEntryType", + "file": "dataplex_v1_generated_catalog_service_get_entry_type_async.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "dataplex_v1_generated_CatalogService_GetEntryGroup_async", + "regionTag": "dataplex_v1_generated_CatalogService_GetEntryType_async", "segments": [ { "end": 51, @@ -1603,7 +2096,7 @@ "type": "RESPONSE_HANDLING" } ], - "title": "dataplex_v1_generated_catalog_service_get_entry_group_async.py" + "title": "dataplex_v1_generated_catalog_service_get_entry_type_async.py" }, { "canonical": true, @@ -1612,19 +2105,19 @@ "fullName": "google.cloud.dataplex_v1.CatalogServiceClient", "shortName": "CatalogServiceClient" }, - "fullName": "google.cloud.dataplex_v1.CatalogServiceClient.get_entry_group", + "fullName": "google.cloud.dataplex_v1.CatalogServiceClient.get_entry_type", "method": { - "fullName": "google.cloud.dataplex.v1.CatalogService.GetEntryGroup", + "fullName": "google.cloud.dataplex.v1.CatalogService.GetEntryType", "service": { "fullName": "google.cloud.dataplex.v1.CatalogService", "shortName": "CatalogService" }, - "shortName": "GetEntryGroup" + "shortName": "GetEntryType" }, "parameters": [ { "name": "request", - "type": "google.cloud.dataplex_v1.types.GetEntryGroupRequest" + "type": "google.cloud.dataplex_v1.types.GetEntryTypeRequest" }, { "name": "name", @@ -1643,14 +2136,14 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.cloud.dataplex_v1.types.EntryGroup", - "shortName": "get_entry_group" + "resultType": "google.cloud.dataplex_v1.types.EntryType", + "shortName": "get_entry_type" }, - "description": "Sample for GetEntryGroup", - "file": "dataplex_v1_generated_catalog_service_get_entry_group_sync.py", + "description": "Sample for GetEntryType", + "file": "dataplex_v1_generated_catalog_service_get_entry_type_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "dataplex_v1_generated_CatalogService_GetEntryGroup_sync", + "regionTag": "dataplex_v1_generated_CatalogService_GetEntryType_sync", "segments": [ { "end": 51, @@ -1683,7 +2176,7 @@ "type": "RESPONSE_HANDLING" } ], - "title": "dataplex_v1_generated_catalog_service_get_entry_group_sync.py" + "title": "dataplex_v1_generated_catalog_service_get_entry_type_sync.py" }, { "canonical": true, @@ -1693,19 +2186,19 @@ "fullName": "google.cloud.dataplex_v1.CatalogServiceAsyncClient", "shortName": "CatalogServiceAsyncClient" }, - "fullName": "google.cloud.dataplex_v1.CatalogServiceAsyncClient.get_entry_type", + "fullName": "google.cloud.dataplex_v1.CatalogServiceAsyncClient.get_entry", "method": { - "fullName": "google.cloud.dataplex.v1.CatalogService.GetEntryType", + "fullName": "google.cloud.dataplex.v1.CatalogService.GetEntry", "service": { "fullName": "google.cloud.dataplex.v1.CatalogService", "shortName": "CatalogService" }, - "shortName": "GetEntryType" + "shortName": "GetEntry" }, "parameters": [ { "name": "request", - "type": "google.cloud.dataplex_v1.types.GetEntryTypeRequest" + "type": "google.cloud.dataplex_v1.types.GetEntryRequest" }, { "name": "name", @@ -1724,14 +2217,14 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": 
"google.cloud.dataplex_v1.types.EntryType", - "shortName": "get_entry_type" + "resultType": "google.cloud.dataplex_v1.types.Entry", + "shortName": "get_entry" }, - "description": "Sample for GetEntryType", - "file": "dataplex_v1_generated_catalog_service_get_entry_type_async.py", + "description": "Sample for GetEntry", + "file": "dataplex_v1_generated_catalog_service_get_entry_async.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "dataplex_v1_generated_CatalogService_GetEntryType_async", + "regionTag": "dataplex_v1_generated_CatalogService_GetEntry_async", "segments": [ { "end": 51, @@ -1764,7 +2257,7 @@ "type": "RESPONSE_HANDLING" } ], - "title": "dataplex_v1_generated_catalog_service_get_entry_type_async.py" + "title": "dataplex_v1_generated_catalog_service_get_entry_async.py" }, { "canonical": true, @@ -1773,19 +2266,19 @@ "fullName": "google.cloud.dataplex_v1.CatalogServiceClient", "shortName": "CatalogServiceClient" }, - "fullName": "google.cloud.dataplex_v1.CatalogServiceClient.get_entry_type", + "fullName": "google.cloud.dataplex_v1.CatalogServiceClient.get_entry", "method": { - "fullName": "google.cloud.dataplex.v1.CatalogService.GetEntryType", + "fullName": "google.cloud.dataplex.v1.CatalogService.GetEntry", "service": { "fullName": "google.cloud.dataplex.v1.CatalogService", "shortName": "CatalogService" }, - "shortName": "GetEntryType" + "shortName": "GetEntry" }, "parameters": [ { "name": "request", - "type": "google.cloud.dataplex_v1.types.GetEntryTypeRequest" + "type": "google.cloud.dataplex_v1.types.GetEntryRequest" }, { "name": "name", @@ -1804,14 +2297,14 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.cloud.dataplex_v1.types.EntryType", - "shortName": "get_entry_type" + "resultType": "google.cloud.dataplex_v1.types.Entry", + "shortName": "get_entry" }, - "description": "Sample for GetEntryType", - "file": "dataplex_v1_generated_catalog_service_get_entry_type_sync.py", + "description": "Sample for GetEntry", + "file": "dataplex_v1_generated_catalog_service_get_entry_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "dataplex_v1_generated_CatalogService_GetEntryType_sync", + "regionTag": "dataplex_v1_generated_CatalogService_GetEntry_sync", "segments": [ { "end": 51, @@ -1844,7 +2337,7 @@ "type": "RESPONSE_HANDLING" } ], - "title": "dataplex_v1_generated_catalog_service_get_entry_type_sync.py" + "title": "dataplex_v1_generated_catalog_service_get_entry_sync.py" }, { "canonical": true, @@ -1854,19 +2347,19 @@ "fullName": "google.cloud.dataplex_v1.CatalogServiceAsyncClient", "shortName": "CatalogServiceAsyncClient" }, - "fullName": "google.cloud.dataplex_v1.CatalogServiceAsyncClient.get_entry", + "fullName": "google.cloud.dataplex_v1.CatalogServiceAsyncClient.get_metadata_job", "method": { - "fullName": "google.cloud.dataplex.v1.CatalogService.GetEntry", + "fullName": "google.cloud.dataplex.v1.CatalogService.GetMetadataJob", "service": { "fullName": "google.cloud.dataplex.v1.CatalogService", "shortName": "CatalogService" }, - "shortName": "GetEntry" + "shortName": "GetMetadataJob" }, "parameters": [ { "name": "request", - "type": "google.cloud.dataplex_v1.types.GetEntryRequest" + "type": "google.cloud.dataplex_v1.types.GetMetadataJobRequest" }, { "name": "name", @@ -1885,14 +2378,14 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.cloud.dataplex_v1.types.Entry", - "shortName": "get_entry" + "resultType": "google.cloud.dataplex_v1.types.MetadataJob", + "shortName": "get_metadata_job" }, - 
"description": "Sample for GetEntry", - "file": "dataplex_v1_generated_catalog_service_get_entry_async.py", + "description": "Sample for GetMetadataJob", + "file": "dataplex_v1_generated_catalog_service_get_metadata_job_async.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "dataplex_v1_generated_CatalogService_GetEntry_async", + "regionTag": "dataplex_v1_generated_CatalogService_GetMetadataJob_async", "segments": [ { "end": 51, @@ -1925,7 +2418,7 @@ "type": "RESPONSE_HANDLING" } ], - "title": "dataplex_v1_generated_catalog_service_get_entry_async.py" + "title": "dataplex_v1_generated_catalog_service_get_metadata_job_async.py" }, { "canonical": true, @@ -1934,19 +2427,19 @@ "fullName": "google.cloud.dataplex_v1.CatalogServiceClient", "shortName": "CatalogServiceClient" }, - "fullName": "google.cloud.dataplex_v1.CatalogServiceClient.get_entry", + "fullName": "google.cloud.dataplex_v1.CatalogServiceClient.get_metadata_job", "method": { - "fullName": "google.cloud.dataplex.v1.CatalogService.GetEntry", + "fullName": "google.cloud.dataplex.v1.CatalogService.GetMetadataJob", "service": { "fullName": "google.cloud.dataplex.v1.CatalogService", "shortName": "CatalogService" }, - "shortName": "GetEntry" + "shortName": "GetMetadataJob" }, "parameters": [ { "name": "request", - "type": "google.cloud.dataplex_v1.types.GetEntryRequest" + "type": "google.cloud.dataplex_v1.types.GetMetadataJobRequest" }, { "name": "name", @@ -1965,14 +2458,14 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.cloud.dataplex_v1.types.Entry", - "shortName": "get_entry" + "resultType": "google.cloud.dataplex_v1.types.MetadataJob", + "shortName": "get_metadata_job" }, - "description": "Sample for GetEntry", - "file": "dataplex_v1_generated_catalog_service_get_entry_sync.py", + "description": "Sample for GetMetadataJob", + "file": "dataplex_v1_generated_catalog_service_get_metadata_job_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "dataplex_v1_generated_CatalogService_GetEntry_sync", + "regionTag": "dataplex_v1_generated_CatalogService_GetMetadataJob_sync", "segments": [ { "end": 51, @@ -2005,7 +2498,7 @@ "type": "RESPONSE_HANDLING" } ], - "title": "dataplex_v1_generated_catalog_service_get_entry_sync.py" + "title": "dataplex_v1_generated_catalog_service_get_metadata_job_sync.py" }, { "canonical": true, @@ -2651,6 +3144,167 @@ ], "title": "dataplex_v1_generated_catalog_service_list_entry_types_sync.py" }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.dataplex_v1.CatalogServiceAsyncClient", + "shortName": "CatalogServiceAsyncClient" + }, + "fullName": "google.cloud.dataplex_v1.CatalogServiceAsyncClient.list_metadata_jobs", + "method": { + "fullName": "google.cloud.dataplex.v1.CatalogService.ListMetadataJobs", + "service": { + "fullName": "google.cloud.dataplex.v1.CatalogService", + "shortName": "CatalogService" + }, + "shortName": "ListMetadataJobs" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dataplex_v1.types.ListMetadataJobsRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.dataplex_v1.services.catalog_service.pagers.ListMetadataJobsAsyncPager", + "shortName": "list_metadata_jobs" + }, + "description": "Sample for ListMetadataJobs", + "file": 
"dataplex_v1_generated_catalog_service_list_metadata_jobs_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dataplex_v1_generated_CatalogService_ListMetadataJobs_async", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dataplex_v1_generated_catalog_service_list_metadata_jobs_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.dataplex_v1.CatalogServiceClient", + "shortName": "CatalogServiceClient" + }, + "fullName": "google.cloud.dataplex_v1.CatalogServiceClient.list_metadata_jobs", + "method": { + "fullName": "google.cloud.dataplex.v1.CatalogService.ListMetadataJobs", + "service": { + "fullName": "google.cloud.dataplex.v1.CatalogService", + "shortName": "CatalogService" + }, + "shortName": "ListMetadataJobs" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dataplex_v1.types.ListMetadataJobsRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.dataplex_v1.services.catalog_service.pagers.ListMetadataJobsPager", + "shortName": "list_metadata_jobs" + }, + "description": "Sample for ListMetadataJobs", + "file": "dataplex_v1_generated_catalog_service_list_metadata_jobs_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dataplex_v1_generated_CatalogService_ListMetadataJobs_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dataplex_v1_generated_catalog_service_list_metadata_jobs_sync.py" + }, { "canonical": true, "clientMethod": { diff --git a/packages/google-cloud-dataplex/scripts/fixup_dataplex_v1_keywords.py b/packages/google-cloud-dataplex/scripts/fixup_dataplex_v1_keywords.py index f5c81e4a5f46..6681941351bf 100644 --- a/packages/google-cloud-dataplex/scripts/fixup_dataplex_v1_keywords.py +++ b/packages/google-cloud-dataplex/scripts/fixup_dataplex_v1_keywords.py @@ -40,6 +40,7 @@ class dataplexCallTransformer(cst.CSTTransformer): CTRL_PARAMS: Tuple[str] = ('retry', 'timeout', 'metadata') METHOD_TO_PARAMS: Dict[str, Tuple[str]] = { 'cancel_job': ('name', ), + 'cancel_metadata_job': ('name', ), 'create_aspect_type': ('parent', 'aspect_type_id', 'aspect_type', 'validate_only', ), 'create_asset': ('parent', 'asset_id', 'asset', 'validate_only', ), 'create_content': ('parent', 'content', 'validate_only', ), @@ -53,6 +54,7 @@ class dataplexCallTransformer(cst.CSTTransformer): 'create_entry_type': ('parent', 'entry_type_id', 'entry_type', 'validate_only', ), 'create_environment': ('parent', 'environment_id', 'environment', 'validate_only', ), 'create_lake': ('parent', 'lake_id', 'lake', 'validate_only', ), + 
'create_metadata_job': ('parent', 'metadata_job', 'metadata_job_id', 'validate_only', ), 'create_partition': ('parent', 'partition', 'validate_only', ), 'create_task': ('parent', 'task_id', 'task', 'validate_only', ), 'create_zone': ('parent', 'zone_id', 'zone', 'validate_only', ), @@ -89,6 +91,7 @@ class dataplexCallTransformer(cst.CSTTransformer): 'get_iam_policy': ('resource', 'options', ), 'get_job': ('name', ), 'get_lake': ('name', ), + 'get_metadata_job': ('name', ), 'get_partition': ('name', ), 'get_task': ('name', ), 'get_zone': ('name', ), @@ -109,6 +112,7 @@ class dataplexCallTransformer(cst.CSTTransformer): 'list_jobs': ('parent', 'page_size', 'page_token', ), 'list_lake_actions': ('parent', 'page_size', 'page_token', ), 'list_lakes': ('parent', 'page_size', 'page_token', 'filter', 'order_by', ), + 'list_metadata_jobs': ('parent', 'page_size', 'page_token', 'filter', 'order_by', ), 'list_partitions': ('parent', 'page_size', 'page_token', 'filter', ), 'list_sessions': ('parent', 'page_size', 'page_token', 'filter', ), 'list_tasks': ('parent', 'page_size', 'page_token', 'filter', 'order_by', ), diff --git a/packages/google-cloud-dataplex/tests/unit/gapic/dataplex_v1/test_catalog_service.py b/packages/google-cloud-dataplex/tests/unit/gapic/dataplex_v1/test_catalog_service.py index 6198538a8234..72f4328e35a7 100644 --- a/packages/google-cloud-dataplex/tests/unit/gapic/dataplex_v1/test_catalog_service.py +++ b/packages/google-cloud-dataplex/tests/unit/gapic/dataplex_v1/test_catalog_service.py @@ -9568,6 +9568,1584 @@ async def test_search_entries_async_pages(): assert page_.raw_page.next_page_token == token +@pytest.mark.parametrize( + "request_type", + [ + catalog.CreateMetadataJobRequest, + dict, + ], +) +def test_create_metadata_job(request_type, transport: str = "grpc"): + client = CatalogServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_metadata_job), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/spam") + response = client.create_metadata_job(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = catalog.CreateMetadataJobRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +def test_create_metadata_job_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = CatalogServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = catalog.CreateMetadataJobRequest( + parent="parent_value", + metadata_job_id="metadata_job_id_value", + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
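
# --- Editor's note: illustrative sketch, not part of the generated patch. ---
# The keyword-map entries above mirror the new flattened signatures, e.g.
# create_metadata_job(parent, metadata_job, metadata_job_id, validate_only),
# which the tests below exercise against a mocked transport. A hedged sketch
# of starting a metadata import job; the import_spec contents are assumptions,
# and required scope/sync-mode settings are omitted for brevity:

from google.cloud import dataplex_v1

client = dataplex_v1.CatalogServiceClient()

job = dataplex_v1.MetadataJob(
    type_=dataplex_v1.MetadataJob.Type.IMPORT,
    import_spec=dataplex_v1.MetadataJob.ImportJobSpec(
        source_storage_uri="gs://my-bucket/metadata/",  # placeholder URI
    ),
)

operation = client.create_metadata_job(
    parent="projects/my-project/locations/us-central1",
    metadata_job=job,
    metadata_job_id="my-import-job",
)
result = operation.result()  # long-running; resolves to the finished MetadataJob
print(result.name)
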
+ with mock.patch.object( + type(client.transport.create_metadata_job), "__call__" + ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.create_metadata_job(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == catalog.CreateMetadataJobRequest( + parent="parent_value", + metadata_job_id="metadata_job_id_value", + ) + + +def test_create_metadata_job_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = CatalogServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.create_metadata_job in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.create_metadata_job + ] = mock_rpc + request = {} + client.create_metadata_job(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.create_metadata_job(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_create_metadata_job_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = CatalogServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.create_metadata_job + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[ + client._client._transport.create_metadata_job + ] = mock_rpc + + request = {} + await client.create_metadata_job(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + await client.create_metadata_job(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_create_metadata_job_async( + transport: str = "grpc_asyncio", request_type=catalog.CreateMetadataJobRequest +): + client = CatalogServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_metadata_job), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + response = await client.create_metadata_job(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = catalog.CreateMetadataJobRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +@pytest.mark.asyncio +async def test_create_metadata_job_async_from_dict(): + await test_create_metadata_job_async(request_type=dict) + + +def test_create_metadata_job_field_headers(): + client = CatalogServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = catalog.CreateMetadataJobRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_metadata_job), "__call__" + ) as call: + call.return_value = operations_pb2.Operation(name="operations/op") + client.create_metadata_job(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_create_metadata_job_field_headers_async(): + client = CatalogServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = catalog.CreateMetadataJobRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_metadata_job), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/op") + ) + await client.create_metadata_job(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. 
+ _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +def test_create_metadata_job_flattened(): + client = CatalogServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_metadata_job), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/op") + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.create_metadata_job( + parent="parent_value", + metadata_job=catalog.MetadataJob(name="name_value"), + metadata_job_id="metadata_job_id_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + arg = args[0].metadata_job + mock_val = catalog.MetadataJob(name="name_value") + assert arg == mock_val + arg = args[0].metadata_job_id + mock_val = "metadata_job_id_value" + assert arg == mock_val + + +def test_create_metadata_job_flattened_error(): + client = CatalogServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.create_metadata_job( + catalog.CreateMetadataJobRequest(), + parent="parent_value", + metadata_job=catalog.MetadataJob(name="name_value"), + metadata_job_id="metadata_job_id_value", + ) + + +@pytest.mark.asyncio +async def test_create_metadata_job_flattened_async(): + client = CatalogServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_metadata_job), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/op") + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.create_metadata_job( + parent="parent_value", + metadata_job=catalog.MetadataJob(name="name_value"), + metadata_job_id="metadata_job_id_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + arg = args[0].metadata_job + mock_val = catalog.MetadataJob(name="name_value") + assert arg == mock_val + arg = args[0].metadata_job_id + mock_val = "metadata_job_id_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_create_metadata_job_flattened_error_async(): + client = CatalogServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + await client.create_metadata_job( + catalog.CreateMetadataJobRequest(), + parent="parent_value", + metadata_job=catalog.MetadataJob(name="name_value"), + metadata_job_id="metadata_job_id_value", + ) + + +@pytest.mark.parametrize( + "request_type", + [ + catalog.GetMetadataJobRequest, + dict, + ], +) +def test_get_metadata_job(request_type, transport: str = "grpc"): + client = CatalogServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_metadata_job), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = catalog.MetadataJob( + name="name_value", + uid="uid_value", + type_=catalog.MetadataJob.Type.IMPORT, + ) + response = client.get_metadata_job(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = catalog.GetMetadataJobRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, catalog.MetadataJob) + assert response.name == "name_value" + assert response.uid == "uid_value" + assert response.type_ == catalog.MetadataJob.Type.IMPORT + + +def test_get_metadata_job_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = CatalogServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = catalog.GetMetadataJobRequest( + name="name_value", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_metadata_job), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.get_metadata_job(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == catalog.GetMetadataJobRequest( + name="name_value", + ) + + +def test_get_metadata_job_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = CatalogServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.get_metadata_job in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.get_metadata_job + ] = mock_rpc + request = {} + client.get_metadata_job(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.get_metadata_job(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_get_metadata_job_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = CatalogServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.get_metadata_job + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[ + client._client._transport.get_metadata_job + ] = mock_rpc + + request = {} + await client.get_metadata_job(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + await client.get_metadata_job(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_get_metadata_job_async( + transport: str = "grpc_asyncio", request_type=catalog.GetMetadataJobRequest +): + client = CatalogServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_metadata_job), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + catalog.MetadataJob( + name="name_value", + uid="uid_value", + type_=catalog.MetadataJob.Type.IMPORT, + ) + ) + response = await client.get_metadata_job(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = catalog.GetMetadataJobRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, catalog.MetadataJob) + assert response.name == "name_value" + assert response.uid == "uid_value" + assert response.type_ == catalog.MetadataJob.Type.IMPORT + + +@pytest.mark.asyncio +async def test_get_metadata_job_async_from_dict(): + await test_get_metadata_job_async(request_type=dict) + + +def test_get_metadata_job_field_headers(): + client = CatalogServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = catalog.GetMetadataJobRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_metadata_job), "__call__") as call: + call.return_value = catalog.MetadataJob() + client.get_metadata_job(request) + + # Establish that the underlying gRPC stub method was called. 
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == request
+
+    # Establish that the field header was sent.
+    _, _, kw = call.mock_calls[0]
+    assert (
+        "x-goog-request-params",
+        "name=name_value",
+    ) in kw["metadata"]
+
+
+@pytest.mark.asyncio
+async def test_get_metadata_job_field_headers_async():
+    client = CatalogServiceAsyncClient(
+        credentials=async_anonymous_credentials(),
+    )
+
+    # Any value that is part of the HTTP/1.1 URI should be sent as
+    # a field header. Set these to a non-empty value.
+    request = catalog.GetMetadataJobRequest()
+
+    request.name = "name_value"
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client.transport.get_metadata_job), "__call__") as call:
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(catalog.MetadataJob())
+        await client.get_metadata_job(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == request
+
+    # Establish that the field header was sent.
+    _, _, kw = call.mock_calls[0]
+    assert (
+        "x-goog-request-params",
+        "name=name_value",
+    ) in kw["metadata"]
+
+
+def test_get_metadata_job_flattened():
+    client = CatalogServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client.transport.get_metadata_job), "__call__") as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = catalog.MetadataJob()
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        client.get_metadata_job(
+            name="name_value",
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        arg = args[0].name
+        mock_val = "name_value"
+        assert arg == mock_val
+
+
+def test_get_metadata_job_flattened_error():
+    client = CatalogServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        client.get_metadata_job(
+            catalog.GetMetadataJobRequest(),
+            name="name_value",
+        )
+
+
+@pytest.mark.asyncio
+async def test_get_metadata_job_flattened_async():
+    client = CatalogServiceAsyncClient(
+        credentials=async_anonymous_credentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client.transport.get_metadata_job), "__call__") as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(catalog.MetadataJob())
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        response = await client.get_metadata_job(
+            name="name_value",
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
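+        # (The flattened keyword arguments are folded into a
+        # GetMetadataJobRequest before the transport sees the call, which is
+        # what these assertions verify.)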
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        arg = args[0].name
+        mock_val = "name_value"
+        assert arg == mock_val
+
+
+@pytest.mark.asyncio
+async def test_get_metadata_job_flattened_error_async():
+    client = CatalogServiceAsyncClient(
+        credentials=async_anonymous_credentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        await client.get_metadata_job(
+            catalog.GetMetadataJobRequest(),
+            name="name_value",
+        )
+
+
+@pytest.mark.parametrize(
+    "request_type",
+    [
+        catalog.ListMetadataJobsRequest,
+        dict,
+    ],
+)
+def test_list_metadata_jobs(request_type, transport: str = "grpc"):
+    client = CatalogServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport,
+    )
+
+    # Everything is optional in proto3 as far as the runtime is concerned,
+    # and we are mocking out the actual API, so just send an empty request.
+    request = request_type()
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+        type(client.transport.list_metadata_jobs), "__call__"
+    ) as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = catalog.ListMetadataJobsResponse(
+            next_page_token="next_page_token_value",
+            unreachable_locations=["unreachable_locations_value"],
+        )
+        response = client.list_metadata_jobs(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        request = catalog.ListMetadataJobsRequest()
+        assert args[0] == request
+
+    # Establish that the response is the type that we expect.
+    assert isinstance(response, pagers.ListMetadataJobsPager)
+    assert response.next_page_token == "next_page_token_value"
+    assert response.unreachable_locations == ["unreachable_locations_value"]
+
+
+def test_list_metadata_jobs_non_empty_request_with_auto_populated_field():
+    # This test is a coverage failsafe to make sure that UUID4 fields are
+    # automatically populated, according to AIP-4235, with non-empty requests.
+    client = CatalogServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport="grpc",
+    )
+
+    # Populate all string fields in the request which are not UUID4,
+    # since we want to check that UUID4 fields are populated automatically
+    # if they meet the requirements of AIP-4235.
+    request = catalog.ListMetadataJobsRequest(
+        parent="parent_value",
+        page_token="page_token_value",
+        filter="filter_value",
+        order_by="order_by_value",
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+        type(client.transport.list_metadata_jobs), "__call__"
+    ) as call:
+        call.return_value.name = (
+            "foo"  # operation_request.operation in compute client(s) expects a string.
+        )
+        client.list_metadata_jobs(request=request)
+        call.assert_called()
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == catalog.ListMetadataJobsRequest(
+            parent="parent_value",
+            page_token="page_token_value",
+            filter="filter_value",
+            order_by="order_by_value",
+        )
+
+
+def test_list_metadata_jobs_use_cached_wrapped_rpc():
+    # Clients should use _prep_wrapped_messages to create cached wrapped rpcs,
+    # instead of constructing them on each call
+    with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn:
+        client = CatalogServiceClient(
+            credentials=ga_credentials.AnonymousCredentials(),
+            transport="grpc",
+        )
+
+        # Should wrap all calls on client creation
+        assert wrapper_fn.call_count > 0
+        wrapper_fn.reset_mock()
+
+        # Ensure method has been cached
+        assert (
+            client._transport.list_metadata_jobs in client._transport._wrapped_methods
+        )
+
+        # Replace cached wrapped function with mock
+        mock_rpc = mock.Mock()
+        mock_rpc.return_value.name = (
+            "foo"  # operation_request.operation in compute client(s) expects a string.
+        )
+        client._transport._wrapped_methods[
+            client._transport.list_metadata_jobs
+        ] = mock_rpc
+        request = {}
+        client.list_metadata_jobs(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert mock_rpc.call_count == 1
+
+        client.list_metadata_jobs(request)
+
+        # Establish that a new wrapper was not created for this call
+        assert wrapper_fn.call_count == 0
+        assert mock_rpc.call_count == 2
+
+
+@pytest.mark.asyncio
+async def test_list_metadata_jobs_async_use_cached_wrapped_rpc(
+    transport: str = "grpc_asyncio",
+):
+    # Clients should use _prep_wrapped_messages to create cached wrapped rpcs,
+    # instead of constructing them on each call
+    with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn:
+        client = CatalogServiceAsyncClient(
+            credentials=async_anonymous_credentials(),
+            transport=transport,
+        )
+
+        # Should wrap all calls on client creation
+        assert wrapper_fn.call_count > 0
+        wrapper_fn.reset_mock()
+
+        # Ensure method has been cached
+        assert (
+            client._client._transport.list_metadata_jobs
+            in client._client._transport._wrapped_methods
+        )
+
+        # Replace cached wrapped function with mock
+        mock_rpc = mock.AsyncMock()
+        mock_rpc.return_value = mock.Mock()
+        client._client._transport._wrapped_methods[
+            client._client._transport.list_metadata_jobs
+        ] = mock_rpc
+
+        request = {}
+        await client.list_metadata_jobs(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert mock_rpc.call_count == 1
+
+        await client.list_metadata_jobs(request)
+
+        # Establish that a new wrapper was not created for this call
+        assert wrapper_fn.call_count == 0
+        assert mock_rpc.call_count == 2
+
+
+@pytest.mark.asyncio
+async def test_list_metadata_jobs_async(
+    transport: str = "grpc_asyncio", request_type=catalog.ListMetadataJobsRequest
+):
+    client = CatalogServiceAsyncClient(
+        credentials=async_anonymous_credentials(),
+        transport=transport,
+    )
+
+    # Everything is optional in proto3 as far as the runtime is concerned,
+    # and we are mocking out the actual API, so just send an empty request.
+    request = request_type()
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+        type(client.transport.list_metadata_jobs), "__call__"
+    ) as call:
+        # Designate an appropriate return value for the call.
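+        # (FakeUnaryUnaryCall makes the canned response awaitable, standing in
+        # for the call object a real grpc.aio channel would return.)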
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + catalog.ListMetadataJobsResponse( + next_page_token="next_page_token_value", + unreachable_locations=["unreachable_locations_value"], + ) + ) + response = await client.list_metadata_jobs(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = catalog.ListMetadataJobsRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListMetadataJobsAsyncPager) + assert response.next_page_token == "next_page_token_value" + assert response.unreachable_locations == ["unreachable_locations_value"] + + +@pytest.mark.asyncio +async def test_list_metadata_jobs_async_from_dict(): + await test_list_metadata_jobs_async(request_type=dict) + + +def test_list_metadata_jobs_field_headers(): + client = CatalogServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = catalog.ListMetadataJobsRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_metadata_jobs), "__call__" + ) as call: + call.return_value = catalog.ListMetadataJobsResponse() + client.list_metadata_jobs(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_list_metadata_jobs_field_headers_async(): + client = CatalogServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = catalog.ListMetadataJobsRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_metadata_jobs), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + catalog.ListMetadataJobsResponse() + ) + await client.list_metadata_jobs(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +def test_list_metadata_jobs_flattened(): + client = CatalogServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_metadata_jobs), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = catalog.ListMetadataJobsResponse() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.list_metadata_jobs( + parent="parent_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. 
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        arg = args[0].parent
+        mock_val = "parent_value"
+        assert arg == mock_val
+
+
+def test_list_metadata_jobs_flattened_error():
+    client = CatalogServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        client.list_metadata_jobs(
+            catalog.ListMetadataJobsRequest(),
+            parent="parent_value",
+        )
+
+
+@pytest.mark.asyncio
+async def test_list_metadata_jobs_flattened_async():
+    client = CatalogServiceAsyncClient(
+        credentials=async_anonymous_credentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+        type(client.transport.list_metadata_jobs), "__call__"
+    ) as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
+            catalog.ListMetadataJobsResponse()
+        )
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        response = await client.list_metadata_jobs(
+            parent="parent_value",
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        arg = args[0].parent
+        mock_val = "parent_value"
+        assert arg == mock_val
+
+
+@pytest.mark.asyncio
+async def test_list_metadata_jobs_flattened_error_async():
+    client = CatalogServiceAsyncClient(
+        credentials=async_anonymous_credentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        await client.list_metadata_jobs(
+            catalog.ListMetadataJobsRequest(),
+            parent="parent_value",
+        )
+
+
+def test_list_metadata_jobs_pager(transport_name: str = "grpc"):
+    client = CatalogServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport_name,
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+        type(client.transport.list_metadata_jobs), "__call__"
+    ) as call:
+        # Set the response to a series of pages.
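+        # (mock's side_effect yields one response per page request; the
+        # trailing RuntimeError would only surface if the pager requested more
+        # pages than the test supplies.)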
+ call.side_effect = ( + catalog.ListMetadataJobsResponse( + metadata_jobs=[ + catalog.MetadataJob(), + catalog.MetadataJob(), + catalog.MetadataJob(), + ], + next_page_token="abc", + ), + catalog.ListMetadataJobsResponse( + metadata_jobs=[], + next_page_token="def", + ), + catalog.ListMetadataJobsResponse( + metadata_jobs=[ + catalog.MetadataJob(), + ], + next_page_token="ghi", + ), + catalog.ListMetadataJobsResponse( + metadata_jobs=[ + catalog.MetadataJob(), + catalog.MetadataJob(), + ], + ), + RuntimeError, + ) + + expected_metadata = () + retry = retries.Retry() + timeout = 5 + expected_metadata = tuple(expected_metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), + ) + pager = client.list_metadata_jobs(request={}, retry=retry, timeout=timeout) + + assert pager._metadata == expected_metadata + assert pager._retry == retry + assert pager._timeout == timeout + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, catalog.MetadataJob) for i in results) + + +def test_list_metadata_jobs_pages(transport_name: str = "grpc"): + client = CatalogServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport_name, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_metadata_jobs), "__call__" + ) as call: + # Set the response to a series of pages. + call.side_effect = ( + catalog.ListMetadataJobsResponse( + metadata_jobs=[ + catalog.MetadataJob(), + catalog.MetadataJob(), + catalog.MetadataJob(), + ], + next_page_token="abc", + ), + catalog.ListMetadataJobsResponse( + metadata_jobs=[], + next_page_token="def", + ), + catalog.ListMetadataJobsResponse( + metadata_jobs=[ + catalog.MetadataJob(), + ], + next_page_token="ghi", + ), + catalog.ListMetadataJobsResponse( + metadata_jobs=[ + catalog.MetadataJob(), + catalog.MetadataJob(), + ], + ), + RuntimeError, + ) + pages = list(client.list_metadata_jobs(request={}).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.asyncio +async def test_list_metadata_jobs_async_pager(): + client = CatalogServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_metadata_jobs), + "__call__", + new_callable=mock.AsyncMock, + ) as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + catalog.ListMetadataJobsResponse( + metadata_jobs=[ + catalog.MetadataJob(), + catalog.MetadataJob(), + catalog.MetadataJob(), + ], + next_page_token="abc", + ), + catalog.ListMetadataJobsResponse( + metadata_jobs=[], + next_page_token="def", + ), + catalog.ListMetadataJobsResponse( + metadata_jobs=[ + catalog.MetadataJob(), + ], + next_page_token="ghi", + ), + catalog.ListMetadataJobsResponse( + metadata_jobs=[ + catalog.MetadataJob(), + catalog.MetadataJob(), + ], + ), + RuntimeError, + ) + async_pager = await client.list_metadata_jobs( + request={}, + ) + assert async_pager.next_page_token == "abc" + responses = [] + async for response in async_pager: # pragma: no branch + responses.append(response) + + assert len(responses) == 6 + assert all(isinstance(i, catalog.MetadataJob) for i in responses) + + +@pytest.mark.asyncio +async def test_list_metadata_jobs_async_pages(): + client = CatalogServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_metadata_jobs), + "__call__", + new_callable=mock.AsyncMock, + ) as call: + # Set the response to a series of pages. + call.side_effect = ( + catalog.ListMetadataJobsResponse( + metadata_jobs=[ + catalog.MetadataJob(), + catalog.MetadataJob(), + catalog.MetadataJob(), + ], + next_page_token="abc", + ), + catalog.ListMetadataJobsResponse( + metadata_jobs=[], + next_page_token="def", + ), + catalog.ListMetadataJobsResponse( + metadata_jobs=[ + catalog.MetadataJob(), + ], + next_page_token="ghi", + ), + catalog.ListMetadataJobsResponse( + metadata_jobs=[ + catalog.MetadataJob(), + catalog.MetadataJob(), + ], + ), + RuntimeError, + ) + pages = [] + # Workaround issue in python 3.9 related to code coverage by adding `# pragma: no branch` + # See https://github.com/googleapis/gapic-generator-python/pull/1174#issuecomment-1025132372 + async for page_ in ( # pragma: no branch + await client.list_metadata_jobs(request={}) + ).pages: + pages.append(page_) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.parametrize( + "request_type", + [ + catalog.CancelMetadataJobRequest, + dict, + ], +) +def test_cancel_metadata_job(request_type, transport: str = "grpc"): + client = CatalogServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.cancel_metadata_job), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = None + response = client.cancel_metadata_job(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = catalog.CancelMetadataJobRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert response is None + + +def test_cancel_metadata_job_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. 
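+    # (Under AIP-4235, a string field annotated for auto-population is filled
+    # in with a client-generated UUID4 whenever the caller leaves it unset.)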
+    client = CatalogServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport="grpc",
+    )
+
+    # Populate all string fields in the request which are not UUID4,
+    # since we want to check that UUID4 fields are populated automatically
+    # if they meet the requirements of AIP-4235.
+    request = catalog.CancelMetadataJobRequest(
+        name="name_value",
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+        type(client.transport.cancel_metadata_job), "__call__"
+    ) as call:
+        call.return_value.name = (
+            "foo"  # operation_request.operation in compute client(s) expects a string.
+        )
+        client.cancel_metadata_job(request=request)
+        call.assert_called()
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == catalog.CancelMetadataJobRequest(
+            name="name_value",
+        )
+
+
+def test_cancel_metadata_job_use_cached_wrapped_rpc():
+    # Clients should use _prep_wrapped_messages to create cached wrapped rpcs,
+    # instead of constructing them on each call
+    with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn:
+        client = CatalogServiceClient(
+            credentials=ga_credentials.AnonymousCredentials(),
+            transport="grpc",
+        )
+
+        # Should wrap all calls on client creation
+        assert wrapper_fn.call_count > 0
+        wrapper_fn.reset_mock()
+
+        # Ensure method has been cached
+        assert (
+            client._transport.cancel_metadata_job in client._transport._wrapped_methods
+        )
+
+        # Replace cached wrapped function with mock
+        mock_rpc = mock.Mock()
+        mock_rpc.return_value.name = (
+            "foo"  # operation_request.operation in compute client(s) expects a string.
+        )
+        client._transport._wrapped_methods[
+            client._transport.cancel_metadata_job
+        ] = mock_rpc
+        request = {}
+        client.cancel_metadata_job(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert mock_rpc.call_count == 1
+
+        client.cancel_metadata_job(request)
+
+        # Establish that a new wrapper was not created for this call
+        assert wrapper_fn.call_count == 0
+        assert mock_rpc.call_count == 2
+
+
+@pytest.mark.asyncio
+async def test_cancel_metadata_job_async_use_cached_wrapped_rpc(
+    transport: str = "grpc_asyncio",
+):
+    # Clients should use _prep_wrapped_messages to create cached wrapped rpcs,
+    # instead of constructing them on each call
+    with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn:
+        client = CatalogServiceAsyncClient(
+            credentials=async_anonymous_credentials(),
+            transport=transport,
+        )
+
+        # Should wrap all calls on client creation
+        assert wrapper_fn.call_count > 0
+        wrapper_fn.reset_mock()
+
+        # Ensure method has been cached
+        assert (
+            client._client._transport.cancel_metadata_job
+            in client._client._transport._wrapped_methods
+        )
+
+        # Replace cached wrapped function with mock
+        mock_rpc = mock.AsyncMock()
+        mock_rpc.return_value = mock.Mock()
+        client._client._transport._wrapped_methods[
+            client._client._transport.cancel_metadata_job
+        ] = mock_rpc
+
+        request = {}
+        await client.cancel_metadata_job(request)
+
+        # Establish that the underlying gRPC stub method was called.
+ assert mock_rpc.call_count == 1 + + await client.cancel_metadata_job(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_cancel_metadata_job_async( + transport: str = "grpc_asyncio", request_type=catalog.CancelMetadataJobRequest +): + client = CatalogServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.cancel_metadata_job), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + response = await client.cancel_metadata_job(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = catalog.CancelMetadataJobRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert response is None + + +@pytest.mark.asyncio +async def test_cancel_metadata_job_async_from_dict(): + await test_cancel_metadata_job_async(request_type=dict) + + +def test_cancel_metadata_job_field_headers(): + client = CatalogServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = catalog.CancelMetadataJobRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.cancel_metadata_job), "__call__" + ) as call: + call.return_value = None + client.cancel_metadata_job(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_cancel_metadata_job_field_headers_async(): + client = CatalogServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = catalog.CancelMetadataJobRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.cancel_metadata_job), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + await client.cancel_metadata_job(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +def test_cancel_metadata_job_flattened(): + client = CatalogServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+    with mock.patch.object(
+        type(client.transport.cancel_metadata_job), "__call__"
+    ) as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = None
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        client.cancel_metadata_job(
+            name="name_value",
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        arg = args[0].name
+        mock_val = "name_value"
+        assert arg == mock_val
+
+
+def test_cancel_metadata_job_flattened_error():
+    client = CatalogServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        client.cancel_metadata_job(
+            catalog.CancelMetadataJobRequest(),
+            name="name_value",
+        )
+
+
+@pytest.mark.asyncio
+async def test_cancel_metadata_job_flattened_async():
+    client = CatalogServiceAsyncClient(
+        credentials=async_anonymous_credentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+        type(client.transport.cancel_metadata_job), "__call__"
+    ) as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None)
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        response = await client.cancel_metadata_job(
+            name="name_value",
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        arg = args[0].name
+        mock_val = "name_value"
+        assert arg == mock_val
+
+
+@pytest.mark.asyncio
+async def test_cancel_metadata_job_flattened_error_async():
+    client = CatalogServiceAsyncClient(
+        credentials=async_anonymous_credentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        await client.cancel_metadata_job(
+            catalog.CancelMetadataJobRequest(),
+            name="name_value",
+        )
+
+
 def test_credentials_transport_error():
     # It is an error to provide credentials and a transport instance.
     transport = transports.CatalogServiceGrpcTransport(
@@ -10157,6 +11735,96 @@ def test_search_entries_empty_call_grpc():
     assert args[0] == request_msg
 
 
+# This test is a coverage failsafe to make sure that totally empty calls,
+# i.e. request == None and no flattened fields passed, work.
+def test_create_metadata_job_empty_call_grpc():
+    client = CatalogServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport="grpc",
+    )
+
+    # Mock the actual call, and fake the request.
+    with mock.patch.object(
+        type(client.transport.create_metadata_job), "__call__"
+    ) as call:
+        call.return_value = operations_pb2.Operation(name="operations/op")
+        client.create_metadata_job(request=None)
+
+        # Establish that the underlying stub method was called.
+        call.assert_called()
+        _, args, _ = call.mock_calls[0]
+        request_msg = catalog.CreateMetadataJobRequest()
+
+        assert args[0] == request_msg
+
+
+# This test is a coverage failsafe to make sure that totally empty calls,
+# i.e. request == None and no flattened fields passed, work.
+def test_get_metadata_job_empty_call_grpc(): + client = CatalogServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.get_metadata_job), "__call__") as call: + call.return_value = catalog.MetadataJob() + client.get_metadata_job(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = catalog.GetMetadataJobRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_list_metadata_jobs_empty_call_grpc(): + client = CatalogServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.list_metadata_jobs), "__call__" + ) as call: + call.return_value = catalog.ListMetadataJobsResponse() + client.list_metadata_jobs(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = catalog.ListMetadataJobsRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_cancel_metadata_job_empty_call_grpc(): + client = CatalogServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.cancel_metadata_job), "__call__" + ) as call: + call.return_value = None + client.cancel_metadata_job(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = catalog.CancelMetadataJobRequest() + + assert args[0] == request_msg + + def test_transport_kind_grpc_asyncio(): transport = CatalogServiceAsyncClient.get_transport_class("grpc_asyncio")( credentials=async_anonymous_credentials() @@ -10806,6 +12474,117 @@ async def test_search_entries_empty_call_grpc_asyncio(): assert args[0] == request_msg +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_create_metadata_job_empty_call_grpc_asyncio(): + client = CatalogServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.create_metadata_job), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + await client.create_metadata_job(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = catalog.CreateMetadataJobRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. 
+@pytest.mark.asyncio +async def test_get_metadata_job_empty_call_grpc_asyncio(): + client = CatalogServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.get_metadata_job), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + catalog.MetadataJob( + name="name_value", + uid="uid_value", + type_=catalog.MetadataJob.Type.IMPORT, + ) + ) + await client.get_metadata_job(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = catalog.GetMetadataJobRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_list_metadata_jobs_empty_call_grpc_asyncio(): + client = CatalogServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.list_metadata_jobs), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + catalog.ListMetadataJobsResponse( + next_page_token="next_page_token_value", + unreachable_locations=["unreachable_locations_value"], + ) + ) + await client.list_metadata_jobs(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = catalog.ListMetadataJobsRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_cancel_metadata_job_empty_call_grpc_asyncio(): + client = CatalogServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.cancel_metadata_job), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + await client.cancel_metadata_job(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = catalog.CancelMetadataJobRequest() + + assert args[0] == request_msg + + def test_transport_grpc_default(): # A client should use the gRPC transport by default. 
client = CatalogServiceClient( @@ -10861,6 +12640,10 @@ def test_catalog_service_base_transport(): "get_entry", "lookup_entry", "search_entries", + "create_metadata_job", + "get_metadata_job", + "list_metadata_jobs", + "cancel_metadata_job", "get_location", "list_locations", "get_operation", @@ -11362,8 +13145,36 @@ def test_parse_entry_type_path(): assert expected == actual +def test_metadata_job_path(): + project = "whelk" + location = "octopus" + metadataJob = "oyster" + expected = ( + "projects/{project}/locations/{location}/metadataJobs/{metadataJob}".format( + project=project, + location=location, + metadataJob=metadataJob, + ) + ) + actual = CatalogServiceClient.metadata_job_path(project, location, metadataJob) + assert expected == actual + + +def test_parse_metadata_job_path(): + expected = { + "project": "nudibranch", + "location": "cuttlefish", + "metadataJob": "mussel", + } + path = CatalogServiceClient.metadata_job_path(**expected) + + # Check that the path construction is reversible. + actual = CatalogServiceClient.parse_metadata_job_path(path) + assert expected == actual + + def test_common_billing_account_path(): - billing_account = "whelk" + billing_account = "winkle" expected = "billingAccounts/{billing_account}".format( billing_account=billing_account, ) @@ -11373,7 +13184,7 @@ def test_common_billing_account_path(): def test_parse_common_billing_account_path(): expected = { - "billing_account": "octopus", + "billing_account": "nautilus", } path = CatalogServiceClient.common_billing_account_path(**expected) @@ -11383,7 +13194,7 @@ def test_parse_common_billing_account_path(): def test_common_folder_path(): - folder = "oyster" + folder = "scallop" expected = "folders/{folder}".format( folder=folder, ) @@ -11393,7 +13204,7 @@ def test_common_folder_path(): def test_parse_common_folder_path(): expected = { - "folder": "nudibranch", + "folder": "abalone", } path = CatalogServiceClient.common_folder_path(**expected) @@ -11403,7 +13214,7 @@ def test_parse_common_folder_path(): def test_common_organization_path(): - organization = "cuttlefish" + organization = "squid" expected = "organizations/{organization}".format( organization=organization, ) @@ -11413,7 +13224,7 @@ def test_common_organization_path(): def test_parse_common_organization_path(): expected = { - "organization": "mussel", + "organization": "clam", } path = CatalogServiceClient.common_organization_path(**expected) @@ -11423,7 +13234,7 @@ def test_parse_common_organization_path(): def test_common_project_path(): - project = "winkle" + project = "whelk" expected = "projects/{project}".format( project=project, ) @@ -11433,7 +13244,7 @@ def test_common_project_path(): def test_parse_common_project_path(): expected = { - "project": "nautilus", + "project": "octopus", } path = CatalogServiceClient.common_project_path(**expected) @@ -11443,8 +13254,8 @@ def test_parse_common_project_path(): def test_common_location_path(): - project = "scallop" - location = "abalone" + project = "oyster" + location = "nudibranch" expected = "projects/{project}/locations/{location}".format( project=project, location=location, @@ -11455,8 +13266,8 @@ def test_common_location_path(): def test_parse_common_location_path(): expected = { - "project": "squid", - "location": "clam", + "project": "cuttlefish", + "location": "mussel", } path = CatalogServiceClient.common_location_path(**expected) diff --git a/packages/google-cloud-dataplex/tests/unit/gapic/dataplex_v1/test_data_scan_service.py 
b/packages/google-cloud-dataplex/tests/unit/gapic/dataplex_v1/test_data_scan_service.py index ada3a77c02bd..2dc3372cf740 100644 --- a/packages/google-cloud-dataplex/tests/unit/gapic/dataplex_v1/test_data_scan_service.py +++ b/packages/google-cloud-dataplex/tests/unit/gapic/dataplex_v1/test_data_scan_service.py @@ -71,6 +71,7 @@ transports, ) from google.cloud.dataplex_v1.types import ( + data_discovery, data_profile, data_quality, datascans, @@ -5508,10 +5509,38 @@ def test_data_scan_service_grpc_lro_async_client(): assert transport.operations_client is transport.operations_client -def test_data_scan_path(): +def test_connection_path(): project = "squid" location = "clam" - dataScan = "whelk" + connection = "whelk" + expected = ( + "projects/{project}/locations/{location}/connections/{connection}".format( + project=project, + location=location, + connection=connection, + ) + ) + actual = DataScanServiceClient.connection_path(project, location, connection) + assert expected == actual + + +def test_parse_connection_path(): + expected = { + "project": "octopus", + "location": "oyster", + "connection": "nudibranch", + } + path = DataScanServiceClient.connection_path(**expected) + + # Check that the path construction is reversible. + actual = DataScanServiceClient.parse_connection_path(path) + assert expected == actual + + +def test_data_scan_path(): + project = "cuttlefish" + location = "mussel" + dataScan = "winkle" expected = "projects/{project}/locations/{location}/dataScans/{dataScan}".format( project=project, location=location, @@ -5523,9 +5552,9 @@ def test_data_scan_path(): def test_parse_data_scan_path(): expected = { - "project": "octopus", - "location": "oyster", - "dataScan": "nudibranch", + "project": "nautilus", + "location": "scallop", + "dataScan": "abalone", } path = DataScanServiceClient.data_scan_path(**expected) @@ -5535,10 +5564,10 @@ def test_parse_data_scan_path(): def test_data_scan_job_path(): - project = "cuttlefish" - location = "mussel" - dataScan = "winkle" - job = "nautilus" + project = "squid" + location = "clam" + dataScan = "whelk" + job = "octopus" expected = "projects/{project}/locations/{location}/dataScans/{dataScan}/jobs/{job}".format( project=project, location=location, @@ -5551,10 +5580,10 @@ def test_data_scan_job_path(): def test_parse_data_scan_job_path(): expected = { - "project": "scallop", - "location": "abalone", - "dataScan": "squid", - "job": "clam", + "project": "oyster", + "location": "nudibranch", + "dataScan": "cuttlefish", + "job": "mussel", } path = DataScanServiceClient.data_scan_job_path(**expected) @@ -5563,12 +5592,35 @@ def test_parse_data_scan_job_path(): assert expected == actual +def test_dataset_path(): + project = "winkle" + dataset = "nautilus" + expected = "projects/{project}/datasets/{dataset}".format( + project=project, + dataset=dataset, + ) + actual = DataScanServiceClient.dataset_path(project, dataset) + assert expected == actual + + +def test_parse_dataset_path(): + expected = { + "project": "scallop", + "dataset": "abalone", + } + path = DataScanServiceClient.dataset_path(**expected) + + # Check that the path construction is reversible. 
+ actual = DataScanServiceClient.parse_dataset_path(path) + assert expected == actual + + def test_entity_path(): - project = "whelk" - location = "octopus" - lake = "oyster" - zone = "nudibranch" - entity = "cuttlefish" + project = "squid" + location = "clam" + lake = "whelk" + zone = "octopus" + entity = "oyster" expected = "projects/{project}/locations/{location}/lakes/{lake}/zones/{zone}/entities/{entity}".format( project=project, location=location, @@ -5582,11 +5634,11 @@ def test_entity_path(): def test_parse_entity_path(): expected = { - "project": "mussel", - "location": "winkle", - "lake": "nautilus", - "zone": "scallop", - "entity": "abalone", + "project": "nudibranch", + "location": "cuttlefish", + "lake": "mussel", + "zone": "winkle", + "entity": "nautilus", } path = DataScanServiceClient.entity_path(**expected) @@ -5596,7 +5648,7 @@ def test_parse_entity_path(): def test_common_billing_account_path(): - billing_account = "squid" + billing_account = "scallop" expected = "billingAccounts/{billing_account}".format( billing_account=billing_account, ) @@ -5606,7 +5658,7 @@ def test_common_billing_account_path(): def test_parse_common_billing_account_path(): expected = { - "billing_account": "clam", + "billing_account": "abalone", } path = DataScanServiceClient.common_billing_account_path(**expected) @@ -5616,7 +5668,7 @@ def test_parse_common_billing_account_path(): def test_common_folder_path(): - folder = "whelk" + folder = "squid" expected = "folders/{folder}".format( folder=folder, ) @@ -5626,7 +5678,7 @@ def test_common_folder_path(): def test_parse_common_folder_path(): expected = { - "folder": "octopus", + "folder": "clam", } path = DataScanServiceClient.common_folder_path(**expected) @@ -5636,7 +5688,7 @@ def test_parse_common_folder_path(): def test_common_organization_path(): - organization = "oyster" + organization = "whelk" expected = "organizations/{organization}".format( organization=organization, ) @@ -5646,7 +5698,7 @@ def test_common_organization_path(): def test_parse_common_organization_path(): expected = { - "organization": "nudibranch", + "organization": "octopus", } path = DataScanServiceClient.common_organization_path(**expected) @@ -5656,7 +5708,7 @@ def test_parse_common_organization_path(): def test_common_project_path(): - project = "cuttlefish" + project = "oyster" expected = "projects/{project}".format( project=project, ) @@ -5666,7 +5718,7 @@ def test_common_project_path(): def test_parse_common_project_path(): expected = { - "project": "mussel", + "project": "nudibranch", } path = DataScanServiceClient.common_project_path(**expected) @@ -5676,8 +5728,8 @@ def test_parse_common_project_path(): def test_common_location_path(): - project = "winkle" - location = "nautilus" + project = "cuttlefish" + location = "mussel" expected = "projects/{project}/locations/{location}".format( project=project, location=location, @@ -5688,8 +5740,8 @@ def test_common_location_path(): def test_parse_common_location_path(): expected = { - "project": "scallop", - "location": "abalone", + "project": "winkle", + "location": "nautilus", } path = DataScanServiceClient.common_location_path(**expected)